prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>SEInfo.java<|end_file_name|><|fim▁begin|>/* * Copyright 2006-2010 Virtual Laboratory for e-Science (www.vl-e.nl) * Copyright 2012-2013 Netherlands eScience Center. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * You may obtain a copy of the License at the following location: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * For the full license, see: LICENSE.txt (located in the root folder of this distribution). * --- */ // source: package nl.esciencecenter.vlet.vfs.lfc; public class SEInfo { public String hostname=null; public int optionalPort=-1; public SEInfo(String infoStr) {<|fim▁hole|> String strs[]=infoStr.split(":"); if (strs.length>0) hostname=strs[0]; if (strs.length>1) optionalPort=Integer.parseInt(strs[1]); } public boolean hasExplicitPort() { return (optionalPort>0); } public int getPort() { return optionalPort; } public String getHostname() { return hostname; } }<|fim▁end|>
// fail not or else fail later if ((infoStr==null) || infoStr.equals("")) throw new NullPointerException("Storage Element info string can not be null or empty");
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! This is a simple "Flat" rendering pipeline. //! It doesn't support blended objects and uses front-to-back ordering. //! The pipeline is meant for simple applications and fall-back paths. use std::marker::PhantomData; use gfx; use gfx_phase; use gfx_scene; /// A short typedef for the phase. pub type Phase<R> = gfx_phase::CachedPhase<R, ::Material<R>, ::view::Info<f32>, Technique<R>, >; mod param { #![allow(missing_docs)] use gfx::shade::TextureParam; gfx_parameters!( Struct { u_Transform@ mvp: [[f32; 4]; 4], u_Color@ color: [f32; 4], t_Diffuse@ texture: TextureParam<R>, u_AlphaTest@ alpha_test: f32, }); } const FLAT_VS : &'static [u8] = include_bytes!("../../gpu/flat.glslv"); const FLAT_FS : &'static [u8] = include_bytes!("../../gpu/flat.glslf"); const FLAT_TEX_VS: &'static [u8] = include_bytes!("../../gpu/flat_tex.glslv"); const FLAT_TEX_FS: &'static [u8] = include_bytes!("../../gpu/flat_tex.glslf"); /// Pipeline creation error. #[derive(Clone, Debug, PartialEq)] pub enum Error { /// Failed to create a texture. Texture(gfx::tex::TextureError), /// Failed to link a program. Program(gfx::ProgramError), } impl From<gfx::tex::TextureError> for Error { fn from(e: gfx::tex::TextureError) -> Error { Error::Texture(e) } } impl From<gfx::ProgramError> for Error { fn from(e: gfx::ProgramError) -> Error { Error::Program(e) } } /// The core technique of the pipeline. pub struct Technique<R: gfx::Resources> { program: gfx::handle::Program<R>, program_textured: gfx::handle::Program<R>, state: gfx::DrawState, /// The default texture used for materials that don't have it. pub default_texture: gfx::handle::Texture<R>, } impl<R: gfx::Resources> Technique<R> { /// Create a new technique. 
pub fn new<F: gfx::Factory<R>>(factory: &mut F) -> Result<Technique<R>, Error> { use gfx::traits::FactoryExt; Ok(Technique { program: try!(factory.link_program(FLAT_VS, FLAT_FS)), program_textured: try!(factory.link_program(FLAT_TEX_VS, FLAT_TEX_FS)), state: gfx::DrawState::new().depth(gfx::state::Comparison::LessEqual, true), default_texture: try!(factory.create_texture_rgba8_static(1, 1, &[0xFFFFFFFF])), }) } } #[allow(missing_docs)] #[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] pub enum Kernel { Flat, Textured,<|fim▁hole|>impl<R: gfx::Resources> gfx_phase::Technique<R, ::Material<R>, ::view::Info<f32>> for Technique<R> { type Kernel = Kernel; type Params = param::Struct<R>; fn test(&self, mesh: &gfx::Mesh<R>, mat: &::Material<R>) -> Option<Kernel> { let textured = mat.texture.is_some() && mesh.attributes.iter().find(|a| a.name == "a_Tex0").is_some(); match mat.transparency { ::Transparency::Opaque if textured => Some(Kernel::Textured), ::Transparency::Opaque => Some(Kernel::Flat), ::Transparency::Cutout(v) if textured => Some(Kernel::AlphaCut(v)), _ => None } } fn compile<'a>(&'a self, kernel: Kernel) -> gfx_phase::TechResult<'a, R, param::Struct<R>> { ( if kernel != Kernel::Flat { &self.program_textured } else { &self.program }, param::Struct { mvp: [[0.0; 4]; 4], color: [0.0; 4], texture: (self.default_texture.clone(), None), alpha_test: if let Kernel::AlphaCut(v) = kernel { v as f32 / 255 as f32 }else { 0.0 }, _r: PhantomData, }, &self.state, None, ) } fn fix_params(&self, mat: &::Material<R>, space: &::view::Info<f32>, params: &mut param::Struct<R>) { use cgmath::FixedArray; params.mvp = *space.mx_vertex.as_fixed(); params.color = mat.color; if let Some(ref tex) = mat.texture { params.texture = tex.clone(); } } } /// The flat pipeline. pub struct Pipeline<R: gfx::Resources> { /// The only rendering phase. pub phase: Phase<R>, /// Background color. Set to none if you don't want the screen to be cleared. 
pub background: Option<gfx::ColorValue>, } impl<R: gfx::Resources> Pipeline<R> { /// Create a new pipeline. pub fn new<F: gfx::Factory<R>>(factory: &mut F) -> Result<Pipeline<R>, Error> { Technique::new(factory).map(|tech| Pipeline { phase: gfx_phase::Phase::new("Main", tech) .with_sort(gfx_phase::sort::front_to_back) .with_cache(), background: Some([0.0; 4]), }) } } impl<R: gfx::Resources> ::Pipeline<f32, R> for Pipeline<R> { fn render<A, T>(&mut self, scene: &A, camera: &A::Camera, stream: &mut T) -> Result<A::Status, gfx_scene::Error> where A: gfx_scene::AbstractScene<R, ViewInfo = ::view::Info<f32>, Material = ::Material<R>>, T: gfx::Stream<R>, { // clear if let Some(color) = self.background { stream.clear(gfx::ClearData { color: color, depth: 1.0, stencil: 0, }); } // draw scene.draw(&mut self.phase, camera, stream) } }<|fim▁end|>
AlphaCut(::AlphaThreshold), }
<|file_name|>factory.rs<|end_file_name|><|fim▁begin|>// Copyright 2015-2017 Parity Technologies (UK) Ltd. // This file is part of Parity. // Parity is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // Parity is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License // along with Parity. If not, see <http://www.gnu.org/licenses/>. //! Evm factory. //! //! TODO: consider spliting it into two separate files. use std::fmt; use std::sync::Arc; use evm::Evm; use util::{U256, Uint}; use super::interpreter::SharedCache; #[derive(Debug, PartialEq, Clone)] /// Type of EVM to use. 
pub enum VMType { /// JIT EVM #[cfg(feature = "jit")] Jit, /// RUST EVM Interpreter } impl fmt::Display for VMType { #[cfg(feature="jit")] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", match *self { VMType::Jit => "JIT", VMType::Interpreter => "INT" }) } #[cfg(not(feature="jit"))] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", match *self { VMType::Interpreter => "INT" }) } } impl Default for VMType { fn default() -> Self { VMType::Interpreter } } impl VMType { /// Return all possible VMs (JIT, Interpreter) #[cfg(feature = "jit")] pub fn all() -> Vec<VMType> { vec![VMType::Jit, VMType::Interpreter] } /// Return all possible VMs (Interpreter) #[cfg(not(feature = "jit"))] pub fn all() -> Vec<VMType> { vec![VMType::Interpreter] } /// Return new jit if it's possible #[cfg(not(feature = "jit"))] pub fn jit() -> Option<Self> { None } /// Return new jit if it's possible #[cfg(feature = "jit")] pub fn jit() -> Option<Self> { Some(VMType::Jit) } } /// Evm factory. Creates appropriate Evm. #[derive(Clone)] pub struct Factory { evm: VMType, evm_cache: Arc<SharedCache>, } impl Factory { /// Create fresh instance of VM /// Might choose implementation depending on supplied gas. #[cfg(feature = "jit")] pub fn create(&self, gas: U256) -> Box<Evm> {<|fim▁hole|> Box::new(super::jit::JitEvm::default()) }, VMType::Interpreter => if Self::can_fit_in_usize(gas) { Box::new(super::interpreter::Interpreter::<usize>::new(self.evm_cache.clone())) } else { Box::new(super::interpreter::Interpreter::<U256>::new(self.evm_cache.clone())) } } } /// Create fresh instance of VM /// Might choose implementation depending on supplied gas. 
#[cfg(not(feature = "jit"))] pub fn create(&self, gas: U256) -> Box<Evm> { match self.evm { VMType::Interpreter => if Self::can_fit_in_usize(gas) { Box::new(super::interpreter::Interpreter::<usize>::new(self.evm_cache.clone())) } else { Box::new(super::interpreter::Interpreter::<U256>::new(self.evm_cache.clone())) } } } /// Create new instance of specific `VMType` factory, with a size in bytes /// for caching jump destinations. pub fn new(evm: VMType, cache_size: usize) -> Self { Factory { evm: evm, evm_cache: Arc::new(SharedCache::new(cache_size)), } } fn can_fit_in_usize(gas: U256) -> bool { gas == U256::from(gas.low_u64() as usize) } } impl Default for Factory { /// Returns jitvm factory #[cfg(all(feature = "jit", not(test)))] fn default() -> Factory { Factory { evm: VMType::Jit, evm_cache: Arc::new(SharedCache::default()), } } /// Returns native rust evm factory #[cfg(any(not(feature = "jit"), test))] fn default() -> Factory { Factory { evm: VMType::Interpreter, evm_cache: Arc::new(SharedCache::default()), } } } #[test] fn test_create_vm() { let _vm = Factory::default().create(U256::zero()); } /// Create tests by injecting different VM factories #[macro_export] macro_rules! evm_test( (ignorejit => $name_test: ident: $name_jit: ident, $name_int: ident) => { #[test] #[ignore] #[cfg(feature = "jit")] fn $name_jit() { $name_test(Factory::new(VMType::Jit, 1024 * 32)); } #[test] fn $name_int() { $name_test(Factory::new(VMType::Interpreter, 1024 * 32)); } }; ($name_test: ident: $name_jit: ident, $name_int: ident) => { #[test] #[cfg(feature = "jit")] fn $name_jit() { $name_test(Factory::new(VMType::Jit, 1024 * 32)); } #[test] fn $name_int() { $name_test(Factory::new(VMType::Interpreter, 1024 * 32)); } } ); /// Create ignored tests by injecting different VM factories #[macro_export] macro_rules! 
evm_test_ignore( ($name_test: ident: $name_jit: ident, $name_int: ident) => { #[test] #[ignore] #[cfg(feature = "jit")] #[cfg(feature = "ignored-tests")] fn $name_jit() { $name_test(Factory::new(VMType::Jit, 1024 * 32)); } #[test] #[ignore] #[cfg(feature = "ignored-tests")] fn $name_int() { $name_test(Factory::new(VMType::Interpreter, 1024 * 32)); } } );<|fim▁end|>
match self.evm { VMType::Jit => {
<|file_name|>jquery.highlight-3.js<|end_file_name|><|fim▁begin|>/* highlight v3 Highlights arbitrary terms. <http://johannburkard.de/blog/programming/javascript/highlight-javascript-text-higlighting-jquery-plugin.html> MIT license. Johann Burkard <http://johannburkard.de> <mailto:[email protected]> */ jQuery.fn.highlight = function(pat) { function innerHighlight(node, pat) { var skip = 0; if (node.nodeType == 3) { var pos = node.data.toUpperCase().indexOf(pat); if (pos >= 0) { var spannode = document.createElement('span'); spannode.className = 'term'; var middlebit = node.splitText(pos); var endbit = middlebit.splitText(pat.length); var middleclone = middlebit.cloneNode(true); spannode.appendChild(middleclone); middlebit.parentNode.replaceChild(spannode, middlebit); skip = 1; } } else if (node.nodeType == 1 && node.childNodes && !/(script|style)/i.test(node.tagName)) {<|fim▁hole|> } return skip; } return this.each(function() { innerHighlight(this, pat.toUpperCase()); }); }; jQuery.fn.removeHighlight = function() { return this.find("span.term").each(function() { $(this).contents().unwrap(); }); };<|fim▁end|>
for (var i = 0; i < node.childNodes.length; ++i) { i += innerHighlight(node.childNodes[i], pat); }
<|file_name|>Intitule.java<|end_file_name|><|fim▁begin|>// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.8-b130911.1802 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2015.10.21 at 02:36:24 PM CEST // package nl.wetten.bwbng.toestand; import java.util.ArrayList; import java.util.List; import javax.xml.bind.JAXBElement; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlElementRef; import javax.xml.bind.annotation.XmlElementRefs; import javax.xml.bind.annotation.XmlMixed; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlSchemaType; import javax.xml.bind.annotation.XmlType; import javax.xml.bind.annotation.adapters.CollapsedStringAdapter; import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter; /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. 
* * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;choice maxOccurs="unbounded" minOccurs="0"> * &lt;group ref="{}tekst.minimaal"/> * &lt;element ref="{}nootref"/> * &lt;element ref="{}noot"/> * &lt;/choice> * &lt;element ref="{}meta-data" minOccurs="0"/> * &lt;/sequence> * &lt;attGroup ref="{}attlist.intitule"/> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "content" }) @XmlRootElement(name = "intitule") public class Intitule { @XmlElementRefs({ @XmlElementRef(name = "nadruk", type = Nadruk.class, required = false), @XmlElementRef(name = "omissie", type = Omissie.class, required = false), @XmlElementRef(name = "sup", type = JAXBElement.class, required = false), @XmlElementRef(name = "noot", type = Noot.class, required = false), @XmlElementRef(name = "unl", type = JAXBElement.class, required = false), @XmlElementRef(name = "meta-data", type = MetaData.class, required = false), @XmlElementRef(name = "nootref", type = Nootref.class, required = false), @XmlElementRef(name = "ovl", type = JAXBElement.class, required = false), @XmlElementRef(name = "inf", type = JAXBElement.class, required = false) }) @XmlMixed protected List<Object> content; @XmlAttribute(name = "id") @XmlSchemaType(name = "anySimpleType") protected String id; @XmlAttribute(name = "status") protected String status; @XmlAttribute(name = "terugwerking") @XmlSchemaType(name = "anySimpleType") protected String terugwerking; @XmlAttribute(name = "label-id") @XmlSchemaType(name = "anySimpleType") protected String labelId; @XmlAttribute(name = "stam-id") @XmlSchemaType(name = "anySimpleType") protected String stamId; @XmlAttribute(name = "versie-id") @XmlSchemaType(name = "anySimpleType") protected String versieId; @XmlAttribute(name = "publicatie") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) protected String 
publicatie; /** * Gets the value of the content property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the content property. * * <p> * For example, to add a new item, do as follows: * <pre> * getContent().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link Nadruk } * {@link Omissie } * {@link JAXBElement }{@code <}{@link String }{@code >} * {@link Noot } * {@link JAXBElement }{@code <}{@link String }{@code >} * {@link MetaData } * {@link Nootref } * {@link JAXBElement }{@code <}{@link String }{@code >} * {@link JAXBElement }{@code <}{@link String }{@code >} * {@link String } * * */ public List<Object> getContent() { if (content == null) { content = new ArrayList<Object>(); } return this.content; } /** * Gets the value of the id property. * * @return * possible object is * {@link String } * */ public String getId() { return id; } /** * Sets the value of the id property. * * @param value * allowed object is * {@link String } * */ public void setId(String value) { this.id = value; } /** * Gets the value of the status property. * * @return * possible object is * {@link String } * */ public String getStatus() { return status; } /** * Sets the value of the status property. * * @param value * allowed object is * {@link String } * */ public void setStatus(String value) { this.status = value; } /** * Gets the value of the terugwerking property. * * @return * possible object is * {@link String }<|fim▁hole|> * */ public String getTerugwerking() { return terugwerking; } /** * Sets the value of the terugwerking property. * * @param value * allowed object is * {@link String } * */ public void setTerugwerking(String value) { this.terugwerking = value; } /** * Gets the value of the labelId property. 
* * @return * possible object is * {@link String } * */ public String getLabelId() { return labelId; } /** * Sets the value of the labelId property. * * @param value * allowed object is * {@link String } * */ public void setLabelId(String value) { this.labelId = value; } /** * Gets the value of the stamId property. * * @return * possible object is * {@link String } * */ public String getStamId() { return stamId; } /** * Sets the value of the stamId property. * * @param value * allowed object is * {@link String } * */ public void setStamId(String value) { this.stamId = value; } /** * Gets the value of the versieId property. * * @return * possible object is * {@link String } * */ public String getVersieId() { return versieId; } /** * Sets the value of the versieId property. * * @param value * allowed object is * {@link String } * */ public void setVersieId(String value) { this.versieId = value; } /** * Gets the value of the publicatie property. * * @return * possible object is * {@link String } * */ public String getPublicatie() { return publicatie; } /** * Sets the value of the publicatie property. * * @param value * allowed object is * {@link String } * */ public void setPublicatie(String value) { this.publicatie = value; } }<|fim▁end|>
<|file_name|>ctl.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python import argparse import sys import os import subprocess import signal import getpass import simplejson from termcolor import colored import ConfigParser import StringIO import functools import time import random import string from configobj import ConfigObj import tempfile import pwd, grp import traceback import uuid import yaml import re from zstacklib import * import jinja2 import socket import struct import fcntl import commands import threading import itertools import platform from datetime import datetime, timedelta import multiprocessing mysql_db_config_script=''' echo "modify my.cnf" if [ -f /etc/mysql/mariadb.conf.d/50-server.cnf ]; then #ubuntu 16.04 mysql_conf=/etc/mysql/mariadb.conf.d/50-server.cnf elif [ -f /etc/mysql/my.cnf ]; then # Ubuntu 14.04 mysql_conf=/etc/mysql/my.cnf elif [ -f /etc/my.cnf ]; then # centos mysql_conf=/etc/my.cnf fi sed -i 's/^bind-address/#bind-address/' $mysql_conf sed -i 's/^skip-networking/#skip-networking/' $mysql_conf sed -i 's/^bind-address/#bind-address/' $mysql_conf grep 'binlog_format=' $mysql_conf >/dev/null 2>&1 if [ $? -ne 0 ]; then echo "binlog_format=mixed" sed -i '/\[mysqld\]/a binlog_format=mixed\' $mysql_conf fi grep 'log_bin_trust_function_creators=' $mysql_conf >/dev/null 2>&1 if [ $? -ne 0 ]; then echo "log_bin_trust_function_creators=1" sed -i '/\[mysqld\]/a log_bin_trust_function_creators=1\' $mysql_conf fi grep 'expire_logs=' $mysql_conf >/dev/null 2>&1 if [ $? -ne 0 ]; then echo "expire_logs=30" sed -i '/\[mysqld\]/a expire_logs=30\' $mysql_conf fi grep 'max_binlog_size=' $mysql_conf >/dev/null 2>&1 if [ $? -ne 0 ]; then echo "max_binlog_size=500m" sed -i '/\[mysqld\]/a max_binlog_size=500m\' $mysql_conf fi grep 'log-bin=' $mysql_conf >/dev/null 2>&1 if [ $? -ne 0 ]; then echo "log-bin=mysql-binlog" sed -i '/\[mysqld\]/a log-bin=mysql-binlog\' $mysql_conf fi grep 'max_connections' $mysql_conf >/dev/null 2>&1 if [ $? 
-ne 0 ]; then echo "max_connections=1024" sed -i '/\[mysqld\]/a max_connections=1024\' $mysql_conf else echo "max_connections=1024" sed -i 's/max_connections.*/max_connections=1024/g' $mysql_conf fi grep '^character-set-server' $mysql_conf >/dev/null 2>&1 if [ $? -ne 0 ]; then echo "binlog_format=mixed" sed -i '/\[mysqld\]/a character-set-server=utf8\' $mysql_conf fi grep '^skip-name-resolve' $mysql_conf >/dev/null 2>&1 if [ $? -ne 0 ]; then sed -i '/\[mysqld\]/a skip-name-resolve\' $mysql_conf fi grep 'tmpdir' $mysql_conf >/dev/null 2>&1 if [ $? -ne 0 ]; then mysql_tmp_path="/var/lib/mysql/tmp" if [ ! -x "$mysql_tmp_path" ]; then mkdir "$mysql_tmp_path" chown mysql:mysql "$mysql_tmp_path" chmod 1777 "$mysql_tmp_path" fi echo "tmpdir=/var/lib/mysql/tmp" sed -i '/\[mysqld\]/a tmpdir=/var/lib/mysql/tmp\' $mysql_conf fi ''' def signal_handler(signal, frame): sys.exit(0) signal.signal(signal.SIGINT, signal_handler) def loop_until_timeout(timeout, interval=1): def wrap(f): @functools.wraps(f) def inner(*args, **kwargs): current_time = time.time() expired = current_time + timeout while current_time < expired: if f(*args, **kwargs): return True time.sleep(interval) current_time = time.time() return False return inner return wrap def find_process_by_cmdline(cmdlines): pids = [pid for pid in os.listdir('/proc') if pid.isdigit()] for pid in pids: try: with open(os.path.join('/proc', pid, 'cmdline'), 'r') as fd: cmdline = fd.read() is_find = True for c in cmdlines: if c not in cmdline: is_find = False break if not is_find: continue return pid except IOError: continue return None def ssh_run_full(ip, cmd, params=[], pipe=True): remote_path = '/tmp/%s.sh' % uuid.uuid4() script = '''/bin/bash << EOF cat << EOF1 > %s %s EOF1 /bin/bash %s %s ret=$? 
rm -f %s exit $ret EOF''' % (remote_path, cmd, remote_path, ' '.join(params), remote_path) scmd = ShellCmd('ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no %s "%s"' % (ip, script), pipe=pipe) scmd(False) return scmd def ssh_run(ip, cmd, params=[]): scmd = ssh_run_full(ip, cmd, params) if scmd.return_code != 0: scmd.raise_error() return scmd.stdout def ssh_run_no_pipe(ip, cmd, params=[]): scmd = ssh_run_full(ip, cmd, params, False) if scmd.return_code != 0: scmd.raise_error() return scmd.stdout class CtlError(Exception): pass def warn(msg): sys.stdout.write(colored('WARNING: %s\n' % msg, 'yellow')) def error(msg): sys.stderr.write(colored('ERROR: %s\n' % msg, 'red')) sys.exit(1) def error_not_exit(msg): sys.stderr.write(colored('ERROR: %s\n' % msg, 'red')) def info(*msg): if len(msg) == 1: out = '%s\n' % ''.join(msg) else: out = ''.join(msg) sys.stdout.write(out) def get_detail_version(): detailed_version_file = os.path.join(ctl.zstack_home, "VERSION") if os.path.exists(detailed_version_file): with open(detailed_version_file, 'r') as fd: detailed_version = fd.read() return detailed_version else: return None def check_ip_port(host, port): import socket sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) result = sock.connect_ex((host, int(port))) return result == 0 def compare_version(version1, version2): def normalize(v): return [int(x) for x in re.sub(r'(\.0+)*$','', v).split(".")] return cmp(normalize(version2), normalize(version1)) def get_zstack_version(db_hostname, db_port, db_user, db_password): query = MySqlCommandLineQuery() query.host = db_hostname query.port = db_port query.user = db_user query.password = db_password query.table = 'zstack' query.sql = "select version from schema_version order by version desc" ret = query.query() versions = [r['version'] for r in ret] versions.sort(cmp=compare_version) version = versions[0] return version def get_default_gateway_ip(): '''This function will return default route gateway ip address''' with 
open("/proc/net/route") as gateway: try: for item in gateway: fields = item.strip().split() if fields[1] != '00000000' or not int(fields[3], 16) & 2: continue if fields[7] == '00000000': return socket.inet_ntoa(struct.pack("=L", int(fields[2], 16))) except ValueError: return None def get_default_ip(): cmd = ShellCmd("""dev=`ip route|grep default|head -n 1|awk -F "dev" '{print $2}' | awk -F " " '{print $1}'`; ip addr show $dev |grep "inet "|awk '{print $2}'|head -n 1 |awk -F '/' '{print $1}'""") cmd(False) return cmd.stdout.strip() def get_yum_repo_from_property(): yum_repo = ctl.read_property('Ansible.var.zstack_repo') if not yum_repo: return yum_repo # avoid http server didn't start when install package if 'zstack-mn' in yum_repo: yum_repo = yum_repo.replace("zstack-mn","zstack-local") if 'qemu-kvm-ev-mn' in yum_repo: yum_repo = yum_repo.replace("qemu-kvm-ev-mn","qemu-kvm-ev") return yum_repo def get_host_list(table_name): db_hostname, db_port, db_user, db_password = ctl.get_live_mysql_portal() query = MySqlCommandLineQuery() query.host = db_hostname query.port = db_port query.user = db_user query.password = db_password query.table = 'zstack' query.sql = "select * from %s" % table_name host_vo = query.query() return host_vo def get_vrouter_list(): ip_list = [] db_hostname, db_port, db_user, db_password = ctl.get_live_mysql_portal() query = MySqlCommandLineQuery() query.host = db_hostname query.port = db_port query.user = db_user query.password = db_password query.table = 'zstack' query.sql = "select ip from VmNicVO where deviceId = 0 and vmInstanceUuid in (select uuid from VirtualRouterVmVO)" vrouter_ip_list = query.query() for ip in vrouter_ip_list: ip_list.append(ip['ip']) return ip_list def get_ha_mn_list(conf_file): with open(conf_file, 'r') as fd: ha_conf_content = yaml.load(fd.read()) mn_list = ha_conf_content['host_list'].split(',') return mn_list def stop_mevoco(host_post_info): command = "zstack-ctl stop_node && zstack-ctl stop_ui" logger.debug("[ HOST: 
%s ] INFO: starting run shell command: '%s' " % (host_post_info.host, command)) (status, output)= commands.getstatusoutput("ssh -o StrictHostKeyChecking=no -i %s root@%s '%s'" % (host_post_info.private_key, host_post_info.host, command)) if status != 0: logger.error("[ HOST: %s ] INFO: shell command: '%s' failed" % (host_post_info.host, command)) error("Something wrong on host: %s\n %s" % (host_post_info.host, output)) else: logger.debug("[ HOST: %s ] SUCC: shell command: '%s' successfully" % (host_post_info.host, command)) def start_mevoco(host_post_info): command = "zstack-ctl start_node && zstack-ctl start_ui" logger.debug("[ HOST: %s ] INFO: starting run shell command: '%s' " % (host_post_info.host, command)) (status, output)= commands.getstatusoutput("ssh -o StrictHostKeyChecking=no -i %s root@%s '%s'" % (host_post_info.private_key, host_post_info.host, command)) if status != 0: logger.error("[ HOST: %s ] FAIL: shell command: '%s' failed" % (host_post_info.host, command)) error("Something wrong on host: %s\n %s" % (host_post_info.host, output)) else: logger.debug("[ HOST: %s ] SUCC: shell command: '%s' successfully" % (host_post_info.host, command)) class ExceptionWrapper(object): def __init__(self, msg): self.msg = msg def __enter__(self): pass def __exit__(self, exc_type, exc_val, exc_tb): if globals().get('verbose', False) and exc_type and exc_val and exc_tb: error_not_exit(''.join(traceback.format_exception(exc_type, exc_val, exc_tb))) if exc_type == CtlError: return if exc_val: error('%s\n%s' % (str(exc_val), self.msg)) def on_error(msg): return ExceptionWrapper(msg) def error_if_tool_is_missing(tool): if shell_return('which %s' % tool) != 0: raise CtlError('cannot find tool "%s", please install it and re-run' % tool) def expand_path(path): if path.startswith('~'): return os.path.expanduser(path) else: return os.path.abspath(path) def check_host_info_format(host_info): '''check install ha and install multi mn node info format''' if '@' not in host_info: 
error("Host connect information should follow format: 'root:password@host_ip', please check your input!") else: # get user and password if ':' not in host_info.split('@')[0]: error("Host connect information should follow format: 'root:password@host_ip', please check your input!") else: user = host_info.split('@')[0].split(':')[0] password = host_info.split('@')[0].split(':')[1] if user != "" and user != "root": print "Only root user can be supported, please change user to root" if user == "": user = "root" # get ip and port if ':' not in host_info.split('@')[1]: ip = host_info.split('@')[1] port = '22' else: ip = host_info.split('@')[1].split(':')[0] port = host_info.split('@')[1].split(':')[1] return (user, password, ip, port) def check_host_password(password, ip): command ='timeout 10 sshpass -p %s ssh -q -o UserKnownHostsFile=/dev/null -o PubkeyAuthentication=no -o ' \ 'StrictHostKeyChecking=no root@%s echo ""' % (password, ip) (status, output) = commands.getstatusoutput(command) if status != 0: error("Connect to host: '%s' with password '%s' failed! 
Please check password firstly and make sure you have " "disabled UseDNS in '/etc/ssh/sshd_config' on %s" % (ip, password, ip)) def get_ip_by_interface(device_name): s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) return socket.inet_ntoa(fcntl.ioctl( s.fileno(), 0x8915, struct.pack('256s', device_name[:15]) )[20:24]) def start_remote_mn( host_post_info): command = "zstack-ctl start_node && zstack-ctl start_ui" (status, output) = commands.getstatusoutput("ssh -o StrictHostKeyChecking=no -i %s root@%s '%s'" % (UpgradeHACmd.private_key_name, host_post_info.host, command)) if status != 0: error("Something wrong on host: %s\n %s" % (host_post_info.host, output)) logger.debug("[ HOST: %s ] SUCC: shell command: '%s' successfully" % (host_post_info.host, command)) class SpinnerInfo(object): spinner_status = {} def __init__(self): self.output = "" self.name = "" class ZstackSpinner(object): def __init__(self, spinner_info): self.output = spinner_info.output self.name = spinner_info.name self.spinner = itertools.cycle("|/~\\") self.thread = threading.Thread(target=self.run, args=()) self.thread.daemon = True self.thread.start() def run(self): time.sleep(.2) while SpinnerInfo.spinner_status[self.name]: sys.stdout.write("\r %s: ... %s " % (self.output, next(self.spinner))) sys.stdout.flush() time.sleep(.1) print "\r %s: ... 
%s" % (self.output, colored("PASS","green")) class Ansible(object): def __init__(self, yaml, host='localhost', debug=False, ssh_key='none'): self.yaml = yaml self.host = host self.debug = debug self.ssh_key = ssh_key def __call__(self, *args, **kwargs): error_if_tool_is_missing('ansible-playbook') cmd = ''' yaml_file=`mktemp` cat <<EOF >> $$yaml_file $yaml EOF ansible_cmd="ansible-playbook $$yaml_file -i '$host,'" if [ $debug -eq 1 ]; then ansible_cmd="$$ansible_cmd -vvvv" fi if [ "$ssh_key" != "none" ]; then ansible_cmd="$$ansible_cmd --private-key=$ssh_key" ssh -oPasswordAuthentication=no -oStrictHostKeyChecking=no -i $ssh_key $host 'echo hi > /dev/null' else ssh -oPasswordAuthentication=no -oStrictHostKeyChecking=no $host 'echo hi > /dev/null' fi if [ $$? -ne 0 ]; then ansible_cmd="$$ansible_cmd --ask-pass" fi eval $$ansible_cmd ret=$$? rm -f $$yaml_file exit $$ret ''' t = string.Template(cmd) cmd = t.substitute({ 'yaml': self.yaml, 'host': self.host, 'debug': int(self.debug), 'ssh_key': self.ssh_key }) with on_error('Ansible failure'): try: shell_no_pipe(cmd) except CtlError: raise Exception('see prior Ansible log for detailed information') def ansible(yaml, host='localhost', debug=False, ssh_key=None): Ansible(yaml, host, debug, ssh_key or 'none')() def reset_dict_value(dict_name, value): return dict.fromkeys(dict_name, value) def check_zstack_user(): try: pwd.getpwnam('zstack') except KeyError: raise CtlError('cannot find user account "zstack", your installation seems incomplete') try: grp.getgrnam('zstack') except KeyError: raise CtlError('cannot find user account "zstack", your installation seems incomplete') class UseUserZstack(object): def __init__(self): self.root_uid = None self.root_gid = None check_zstack_user() def __enter__(self): self.root_uid = os.getuid() self.root_gid = os.getgid() self.root_home = os.environ['HOME'] os.setegid(grp.getgrnam('zstack').gr_gid) os.seteuid(pwd.getpwnam('zstack').pw_uid) os.environ['HOME'] = 
os.path.expanduser('~zstack') def __exit__(self, exc_type, exc_val, exc_tb): os.seteuid(self.root_uid) os.setegid(self.root_gid) os.environ['HOME'] = self.root_home def use_user_zstack(): return UseUserZstack() class PropertyFile(object): def __init__(self, path, use_zstack=True): self.path = path self.use_zstack = use_zstack if not os.path.isfile(self.path): raise CtlError('cannot find property file at %s' % self.path) with on_error("errors on reading %s" % self.path): self.config = ConfigObj(self.path, write_empty_values=True) def read_all_properties(self): with on_error("errors on reading %s" % self.path): return self.config.items() def delete_properties(self, keys): for k in keys: if k in self.config: del self.config[k] with use_user_zstack(): self.config.write() def read_property(self, key): with on_error("errors on reading %s" % self.path): return self.config.get(key, None) def write_property(self, key, value): with on_error("errors on writing (%s=%s) to %s" % (key, value, self.path)): if self.use_zstack: with use_user_zstack(): self.config[key] = value self.config.write() else: self.config[key] = value self.config.write() def write_properties(self, lst): with on_error("errors on writing list of key-value%s to %s" % (lst, self.path)): if self.use_zstack: with use_user_zstack(): for key, value in lst: self.config[key] = value self.config.write() else: for key, value in lst: self.config[key] = value self.config.write() class CtlParser(argparse.ArgumentParser): def error(self, message): sys.stderr.write('error:%s\n' % message) self.print_help() sys.exit(1) class Ctl(object): DEFAULT_ZSTACK_HOME = '/usr/local/zstack/apache-tomcat/webapps/zstack/' USER_ZSTACK_HOME_DIR = os.path.expanduser('~zstack') LAST_ALIVE_MYSQL_IP = "MYSQL_LATEST_IP" LAST_ALIVE_MYSQL_PORT = "MYSQL_LATEST_PORT" LOGGER_DIR = "/var/log/zstack/" LOGGER_FILE = "zstack-ctl.log" def __init__(self): self.commands = {} self.command_list = [] self.main_parser = CtlParser(prog='zstackctl', 
description="ZStack management tool", formatter_class=argparse.RawTextHelpFormatter) self.main_parser.add_argument('-v', help="verbose, print execution details", dest="verbose", action="store_true", default=False) self.zstack_home = None self.properties_file_path = None self.verbose = False self.extra_arguments = None def register_command(self, cmd): assert cmd.name, "command name cannot be None" assert cmd.description, "command description cannot be None" self.commands[cmd.name] = cmd self.command_list.append(cmd) def _locate_zstack_home(self): env_path = os.path.expanduser(SetEnvironmentVariableCmd.PATH) if os.path.isfile(env_path): env = PropertyFile(env_path) self.zstack_home = env.read_property('ZSTACK_HOME') if not self.zstack_home: self.zstack_home = os.environ.get('ZSTACK_HOME', None) if not self.zstack_home: warn('ZSTACK_HOME is not set, default to %s' % self.DEFAULT_ZSTACK_HOME) self.zstack_home = self.DEFAULT_ZSTACK_HOME if not os.path.isdir(self.zstack_home): raise CtlError('cannot find ZSTACK_HOME at %s, please set it in .bashrc or use zstack-ctl setenv ZSTACK_HOME=path' % self.zstack_home) os.environ['ZSTACK_HOME'] = self.zstack_home self.properties_file_path = os.path.join(self.zstack_home, 'WEB-INF/classes/zstack.properties') self.ssh_private_key = os.path.join(self.zstack_home, 'WEB-INF/classes/ansible/rsaKeys/id_rsa') self.ssh_public_key = os.path.join(self.zstack_home, 'WEB-INF/classes/ansible/rsaKeys/id_rsa.pub') if not os.path.isfile(self.properties_file_path): warn('cannot find %s, your ZStack installation may have crashed' % self.properties_file_path) def get_env(self, name): env = PropertyFile(SetEnvironmentVariableCmd.PATH) return env.read_property(name) def delete_env(self, name): env = PropertyFile(SetEnvironmentVariableCmd.PATH) env.delete_properties([name]) def put_envs(self, vs): if not os.path.exists(SetEnvironmentVariableCmd.PATH): shell('su - zstack -c "mkdir -p %s"' % os.path.dirname(SetEnvironmentVariableCmd.PATH)) shell('su - 
zstack -c "touch %s"' % SetEnvironmentVariableCmd.PATH) env = PropertyFile(SetEnvironmentVariableCmd.PATH) env.write_properties(vs) def run(self): create_log(Ctl.LOGGER_DIR, Ctl.LOGGER_FILE) if os.getuid() != 0: raise CtlError('zstack-ctl needs root privilege, please run with sudo') metavar_list = [] for n,cmd in enumerate(self.command_list): if cmd.hide is False: metavar_list.append(cmd.name) else: self.command_list[n].description = None metavar_string = '{' + ','.join(metavar_list) + '}' subparsers = self.main_parser.add_subparsers(help="All sub-commands", dest="sub_command_name", metavar=metavar_string) for cmd in self.command_list: if cmd.description is not None: cmd.install_argparse_arguments(subparsers.add_parser(cmd.name, help=cmd.description + '\n\n')) else: cmd.install_argparse_arguments(subparsers.add_parser(cmd.name)) args, self.extra_arguments = self.main_parser.parse_known_args(sys.argv[1:]) self.verbose = args.verbose globals()['verbose'] = self.verbose cmd = self.commands[args.sub_command_name] if cmd.need_zstack_home(): self._locate_zstack_home() if cmd.need_zstack_user(): check_zstack_user() cmd(args) def internal_run(self, cmd_name, args=''): cmd = self.commands[cmd_name] assert cmd, 'cannot find command %s' % cmd_name params = [cmd_name] params.extend(args.split()) args_obj, _ = self.main_parser.parse_known_args(params) if cmd.need_zstack_home(): self._locate_zstack_home() if cmd.need_zstack_user(): check_zstack_user() cmd(args_obj) def read_property_list(self, key): prop = PropertyFile(self.properties_file_path) ret = [] for name, value in prop.read_all_properties(): if name.startswith(key): ret.append((name, value)) return ret def read_all_properties(self): prop = PropertyFile(self.properties_file_path) return prop.read_all_properties() def read_property(self, key): prop = PropertyFile(self.properties_file_path) val = prop.read_property(key) # our code assume all values are strings if isinstance(val, list): return ','.join(val) else: return val 
def write_properties(self, properties): prop = PropertyFile(self.properties_file_path) with on_error('property must be in format of "key=value", no space before and after "="'): prop.write_properties(properties) def write_property(self, key, value): prop = PropertyFile(self.properties_file_path) with on_error('property must be in format of "key=value", no space before and after "="'): prop.write_property(key, value) def get_db_url(self): db_url = self.read_property("DB.url") if not db_url: db_url = self.read_property('DbFacadeDataSource.jdbcUrl') if not db_url: raise CtlError("cannot find DB url in %s. please set DB.url" % self.properties_file_path) return db_url def get_live_mysql_portal(self): hostname_ports, user, password = self.get_database_portal() last_ip = ctl.get_env(self.LAST_ALIVE_MYSQL_IP) last_port = ctl.get_env(self.LAST_ALIVE_MYSQL_PORT) if last_ip and last_port and (last_ip, last_port) in hostname_ports: first = (last_ip, last_port) lst = [first] for it in hostname_ports: if it != first: lst.append(it) hostname_ports = lst errors = [] for hostname, port in hostname_ports: if password: sql = 'mysql --host=%s --port=%s --user=%s --password=%s -e "select 1"' % (hostname, port, user, password) else: sql = 'mysql --host=%s --port=%s --user=%s -e "select 1"' % (hostname, port, user) cmd = ShellCmd(sql) cmd(False) if cmd.return_code == 0: # record the IP and port, so next time we will try them first ctl.put_envs([ (self.LAST_ALIVE_MYSQL_IP, hostname), (self.LAST_ALIVE_MYSQL_PORT, port) ]) return hostname, port, user, password errors.append('failed to connect to the mysql server[hostname:%s, port:%s, user:%s, password:%s]: %s %s' % ( hostname, port, user, password, cmd.stderr, cmd.stdout )) raise CtlError('\n'.join(errors)) def get_database_portal(self): db_user = self.read_property("DB.user") if not db_user: db_user = self.read_property('DbFacadeDataSource.user') if not db_user: raise CtlError("cannot find DB user in %s. 
please set DB.user" % self.properties_file_path) db_password = self.read_property("DB.password") if db_password is None: db_password = self.read_property('DbFacadeDataSource.password') if db_password is None: raise CtlError("cannot find DB password in %s. please set DB.password" % self.properties_file_path) db_url = self.get_db_url() host_name_ports = [] def parse_hostname_ports(prefix): ips = db_url.lstrip(prefix).lstrip('/').split('/')[0] ips = ips.split(',') for ip in ips: if ":" in ip: hostname, port = ip.split(':') host_name_ports.append((hostname, port)) else: host_name_ports.append((ip, '3306')) if db_url.startswith('jdbc:mysql:loadbalance:'): parse_hostname_ports('jdbc:mysql:loadbalance:') elif db_url.startswith('jdbc:mysql:'): parse_hostname_ports('jdbc:mysql:') return host_name_ports, db_user, db_password def check_if_management_node_has_stopped(self, force=False): db_hostname, db_port, db_user, db_password = self.get_live_mysql_portal() def get_nodes(): query = MySqlCommandLineQuery() query.user = db_user query.password = db_password query.host = db_hostname query.port = db_port query.table = 'zstack' query.sql = 'select hostname,heartBeat from ManagementNodeVO' return query.query() def check(): nodes = get_nodes() if nodes: node_ips = [n['hostname'] for n in nodes] raise CtlError('there are some management nodes%s are still running. Please stop all of them before performing the database upgrade.' 'If you are sure they have stopped, use option --force and run this command again.\n' 'If you are upgrade by all in on installer, use option -F and run all in one installer again.\n' 'WARNING: the database may crash if you run this command with --force but without stopping management nodes' % node_ips) def bypass_check(): nodes = get_nodes() if nodes: node_ips = [n['hostname'] for n in nodes] info("it seems some nodes%s are still running. As you have specified option --force, let's wait for 10s to make sure those are stale records. Please be patient." 
% node_ips) time.sleep(10) new_nodes = get_nodes() for n in new_nodes: for o in nodes: if o['hostname'] == n['hostname'] and o['heartBeat'] != n['heartBeat']: raise CtlError("node[%s] is still Running! Its heart-beat changed from %s to %s in last 10s. Please make sure you really stop it" % (n['hostname'], o['heartBeat'], n['heartBeat'])) if force: bypass_check() else: check() ctl = Ctl() def script(cmd, args=None, no_pipe=False): if args: t = string.Template(cmd) cmd = t.substitute(args) fd, script_path = tempfile.mkstemp(suffix='.sh') os.fdopen(fd, 'w').write(cmd) try: if ctl.verbose: info('execute script:\n%s\n' % cmd) if no_pipe: shell_no_pipe('bash %s' % script_path) else: shell('bash %s' % script_path) finally: os.remove(script_path) class ShellCmd(object): def __init__(self, cmd, workdir=None, pipe=True): self.cmd = cmd if pipe: self.process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE, cwd=workdir) else: self.process = subprocess.Popen(cmd, shell=True, cwd=workdir) self.return_code = None self.stdout = None self.stderr = None def raise_error(self): err = [] err.append('failed to execute shell command: %s' % self.cmd) err.append('return code: %s' % self.process.returncode) err.append('stdout: %s' % self.stdout) err.append('stderr: %s' % self.stderr) raise CtlError('\n'.join(err)) def __call__(self, is_exception=True): if ctl.verbose: info('executing shell command[%s]:' % self.cmd) (self.stdout, self.stderr) = self.process.communicate() if is_exception and self.process.returncode != 0: self.raise_error() self.return_code = self.process.returncode if ctl.verbose: info(simplejson.dumps({ "shell" : self.cmd, "return_code" : self.return_code, "stdout": self.stdout, "stderr": self.stderr }, ensure_ascii=True, sort_keys=True, indent=4)) return self.stdout def shell(cmd, is_exception=True): return ShellCmd(cmd)(is_exception) def shell_no_pipe(cmd, is_exception=True): return ShellCmd(cmd, 
pipe=False)(is_exception) def shell_return(cmd): scmd = ShellCmd(cmd) scmd(False) return scmd.return_code class Command(object): def __init__(self): self.name = None self.description = None self.hide = False self.cleanup_routines = [] self.quiet = False def install_argparse_arguments(self, parser): pass def install_cleanup_routine(self, func): self.cleanup_routines.append(func) def need_zstack_home(self): return True def need_zstack_user(self): return True def __call__(self, *args, **kwargs): try: self.run(*args) if not self.quiet: logger.info('Start running command [ zstack-ctl %s ]' % ' '.join(sys.argv[1:])) finally: for c in self.cleanup_routines: c() def run(self, args): raise CtlError('the command is not implemented') def create_check_mgmt_node_command(timeout=10, mn_node='127.0.0.1'): USE_CURL = 0 USE_WGET = 1 NO_TOOL = 2 def use_tool(): cmd = ShellCmd('which wget') cmd(False) if cmd.return_code == 0: return USE_WGET else: cmd = ShellCmd('which curl') cmd(False) if cmd.return_code == 0: return USE_CURL else: return NO_TOOL what_tool = use_tool() if what_tool == USE_CURL: return ShellCmd('''curl --noproxy --connect-timeout 1 --retry %s --retry-delay 0 --retry-max-time %s --max-time %s -H "Content-Type: application/json" -d '{"org.zstack.header.apimediator.APIIsReadyToGoMsg": {}}' http://%s:8080/zstack/api''' % (timeout, timeout, timeout, mn_node)) elif what_tool == USE_WGET: return ShellCmd('''wget --no-proxy -O- --tries=%s --timeout=1 --header=Content-Type:application/json --post-data='{"org.zstack.header.apimediator.APIIsReadyToGoMsg": {}}' http://%s:8080/zstack/api''' % (timeout, mn_node)) else: return None def find_process_by_cmdline(keyword): pids = [pid for pid in os.listdir('/proc') if pid.isdigit()] for pid in pids: try: with open(os.path.join('/proc', pid, 'cmdline'), 'r') as fd: cmdline = fd.read() if keyword not in cmdline: continue return pid except IOError: continue return None class MySqlCommandLineQuery(object): def __init__(self): self.user = 
None self.password = None self.host = 'localhost' self.port = 3306 self.sql = None self.table = None def query(self): assert self.user, 'user cannot be None' assert self.sql, 'sql cannot be None' assert self.table, 'table cannot be None' sql = "%s\G" % self.sql if self.password: cmd = '''mysql -u %s -p%s --host %s --port %s -t %s -e "%s"''' % (self.user, self.password, self.host, self.port, self.table, sql) else: cmd = '''mysql -u %s --host %s --port %s -t %s -e "%s"''' % (self.user, self.host, self.port, self.table, sql) output = shell(cmd) output = output.strip(' \t\n\r') ret = [] if not output: return ret current = None for l in output.split('\n'): if current is None and not l.startswith('*********'): raise CtlError('cannot parse mysql output generated by the sql "%s", output:\n%s' % (self.sql, output)) if l.startswith('*********'): if current: ret.append(current) current = {} else: l = l.strip() key, value = l.split(':', 1) current[key.strip()] = value[1:] if current: ret.append(current) return ret class ShowStatusCmd(Command): def __init__(self): super(ShowStatusCmd, self).__init__() self.name = 'status' self.description = 'show ZStack status and information.' 
ctl.register_command(self) def install_argparse_arguments(self, parser): parser.add_argument('--host', help='SSH URL, for example, [email protected], to show the management node status on a remote machine') parser.add_argument('--quiet', '-q', help='Do not log this action.', action='store_true', default=False) def _stop_remote(self, args): shell_no_pipe('ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no %s "/usr/bin/zstack-ctl status"' % args.host) def run(self, args): self.quiet = args.quiet if args.host: self._stop_remote(args) return log_path = os.path.join(ctl.zstack_home, "../../logs/management-server.log") log_path = os.path.normpath(log_path) info_list = [ "ZSTACK_HOME: %s" % ctl.zstack_home, "zstack.properties: %s" % ctl.properties_file_path, "log4j2.xml: %s" % os.path.join(os.path.dirname(ctl.properties_file_path), 'log4j2.xml'), "PID file: %s" % os.path.join(os.path.expanduser('~zstack'), "management-server.pid"), "log file: %s" % log_path ] def check_zstack_status(): cmd = create_check_mgmt_node_command() def write_status(status): info_list.append('MN status: %s' % status) if not cmd: write_status('cannot detect status, no wget and curl installed') return cmd(False) pid = get_management_node_pid() if cmd.return_code != 0: if pid: write_status('%s, the management node seems to become zombie as it stops responding APIs but the ' 'process(PID: %s) is still running. 
Please stop the node using zstack-ctl stop_node' % (colored('Unknown', 'yellow'), pid)) else: write_status(colored('Stopped', 'red')) return if 'false' in cmd.stdout: write_status('Starting, should be ready in a few seconds') elif 'true' in cmd.stdout: write_status(colored('Running', 'green') + ' [PID:%s]' % pid) else: write_status('Unknown') def show_version(): try: db_hostname, db_port, db_user, db_password = ctl.get_live_mysql_portal() except: info('version: %s' % colored('unknown, MySQL is not running', 'yellow')) return if db_password: cmd = ShellCmd('''mysql -u %s -p%s --host %s --port %s -t zstack -e "show tables like 'schema_version'"''' % (db_user, db_password, db_hostname, db_port)) else: cmd = ShellCmd('''mysql -u %s --host %s --port %s -t zstack -e "show tables like 'schema_version'"''' % (db_user, db_hostname, db_port)) cmd(False) if cmd.return_code != 0: info('version: %s' % colored('unknown, MySQL is not running', 'yellow')) return out = cmd.stdout if 'schema_version' not in out: version = '0.6' else: version = get_zstack_version(db_hostname, db_port, db_user, db_password) detailed_version = get_detail_version() if detailed_version is not None: info('version: %s (%s)' % (version, detailed_version)) else: info('version: %s' % version) check_zstack_status() info('\n'.join(info_list)) ctl.internal_run('ui_status', args='-q') show_version() class DeployDBCmd(Command): DEPLOY_DB_SCRIPT_PATH = "WEB-INF/classes/deploydb.sh" ZSTACK_PROPERTY_FILE = "WEB-INF/classes/zstack.properties" def __init__(self): super(DeployDBCmd, self).__init__() self.name = "deploydb" self.description = ( "deploy a new ZStack database, create a user 'zstack' with password specified in '--zstack-password',\n" "and update zstack.properties if --no-update is not set.\n" "\nDANGER: this will erase the existing ZStack database.\n" "NOTE: If the database is running on a remote host, please make sure you have granted privileges to the root user by:\n" "\n\tGRANT ALL PRIVILEGES ON *.* TO 
'root'@'%%' IDENTIFIED BY 'your_root_password' WITH GRANT OPTION;\n" "\tFLUSH PRIVILEGES;\n" ) ctl.register_command(self) def update_db_config(self): update_db_config_script = mysql_db_config_script fd, update_db_config_script_path = tempfile.mkstemp() os.fdopen(fd, 'w').write(update_db_config_script) info('update_db_config_script_path is: %s' % update_db_config_script_path) ShellCmd('bash %s' % update_db_config_script_path)() os.remove(update_db_config_script_path) def install_argparse_arguments(self, parser): parser.add_argument('--root-password', help='root user password of MySQL. [DEFAULT] empty password') parser.add_argument('--zstack-password', help='password of user "zstack". [DEFAULT] empty password') parser.add_argument('--host', help='IP or DNS name of MySQL host; default is localhost', default='localhost') parser.add_argument('--port', help='port of MySQL host; default is 3306', type=int, default=3306) parser.add_argument('--no-update', help='do NOT update database information to zstack.properties; if you do not know what this means, do not use it', action='store_true', default=False) parser.add_argument('--drop', help='drop existing zstack database', action='store_true', default=False) parser.add_argument('--keep-db', help='keep existing zstack database and not raise error.', action='store_true', default=False) def run(self, args): error_if_tool_is_missing('mysql') script_path = os.path.join(ctl.zstack_home, self.DEPLOY_DB_SCRIPT_PATH) if not os.path.exists(script_path): error('cannot find %s, your ZStack installation may have been corrupted, please reinstall it' % script_path) property_file_path = os.path.join(ctl.zstack_home, self.ZSTACK_PROPERTY_FILE) if not os.path.exists(property_file_path): error('cannot find %s, your ZStack installation may have been corrupted, please reinstall it' % property_file_path) if args.root_password: check_existing_db = 'mysql --user=root --password=%s --host=%s --port=%s -e "use zstack"' % (args.root_password, 
args.host, args.port) else: check_existing_db = 'mysql --user=root --host=%s --port=%s -e "use zstack"' % (args.host, args.port) self.update_db_config() cmd = ShellCmd(check_existing_db) cmd(False) if not args.root_password: args.root_password = "''" if not args.zstack_password: args.zstack_password = "''" if cmd.return_code == 0 and not args.drop: if args.keep_db: info('detected existing zstack database and keep it; if you want to drop it, please append parameter --drop, instead of --keep-db\n') else: raise CtlError('detected existing zstack database; if you are sure to drop it, please append parameter --drop or use --keep-db to keep the database') else: cmd = ShellCmd('bash %s root %s %s %s %s' % (script_path, args.root_password, args.host, args.port, args.zstack_password)) cmd(False) if cmd.return_code != 0: if ('ERROR 1044' in cmd.stdout or 'ERROR 1044' in cmd.stderr) or ('Access denied' in cmd.stdout or 'Access denied' in cmd.stderr): raise CtlError( "failed to deploy database, access denied; if your root password is correct and you use IP rather than localhost," "it's probably caused by the privileges are not granted to root user for remote access; please see instructions in 'zstack-ctl -h'." 
"error details: %s, %s\n" % (cmd.stdout, cmd.stderr) ) else: cmd.raise_error() if not args.no_update: if args.zstack_password == "''": args.zstack_password = '' properties = [ ("DB.user", "zstack"), ("DB.password", args.zstack_password), ("DB.url", 'jdbc:mysql://%s:%s' % (args.host, args.port)), ] ctl.write_properties(properties) info('Successfully deployed ZStack database and updated corresponding DB information in %s' % property_file_path) class TailLogCmd(Command): def __init__(self): super(TailLogCmd, self).__init__() self.name = 'taillog' self.description = "shortcut to print management node log to stdout" ctl.register_command(self) def run(self, args): log_path = os.path.join(ctl.zstack_home, "../../logs/management-server.log") log_path = os.path.normpath(log_path) if not os.path.isfile(log_path): raise CtlError('cannot find %s' % log_path) script = ShellCmd('tail -f %s' % log_path, pipe=False) script() class ConfigureCmd(Command): def __init__(self): super(ConfigureCmd, self).__init__() self.name = 'configure' self.description = "configure zstack.properties" ctl.register_command(self) def install_argparse_arguments(self, parser): parser.add_argument('--host', help='SSH URL, for example, [email protected], to set properties in zstack.properties on the remote machine') parser.add_argument('--duplicate-to-remote', help='SSH URL, for example, [email protected], to copy zstack.properties on this machine to the remote machine') parser.add_argument('--use-file', help='path to a file that will be used to as zstack.properties') def _configure_remote_node(self, args): shell_no_pipe('ssh %s "/usr/bin/zstack-ctl configure %s"' % (args.host, ' '.join(ctl.extra_arguments))) def _duplicate_remote_node(self, args): tmp_file_name = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(8)) tmp_file_name = os.path.join('/tmp/', tmp_file_name) with open(ctl.properties_file_path, 'r') as fd: txt = fd.read() cmd = '''ssh -T %s << EOF cat <<EOT > %s %s EOT 
if [ $? != 0 ]; then print "cannot create temporary properties file" exit 1 fi /usr/bin/zstack-ctl configure --use-file %s ret=$? rm -f %s exit $ret EOF ''' shell_no_pipe(cmd % (args.duplicate_to_remote, tmp_file_name, txt, tmp_file_name, tmp_file_name)) info("successfully copied %s to remote machine %s" % (ctl.properties_file_path, args.duplicate_to_remote)) def _use_file(self, args): path = os.path.expanduser(args.use_file) if not os.path.isfile(path): raise CtlError('cannot find file %s' % path) shell('cp -f %s %s' % (path, ctl.properties_file_path)) def run(self, args): if args.use_file: self._use_file(args) return if args.duplicate_to_remote: self._duplicate_remote_node(args) return if not ctl.extra_arguments: raise CtlError('please input properties that are in format of "key=value" split by space') if args.host: self._configure_remote_node(args) return properties = [l.split('=', 1) for l in ctl.extra_arguments] ctl.write_properties(properties) def get_management_node_pid(): DEFAULT_PID_FILE_PATH = os.path.join(os.path.expanduser('~zstack'), "management-server.pid") pid = find_process_by_cmdline('appName=zstack') if pid: return pid pid_file_path = ctl.read_property('pidFilePath') if not pid_file_path: pid_file_path = DEFAULT_PID_FILE_PATH if not os.path.exists(pid_file_path): return None def is_zstack_process(pid): cmdline = os.path.join('/proc/%s/cmdline' % pid) with open(cmdline, 'r') as fd: content = fd.read() return 'appName=zstack' in content with open(pid_file_path, 'r') as fd: pid = fd.read() try: pid = int(pid) proc_pid = '/proc/%s' % pid if os.path.exists(proc_pid): if is_zstack_process(pid): return pid else: return None except Exception: return None return None class StopAllCmd(Command): def __init__(self): super(StopAllCmd, self).__init__() self.name = 'stop' self.description = 'stop all ZStack related services including zstack management node, web UI' \ ' if those services are installed' ctl.register_command(self) def run(self, args): def 
stop_mgmt_node(): info(colored('Stopping ZStack management node, it may take a few minutes...', 'blue')) ctl.internal_run('stop_node') def stop_ui(): virtualenv = '/var/lib/zstack/virtualenv/zstack-dashboard' if not os.path.exists(virtualenv): info('skip stopping web UI, it is not installed') return info(colored('Stopping ZStack web UI, it may take a few minutes...', 'blue')) ctl.internal_run('stop_ui') stop_ui() stop_mgmt_node() class StartAllCmd(Command): def __init__(self): super(StartAllCmd, self).__init__() self.name = 'start' self.description = 'start all ZStack related services including zstack management node, web UI' \ ' if those services are installed' ctl.register_command(self) def install_argparse_arguments(self, parser): parser.add_argument('--daemon', help='Start ZStack in daemon mode. Only used with systemd.', action='store_true', default=True) def run(self, args): def start_mgmt_node(): info(colored('Starting ZStack management node, it may take a few minutes...', 'blue')) if args.daemon: ctl.internal_run('start_node', '--daemon') else: ctl.internal_run('start_node') def start_ui(): virtualenv = '/var/lib/zstack/virtualenv/zstack-dashboard' if not os.path.exists(virtualenv): info('skip starting web UI, it is not installed') return info(colored('Starting ZStack web UI, it may take a few minutes...', 'blue')) ctl.internal_run('start_ui') start_mgmt_node() start_ui() class StartCmd(Command): START_SCRIPT = '../../bin/startup.sh' SET_ENV_SCRIPT = '../../bin/setenv.sh' MINIMAL_CPU_NUMBER = 4 #MINIMAL_MEM_SIZE unit is KB, here is 6GB, in linxu, 6GB is 5946428 KB #Save some memory for kdump etc. 
The actual limitation is 5000000KB MINIMAL_MEM_SIZE = 5000000 def __init__(self): super(StartCmd, self).__init__() self.name = 'start_node' self.description = 'start the ZStack management node on this machine' ctl.register_command(self) def install_argparse_arguments(self, parser): parser.add_argument('--host', help='SSH URL, for example, [email protected], to start the management node on a remote machine') parser.add_argument('--timeout', help='Wait for ZStack Server startup timeout, default is 300 seconds.', default=300) parser.add_argument('--daemon', help='Start ZStack in daemon mode. Only used with systemd.', action='store_true', default=False) def _start_remote(self, args): info('it may take a while because zstack-ctl will wait for management node ready to serve API') shell_no_pipe('ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no %s "/usr/bin/zstack-ctl start_node --timeout=%s"' % (args.host, args.timeout)) def check_cpu_mem(self): if multiprocessing.cpu_count() < StartCmd.MINIMAL_CPU_NUMBER: error("CPU number should not less than %d" % StartCmd.MINIMAL_CPU_NUMBER) status, output = commands.getstatusoutput("cat /proc/meminfo | grep MemTotal | awk -F \":\" '{print $2}' | awk -F \" \" '{print $1}'") if status == 0: if int(output) < StartCmd.MINIMAL_MEM_SIZE: error("Memory size should not less than %d KB" % StartCmd.MINIMAL_MEM_SIZE) else: warn("Can't get system memory size from /proc/meminfo") def check_hostname(self): hn = shell('hostname').strip() if '.' 
in hn: error("The hostname cannot contain '.', current hostname is '%s'.\n" "Please use the following commands to modify hostname and reset rabbitmq:\n" " # hostnamectl set-hostname $NEW_HOSTNAME\n" " # zstack-ctl reset_rabbitmq" % hn) def run(self, args): self.check_cpu_mem() self.check_hostname() if args.host: self._start_remote(args) return # clean the error log before booting boot_error_log = os.path.join(ctl.USER_ZSTACK_HOME_DIR, 'bootError.log') shell('rm -f %s' % boot_error_log) pid = get_management_node_pid() if pid: info('the management node[pid:%s] is already running' % pid) return else: shell('rm -f %s' % os.path.join(os.path.expanduser('~zstack'), "management-server.pid")) def check_java_version(): ver = shell('java -version 2>&1 | grep -w version') if '1.8' not in ver: raise CtlError('ZStack requires Java8, your current version is %s\n' 'please run "update-alternatives --config java" to set Java to Java8') def check_8080(): if shell_return('netstat -nap | grep :8080[[:space:]] | grep LISTEN > /dev/null') == 0: raise CtlError('8080 is occupied by some process. Please use netstat to find out and stop it') def check_9090(): if shell_return('netstat -nap | grep :9090[[:space:]] | grep LISTEN | grep -v prometheus > /dev/null') == 0: raise CtlError('9090 is occupied by some process. 
Please use netstat to find out and stop it')

        def check_msyql():
            # Verify the MySQL portal configured in zstack.properties is
            # reachable and accepts a login with the configured credentials.
            # NOTE(review): the name is a typo of "check_mysql"; the call site
            # below uses the same spelling, so renaming must touch both.
            db_hostname, db_port, db_user, db_password = ctl.get_live_mysql_portal()

            if not check_ip_port(db_hostname, db_port):
                raise CtlError('unable to connect to %s:%s, please check if the MySQL is running and the firewall rules' % (db_hostname, db_port))

            with on_error('unable to connect to MySQL'):
                shell('mysql --host=%s --user=%s --password=%s --port=%s -e "select 1"' % (db_hostname, db_user, db_password, db_port))

        def open_iptables_port(protocol, port_list):
            # Idempotently insert an iptables ACCEPT rule for every port in
            # port_list; the grep guard prevents duplicate rules, and the rule
            # set is persisted where the distro supports it (centos/Ubuntu).
            distro = platform.dist()[0]
            if type(port_list) is not list:
                error("port list should be list")
            for port in port_list:
                if distro == 'centos':
                    shell('iptables-save | grep -- "-A INPUT -p %s -m %s --dport %s -j ACCEPT" > /dev/null || '
                          '(iptables -I INPUT -p %s -m %s --dport %s -j ACCEPT && service iptables save)' % (protocol, protocol, port, protocol, protocol, port))
                elif distro == 'Ubuntu':
                    shell('iptables-save | grep -- "-A INPUT -p %s -m %s --dport %s -j ACCEPT" > /dev/null || '
                          '(iptables -I INPUT -p %s -m %s --dport %s -j ACCEPT && /etc/init.d/iptables-persistent save)' % (protocol, protocol, port, protocol, protocol, port))
                else:
                    # unknown distro: insert the rule but do not persist it
                    shell('iptables-save | grep -- "-A INPUT -p %s -m %s --dport %s -j ACCEPT" > /dev/null || '
                          'iptables -I INPUT -p %s -m %s --dport %s -j ACCEPT ' % (protocol, protocol, port, protocol, protocol, port))

        def check_rabbitmq():
            # Check that at least one RabbitMQ server listed in
            # CloudBus.serverIp.* is reachable, then (optionally) validate the
            # configured username/password via the management plugin.
            RABBIT_PORT = 5672

            def check_username_password_if_need(ip, username, password):
                # Only meaningful when both credentials are configured.
                if not username or not password:
                    return

                cmd = ShellCmd('curl -u %s:%s http://%s:15672/api/whoami' % (username, password, ip))
                cmd(False)
                # curl exit code 7 == "failed to connect": the management
                # plugin is missing or port 15672 is blocked; warn only.
                if cmd.return_code == 7:
                    warn('unable to connect to the rabbitmq management plugin at %s:15672. The possible reasons are:\n'
                         ' 1) the plugin is not installed, you can install it by "rabbitmq-plugins enable rabbitmq_management,"\n'
                         ' then restart the rabbitmq by "service rabbitmq-server restart"\n'
                         ' 2) the port 15672 is blocked by the firewall\n'
                         'without the plugin, we cannot check the validity of the rabbitmq username/password configured in zstack.properties' % ip)
                elif cmd.return_code != 0:
                    cmd.raise_error()
                else:
                    # /api/whoami returns a JSON error document on bad auth
                    if 'error' in cmd.stdout:
                        raise CtlError('unable to connect to the rabbitmq server[ip:%s] with username/password configured in zstack.properties.\n'
                                       'If you have reset the rabbimtq server, get the username/password from zstack.properties and do followings on the rabbitmq server:\n'
                                       '1) rabbitmqctl add_user $username $password\n'
                                       '2) rabbitmqctl set_user_tags $username administrator\n'
                                       '3) rabbitmqctl set_permissions -p / $username ".*" ".*" ".*"\n' % ip)

            with on_error('unable to get RabbitMQ server IPs from %s, please check CloudBus.serverIp.0'):
                ips = ctl.read_property_list('CloudBus.serverIp.')

                if not ips:
                    raise CtlError('no RabbitMQ IPs defined in %s, please specify it use CloudBus.serverIp.0=the_ip' % ctl.properties_file_path)

                rabbit_username = ctl.read_property('CloudBus.rabbitmqUsername')
                rabbit_password = ctl.read_property('CloudBus.rabbitmqPassword')

                # Credentials must be configured as a pair or not at all.
                if rabbit_password and not rabbit_username:
                    raise CtlError('CloudBus.rabbitmqPassword is set but CloudBus.rabbitmqUsername is missing in zstack.properties')
                elif not rabbit_password and rabbit_username:
                    raise CtlError('CloudBus.rabbitmqUsername is set but CloudBus.rabbitmqPassword is missing in zstack.properties')

                success = False
                workable_ip = None
                for key, ip in ips:
                    # an entry may carry an explicit port as "ip:port"
                    if ":" in ip:
                        ip, port = ip.split(':')
                    else:
                        port = RABBIT_PORT

                    if check_ip_port(ip, port):
                        workable_ip = ip
                        success = True
                    else:
                        # NOTE(review): this warning prints RABBIT_PORT even
                        # when a custom port was parsed above — confirm intent.
                        warn('cannot connect to the RabbitMQ server[ip:%s, port:%s]' % (ip, RABBIT_PORT))

                if not success:
                    raise CtlError('cannot connect to all RabbitMQ servers[ip:%s, port:%s] defined in %s, please reset rabbitmq by: "zstack-ctl reset_rabbitmq"' % (ips, RABBIT_PORT, ctl.properties_file_path))
                else:
                    check_username_password_if_need(workable_ip, rabbit_username, rabbit_password)

        def prepare_setenv():
            # Write Tomcat's setenv script with the CATALINA_OPTS assembled
            # from defaults, ctl extra arguments, upgrade parameters and the
            # user's CATALINA_OPTS environment variable.
            setenv_path = os.path.join(ctl.zstack_home, self.SET_ENV_SCRIPT)
            catalina_opts = [
                '-Djava.net.preferIPv4Stack=true',
                '-Dcom.sun.management.jmxremote=true',
                '-Djava.security.egd=file:/dev/./urandom',
            ]

            if ctl.extra_arguments:
                catalina_opts.extend(ctl.extra_arguments)

            upgrade_params = ctl.get_env('ZSTACK_UPGRADE_PARAMS')
            if upgrade_params:
                catalina_opts.extend(upgrade_params.split(' '))

            co = ctl.get_env('CATALINA_OPTS')
            if co:
                info('use CATALINA_OPTS[%s] set in environment zstack environment variables; check out them by "zstack-ctl getenv"' % co)
                catalina_opts.extend(co.split(' '))

            def has_opt(prefix):
                # True if any collected option starts with the given prefix
                for opt in catalina_opts:
                    if opt.startswith(prefix):
                        return True
                return False

            # default JVM heap sizes, only when the user did not set them
            if not has_opt('-Xms'):
                catalina_opts.append('-Xms512M')

            if not has_opt('-Xmx'):
                catalina_opts.append('-Xmx4096M')

            with open(setenv_path, 'w') as fd:
                fd.write('export CATALINA_OPTS=" %s"' % ' '.join(catalina_opts))

        def start_mgmt_node():
            # Launch Tomcat as the 'zstack' user; the node itself becomes
            # ready asynchronously (see wait_mgmt_node_start).
            shell('sudo -u zstack sh %s -DappName=zstack' % os.path.join(ctl.zstack_home, self.START_SCRIPT))
            info("successfully started Tomcat container; now it's waiting for the management node ready for serving APIs, which may take a few seconds")

        def wait_mgmt_node_start():
            # Poll until the management node answers, or raise with a hint to
            # the boot error log / management-server.log on timeout.
            log_path = os.path.join(ctl.zstack_home, "../../logs/management-server.log")
            timeout = int(args.timeout)

            @loop_until_timeout(timeout)
            def check():
                if os.path.exists(boot_error_log):
                    with open(boot_error_log, 'r') as fd:
                        raise CtlError('the management server fails to boot; details can be found in the log[%s],'
                                       'here is a brief of the error:\n%s' % (log_path, fd.read()))

                cmd = create_check_mgmt_node_command(1)
                cmd(False)
                return cmd.return_code == 0

            if not check():
                raise CtlError('no management-node-ready message received within %s seconds, please check error in log file %s' % (timeout, log_path))

        # ---- main body: preflight checks, then boot and wait ----
        user = getpass.getuser()
        if user != 'root':
            raise CtlError('please use sudo or root user')

        check_java_version()
        check_8080()
        check_9090()
        check_msyql()
        check_rabbitmq()
        prepare_setenv()
        open_iptables_port('udp',['123'])
        start_mgmt_node()
        #sleep a while, since zstack won't start up so quickly
        time.sleep(5)

        try:
            wait_mgmt_node_start()
        except CtlError as e:
            # best-effort rollback: try to stop the half-started node, then
            # re-raise the original startup error
            try:
                info("the management node failed to start, stop it now ...")
                ctl.internal_run('stop_node')
            except:
                pass

            raise e

        if not args.daemon:
            # on systemd hosts, also mark the zstack unit started
            shell('which systemctl >/dev/null 2>&1; [ $? -eq 0 ] && systemctl start zstack', is_exception = False)
        info('successfully started management node')

        ctl.delete_env('ZSTACK_UPGRADE_PARAMS')

class StopCmd(Command):
    # 'zstack-ctl stop_node': stop the management node, locally or over SSH.
    STOP_SCRIPT = "../../bin/shutdown.sh"

    def __init__(self):
        super(StopCmd, self).__init__()
        self.name = 'stop_node'
        self.description = 'stop the ZStack management node on this machine'
        ctl.register_command(self)

    def install_argparse_arguments(self, parser):
        # NOTE(review): "[email protected]" looks like an email-obfuscation
        # artifact of an original "user@host" example — confirm against upstream.
        parser.add_argument('--host', help='SSH URL, for example, [email protected], to stop the management node on a remote machine')
        parser.add_argument('--force', '-f', help='force kill the java process, without waiting.', action="store_true", default=False)

    def _stop_remote(self, args):
        # Re-invoke zstack-ctl stop_node on the remote host over SSH.
        if args.force:
            shell_no_pipe('ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no %s "/usr/bin/zstack-ctl stop_node --force"' % args.host)
        else:
            shell_no_pipe('ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no %s "/usr/bin/zstack-ctl stop_node"' % args.host)

    def run(self, args):
        if args.host:
            self._stop_remote(args)
            return

        pid = get_management_node_pid()
        if not pid:
            info('the management node has been stopped')
            return

        timeout = 30
        if not args.force:
            # graceful path: run shutdown.sh, then wait up to `timeout`
            # seconds for the pid to disappear
            @loop_until_timeout(timeout)
            def wait_stop():
                return get_management_node_pid() is None

            shell('bash %s' % os.path.join(ctl.zstack_home, self.STOP_SCRIPT))
            if wait_stop():
                info('successfully stopped management node')
                return

        # forced path, or graceful shutdown timed out: kill -9
        pid = get_management_node_pid()
        if pid:
            if not args.force:
                info('unable to stop management node within %s seconds, kill it' % timeout)
            with on_error('unable to kill -9 %s' % pid):
                shell('kill -9 %s' % pid)

class RestartNodeCmd(Command):
    # 'zstack-ctl restart_node': stop then start via the sibling commands.

    def __init__(self):
        super(RestartNodeCmd, self).__init__()
        self.name = 'restart_node'
        self.description = 'restart the management node'
        ctl.register_command(self)

    def run(self, args):
        ctl.internal_run('stop_node')
        ctl.internal_run('start_node')

class SaveConfigCmd(Command):
    # 'zstack-ctl save_config': copy zstack.properties and the SSH identity
    # keys out of ZSTACK_HOME into a backup folder (default ~/.zstack/).
    DEFAULT_PATH = '~/.zstack/'

    def __init__(self):
        super(SaveConfigCmd, self).__init__()
        self.name = 'save_config'
        self.description = 'save ZStack configuration from ZSTACK_HOME to specified folder'
        ctl.register_command(self)

    def install_argparse_arguments(self, parser):
        parser.add_argument('--save-to', help='the folder where ZStack configurations should be saved')

    def run(self, args):
        path = args.save_to
        if not path:
            path = self.DEFAULT_PATH

        path = os.path.expanduser(path)
        if not os.path.exists(path):
            os.makedirs(path)

        # 'yes | cp' answers any interactive overwrite prompt
        properties_file_path = os.path.join(path, 'zstack.properties')
        shell('yes | cp %s %s' % (ctl.properties_file_path, properties_file_path))
        ssh_private_key_path = os.path.join(path, 'id_rsa')
        ssh_public_key_path = os.path.join(path, 'id_rsa.pub')
        shell('yes | cp %s %s' % (ctl.ssh_private_key, ssh_private_key_path))
        shell('yes | cp %s %s' % (ctl.ssh_public_key, ssh_public_key_path))

        info('successfully saved %s to %s' % (ctl.properties_file_path, properties_file_path))

class RestoreConfigCmd(Command):
    # 'zstack-ctl restore_config': the inverse of save_config — copy the
    # saved zstack.properties and SSH keys back into ZSTACK_HOME.
    DEFAULT_PATH = '~/.zstack/'

    def __init__(self):
        super(RestoreConfigCmd, self).__init__()
        self.name = "restore_config"
        self.description = 'restore ZStack configuration from specified folder to ZSTACK_HOME'
        ctl.register_command(self)

    def install_argparse_arguments(self, parser):
        parser.add_argument('--restore-from', help='the folder where ZStack configurations should be found')

    def run(self, args):
        path = args.restore_from
        if not path:
            path = self.DEFAULT_PATH

        path = os.path.expanduser(path)

        # --restore-from may point at the folder or at the properties file itself
        if os.path.isdir(path):
            properties_file_path = os.path.join(path, 'zstack.properties')
        elif os.path.isfile(path):
            properties_file_path = path
        else:
            raise CtlError('cannot find zstack.properties at %s' % path)

        # 'yes | cp' answers any interactive overwrite prompt; the SSH key
        # pair is restored alongside the properties file
        shell('yes | cp %s %s' % (properties_file_path, ctl.properties_file_path))
        ssh_private_key_path = os.path.join(path, 'id_rsa')
        ssh_public_key_path = os.path.join(path, 'id_rsa.pub')
        shell('yes | cp %s %s' % (ssh_private_key_path, ctl.ssh_private_key))
        shell('yes | cp %s %s' % (ssh_public_key_path, ctl.ssh_public_key))

        info('successfully restored zstack.properties and ssh identity keys from %s to %s' % (properties_file_path, ctl.properties_file_path))

class InstallDbCmd(Command):
    # 'zstack-ctl install_db': install and configure MySQL/MariaDB on a local
    # or remote host via a generated Ansible playbook, set the root password,
    # and grant remote access from this machine's IPs.
    def __init__(self):
        super(InstallDbCmd, self).__init__()
        self.name = "install_db"
        self.description = (
            "install MySQL database on a target machine which can be a remote machine or the local machine."
            "\nNOTE: you may need to set --login-password to password of previous MySQL root user, if the machine used to have MySQL installed and removed."
            "\nNOTE: if you hasn't setup public key for ROOT user on the remote machine, this command will prompt you for password of SSH ROOT user for the remote machine."
        )
        ctl.register_command(self)

    def install_argparse_arguments(self, parser):
        parser.add_argument('--host', help='host IP, for example, 192.168.0.212, please specify the real IP rather than "localhost" or "127.0.0.1" when installing on local machine; otherwise management nodes on other machines cannot access the DB.', required=True)
        parser.add_argument('--root-password', help="new password of MySQL root user; an empty password is used if both this option and --login-password option are omitted")
        parser.add_argument('--login-password', help="login password of MySQL root user; an empty password is used if this option is omitted."
                            "\n[NOTE] this option is needed only when the machine has MySQL previously installed and removed; the old MySQL root password will be left in the system,"
                            "you need to input it in order to reset root password for the new installed MySQL.", default=None)
        parser.add_argument('--debug', help="open Ansible debug option", action="store_true", default=False)
        parser.add_argument('--yum', help="Use ZStack predefined yum repositories. The valid options include: alibase,aliepel,163base,ustcepel,zstack-local. NOTE: only use it when you know exactly what it does.", default=None)
        # NOTE(review): no action="store_true" here, so '--no-backup' expects a
        # value on the command line — confirm this is intended.
        parser.add_argument('--no-backup', help='do NOT backup the database. If the database is very large and you have manually backup it, using this option will fast the upgrade process. [DEFAULT] false', default=False)
        parser.add_argument('--ssh-key', help="the path of private key for SSH login $host; if provided, Ansible will use the specified key as private key to SSH login the $host", default=None)

    def run(self, args):
        if not args.yum:
            args.yum = get_yum_repo_from_property()

        # collect all non-loopback IPv4 addresses of THIS machine so that the
        # playbook can grant MySQL remote access from each of them
        script = ShellCmd("ip addr |grep 'inet '|grep -v '127.0.0.1'|awk '{print $2}'|awk -F '/' '{print $1}'")
        script(True)
        current_host_ips = script.stdout.split('\n')

        # Ansible playbook template; $-placeholders are filled by
        # string.Template.substitute() below.
        yaml = '''---
- hosts: $host
  remote_user: root

  vars:
      root_password: $root_password
      login_password: $login_password
      yum_repo: "$yum_repo"

  tasks:
    - name: pre-install script
      script: $pre_install_script

    - name: install MySQL for RedHat 6 through user defined repos
      when: ansible_os_family == 'RedHat' and ansible_distribution_version < '7' and yum_repo != 'false'
      shell: yum clean metadata; yum --disablerepo=* --enablerepo={{yum_repo}} --nogpgcheck install -y mysql mysql-server
      register: install_result

    - name: install MySQL for RedHat 6 through system defined repos
      when: ansible_os_family == 'RedHat' and ansible_distribution_version < '7' and yum_repo == 'false'
      shell: "yum clean metadata; yum --nogpgcheck install -y mysql mysql-server "
      register: install_result

    - name: install MySQL for RedHat 7 from local
      when: ansible_os_family == 'RedHat' and ansible_distribution_version >= '7' and yum_repo != 'false'
      shell: yum clean metadata; yum --disablerepo=* --enablerepo={{yum_repo}} --nogpgcheck install -y mariadb mariadb-server iptables-services
      register: install_result

    - name: install MySQL for RedHat 7 from local
      when: ansible_os_family == 'RedHat' and ansible_distribution_version >= '7' and yum_repo == 'false'
      shell: yum clean metadata; yum --nogpgcheck install -y mariadb mariadb-server iptables-services
      register: install_result

    - name: install MySQL for Ubuntu
      when: ansible_os_family == 'Debian'
      apt: pkg={{item}} update_cache=yes
      with_items:
        - mariadb-client
        - mariadb-server
        - iptables-persistent
      register: install_result

    - name: open 3306 port
      when: ansible_os_family == 'RedHat'
      shell: iptables-save | grep -- "-A INPUT -p tcp -m tcp --dport 3306 -j ACCEPT" > /dev/null || (iptables -I INPUT -p tcp -m tcp --dport 3306 -j ACCEPT && service iptables save)

    - name: open 3306 port
      when: ansible_os_family != 'RedHat'
      shell: iptables-save | grep -- "-A INPUT -p tcp -m tcp --dport 3306 -j ACCEPT" > /dev/null || (iptables -I INPUT -p tcp -m tcp --dport 3306 -j ACCEPT && /etc/init.d/iptables-persistent save)

    - name: run post-install script
      script: $post_install_script

    - name: enable MySQL daemon on RedHat 6
      when: ansible_os_family == 'RedHat' and ansible_distribution_version < '7'
      service: name=mysqld state=restarted enabled=yes

    - name: enable MySQL daemon on RedHat 7
      when: ansible_os_family == 'RedHat' and ansible_distribution_version >= '7'
      service: name=mariadb state=restarted enabled=yes

    - name: enable MySQL on Ubuntu
      when: ansible_os_family == 'Debian'
      service: name=mysql state=restarted enabled=yes

    - name: change root password
      shell: $change_password_cmd
      register: change_root_result
      ignore_errors: yes

    - name: grant remote access
      when: change_root_result.rc == 0
      shell: $grant_access_cmd

    - name: rollback MySQL installation on RedHat 6
      when: ansible_os_family == 'RedHat' and ansible_distribution_version < '7' and change_root_result.rc != 0 and install_result.changed == True
      shell: rpm -ev mysql mysql-server

    - name: rollback MySQL installation on RedHat 7
      when: ansible_os_family == 'RedHat' and ansible_distribution_version >= '7' and change_root_result.rc != 0 and install_result.changed == True
      shell: rpm -ev mariadb mariadb-server

    - name: rollback MySql installation on Ubuntu
      when: ansible_os_family == 'Debian' and change_root_result.rc != 0 and install_result.changed == True
      apt: pkg={{item}} state=absent update_cache=yes
      with_items:
        - mysql-client
        - mysql-server

    - name: failure
      fail: >
        msg="failed to change root password of MySQL, see prior error in task 'change root password'; the possible cause
        is the machine used to have MySQL installed and removed, the previous password of root user is remaining on the
        machine; try using --login-password. We have rolled back the MySQL installation so you can safely run install_db
        again with --login-password set."
      when: change_root_result.rc != 0 and install_result.changed == False
'''

        if not args.root_password and not args.login_password:
            # no password at all: the shell-quoted empty string ""''"" is
            # passed through to mysqladmin/mysql
            args.root_password = '''"''"'''
            more_cmd = ' '
            for ip in current_host_ips:
                if not ip:
                    continue
                more_cmd += "GRANT ALL PRIVILEGES ON *.* TO 'root'@'%s' IDENTIFIED BY '' WITH GRANT OPTION;" % ip
            grant_access_cmd = '''/usr/bin/mysql -u root -e "GRANT ALL PRIVILEGES ON *.* TO 'root'@'localhost' IDENTIFIED BY '' WITH GRANT OPTION; GRANT ALL PRIVILEGES ON *.* TO 'root'@'%s' IDENTIFIED BY '' WITH GRANT OPTION; %s FLUSH PRIVILEGES;"''' % (args.host, more_cmd)
        else:
            # default the new root password to the login password when only
            # --login-password was given
            if not args.root_password:
                args.root_password = args.login_password
            more_cmd = ' '
            for ip in current_host_ips:
                if not ip:
                    continue
                more_cmd += "GRANT ALL PRIVILEGES ON *.* TO 'root'@'%s' IDENTIFIED BY '%s' WITH GRANT OPTION;" % (ip, args.root_password)
            grant_access_cmd = '''/usr/bin/mysql -u root -p%s -e "GRANT ALL PRIVILEGES ON *.* TO 'root'@'localhost' IDENTIFIED BY '%s' WITH GRANT OPTION; GRANT ALL PRIVILEGES ON *.* TO 'root'@'%s' IDENTIFIED BY '%s' WITH GRANT OPTION; %s FLUSH PRIVILEGES;"''' % (args.root_password, args.root_password, args.host, args.root_password, more_cmd)

        # {{...}} placeholders are expanded by Ansible on the target, not here
        if args.login_password is not None:
            change_root_password_cmd = '/usr/bin/mysqladmin -u root -p{{login_password}} password {{root_password}}'
        else:
            change_root_password_cmd = '/usr/bin/mysqladmin -u root password {{root_password}}'

        # Shell script run on the target before package installation: sets up
        # epel/aliyun/163 yum repos on RedHat and detects DNS hijacking of the
        # hostname (which would break MySQL/RabbitMQ).
        pre_install_script = '''
if [ -f /etc/redhat-release ] ; then

grep ' 7' /etc/redhat-release
if [ $? -eq 0 ]; then
[ -d /etc/yum.repos.d/ ] && [ ! -f /etc/yum.repos.d/epel.repo ] && echo -e "[epel]\nname=Extra Packages for Enterprise Linux \$releasever - \$basearce - mirrors.aliyun.com\nmirrorlist=https://mirrors.fedoraproject.org/metalink?repo=epel-7&arch=\$basearch\nfailovermethod=priority\nenabled=1\ngpgcheck=0\n" > /etc/yum.repos.d/epel.repo
else
[ -d /etc/yum.repos.d/ ] && [ ! -f /etc/yum.repos.d/epel.repo ] && echo -e "[epel]\nname=Extra Packages for Enterprise Linux \$releasever - \$basearce - mirrors.aliyun.com\nmirrorlist=https://mirrors.fedoraproject.org/metalink?repo=epel-6&arch=\$basearch\nfailovermethod=priority\nenabled=1\ngpgcheck=0\n" > /etc/yum.repos.d/epel.repo
fi

[ -d /etc/yum.repos.d/ ] && echo -e "#aliyun base\n[alibase]\nname=CentOS-\$releasever - Base - mirrors.aliyun.com\nfailovermethod=priority\nbaseurl=http://mirrors.aliyun.com/centos/\$releasever/os/\$basearch/\ngpgcheck=0\nenabled=0\n \n#released updates \n[aliupdates]\nname=CentOS-\$releasever - Updates - mirrors.aliyun.com\nfailovermethod=priority\nbaseurl=http://mirrors.aliyun.com/centos/\$releasever/updates/\$basearch/\nenabled=0\ngpgcheck=0\n \n[aliextras]\nname=CentOS-\$releasever - Extras - mirrors.aliyun.com\nfailovermethod=priority\nbaseurl=http://mirrors.aliyun.com/centos/\$releasever/extras/\$basearch/\nenabled=0\ngpgcheck=0\n \n[aliepel]\nname=Extra Packages for Enterprise Linux \$releasever - \$basearce - mirrors.aliyun.com\nbaseurl=http://mirrors.aliyun.com/epel/\$releasever/\$basearch\nfailovermethod=priority\nenabled=0\ngpgcheck=0\n" > /etc/yum.repos.d/zstack-aliyun-yum.repo

[ -d /etc/yum.repos.d/ ] && echo -e "#163 base\n[163base]\nname=CentOS-\$releasever - Base - mirrors.163.com\nfailovermethod=priority\nbaseurl=http://mirrors.163.com/centos/\$releasever/os/\$basearch/\ngpgcheck=0\nenabled=0\n \n#released updates \n[163updates]\nname=CentOS-\$releasever - Updates - mirrors.163.com\nfailovermethod=priority\nbaseurl=http://mirrors.163.com/centos/\$releasever/updates/\$basearch/\nenabled=0\ngpgcheck=0\n \n#additional packages that may be useful\n[163extras]\nname=CentOS-\$releasever - Extras - mirrors.163.com\nfailovermethod=priority\nbaseurl=http://mirrors.163.com/centos/\$releasever/extras/\$basearch/\nenabled=0\ngpgcheck=0\n \n[ustcepel]\nname=Extra Packages for Enterprise Linux \$releasever - \$basearch - ustc \nbaseurl=http://centos.ustc.edu.cn/epel/\$releasever/\$basearch\nfailovermethod=priority\nenabled=0\ngpgcheck=0\n" > /etc/yum.repos.d/zstack-163-yum.repo
fi

###################
#Check DNS hijacking
###################

hostname=`hostname`

pintret=`ping -c 1 -W 2 $hostname 2>/dev/null | head -n1`
echo $pintret | grep 'PING' > /dev/null
[ $? -ne 0 ] && exit 0

ip=`echo $pintret | cut -d' ' -f 3 | cut -d'(' -f 2 | cut -d')' -f 1`

ip_1=`echo $ip | cut -d'.' -f 1`
[ "127" = "$ip_1" ] && exit 0

ip addr | grep $ip > /dev/null
[ $? -eq 0 ] && exit 0

echo "The hostname($hostname) of your machine is resolved to IP($ip) which is none of IPs of your machine. It's likely your DNS server has been hijacking, please try fixing it or add \"ip_of_your_host $hostname\" to /etc/hosts. DNS hijacking will cause MySQL and RabbitMQ not working."
exit 1
'''
        # write the pre-install script to a temp file and arrange for cleanup
        fd, pre_install_script_path = tempfile.mkstemp()
        os.fdopen(fd, 'w').write(pre_install_script)

        def cleanup_pre_install_script():
            os.remove(pre_install_script_path)

        self.install_cleanup_routine(cleanup_pre_install_script)

        # post-install: the shared mysql_db_config_script defined elsewhere
        post_install_script = mysql_db_config_script
        fd, post_install_script_path = tempfile.mkstemp()
        os.fdopen(fd, 'w').write(post_install_script)

        def cleanup_post_install_script():
            os.remove(post_install_script_path)

        self.install_cleanup_routine(cleanup_post_install_script)

        t = string.Template(yaml)
        if args.yum:
            yum_repo = args.yum
        else:
            yum_repo = 'false'
        # NOTE(review): 'yum_folder' has no matching $placeholder in the
        # template; Template.substitute() ignores extra mapping keys.
        yaml = t.substitute({
            'host': args.host,
            'change_password_cmd': change_root_password_cmd,
            'root_password': args.root_password,
            'login_password': args.login_password,
            'grant_access_cmd': grant_access_cmd,
            'pre_install_script': pre_install_script_path,
            'yum_folder': ctl.zstack_home,
            'yum_repo': yum_repo,
            'post_install_script': post_install_script_path
        })

        ansible(yaml, args.host, args.debug, args.ssh_key)

class UpgradeHACmd(Command):
    '''This feature only support zstack offline image currently'''
    # shared across the command; filled in run()
    host_post_info_list = []
    current_dir = os.path.dirname(os.path.realpath(__file__))
    conf_dir = "/var/lib/zstack/ha/"
    private_key_name = conf_dir + "ha_key"
    conf_file = conf_dir + "ha.yaml"
    logger_dir = "/var/log/zstack/"
    logger_file = "ha.log"
    community_iso = "/opt/ZStack-Community-x86_64-DVD-1.4.0.iso"
    bridge = ""
    # spinner stages shown during the upgrade; exactly one is True at a time
    SpinnerInfo.spinner_status = {'upgrade_repo':False,'stop_mevoco':False, 'upgrade_mevoco':False,'upgrade_db':False,
                                  'backup_db':False, 'check_init':False, 'start_mevoco':False}
    ha_config_content = None

    def __init__(self):
        super(UpgradeHACmd, self).__init__()
        self.name = "upgrade_ha"
        self.description = "upgrade high availability environment for ZStack-Enterprise."
        ctl.register_command(self)

    def install_argparse_arguments(self, parser):
        # NOTE(review): run() reads args.mevoco_installer, but argparse would
        # derive dest 'zstack_enterprise_installer' from this option unless a
        # dest= was lost in transcription — TODO confirm against upstream.
        parser.add_argument('--zstack-enterprise-installer','--enterprise',
                            help="The new zstack-enterprise installer package, get it from http://cdn.zstack.io/product_downloads/zstack-enterprise/",
                            required=True)
        parser.add_argument('--iso',
                            help="get it from http://cdn.zstack.io/product_downloads/iso/",
                            required=True)

    def upgrade_repo(self, iso, tmp_iso, host_post_info):
        # Sync the new ISO's content into /opt/zstack-dvd/ on the remote host
        # and refresh the zstack-local yum repo from it.
        command = (
            "yum clean --enablerepo=zstack-local metadata && pkg_list=`rsync | grep \"not installed\" | awk"
            " '{ print $2 }'` && for pkg in $pkg_list; do yum --disablerepo=* --enablerepo=zstack-local install "
            "-y $pkg; done;")
        run_remote_command(command, host_post_info)
        command = "mkdir -p %s" % tmp_iso
        run_remote_command(command, host_post_info)
        command = "mount -o loop %s %s" % (iso, tmp_iso)
        run_remote_command(command, host_post_info)
        command = "rsync -au --delete %s /opt/zstack-dvd/" % tmp_iso
        run_remote_command(command, host_post_info)
        command = "umount %s" % tmp_iso
        run_remote_command(command, host_post_info)
        command = "rm -rf %s" % tmp_iso
        run_remote_command(command, host_post_info)

    def check_file_exist(self, file, host_post_info_list):
        # Require an absolute path; copy the file to any host that is missing it.
        if os.path.isabs(file) is False:
            error("Make sure you pass file name with absolute path")
        else:
            if os.path.isfile(file) is False:
                error("Didn't find file %s" % file)
            else:
                for host_post_info in host_post_info_list:
                    if file_dir_exist("path=%s" % file, host_post_info) is False:
                        copy_arg = CopyArg()
                        copy_arg.src = file
                        copy_arg.dest = file
                        copy(copy_arg, host_post_info)

    # do not enable due to lot of customer version
    def check_file_md5sum(self):
        pass

    def check_mn_running(self,host_post_info):
        # All management nodes must be fully running before the upgrade starts.
        cmd = create_check_mgmt_node_command(timeout=4, mn_node=host_post_info.host)
        cmd(False)
        if cmd.return_code != 0:
            error("Check management node %s status failed, make sure the status is running before upgrade" % host_post_info.host)
        else:
            if 'false' in cmd.stdout:
                error('The management node %s is starting, please wait a few seconds to upgrade' % host_post_info.host)
            elif 'true' in cmd.stdout:
                return 0
            else:
                error('The management node %s status is: Unknown, please start the management node before upgrade' % host_post_info.host)

    def upgrade_mevoco(self, mevoco_installer, host_post_info):
        # Run the installer in upgrade mode (-u -i) on the remote host via SSH.
        mevoco_dir = os.path.dirname(mevoco_installer)
        mevoco_bin = os.path.basename(mevoco_installer)
        command = "rm -rf /tmp/zstack_upgrade.lock && cd %s && bash %s -u -i " % (mevoco_dir, mevoco_bin)
        logger.debug("[ HOST: %s ] INFO: starting run shell command: '%s' " % (host_post_info.host, command))
        (status, output)= commands.getstatusoutput("ssh -o StrictHostKeyChecking=no -i %s root@%s '%s'" %
                                                   (UpgradeHACmd.private_key_name, host_post_info.host, command))
        if status != 0:
            error("Something wrong on host: %s\n %s" % (host_post_info.host, output))
        logger.debug("[ HOST: %s ] SUCC: shell command: '%s' successfully" % (host_post_info.host, command))

    def run(self, args):
        # create log
        create_log(UpgradeHACmd.logger_dir, UpgradeHACmd.logger_file)
        spinner_info = SpinnerInfo()
        spinner_info.output = "Checking system and init environment"
        spinner_info.name = 'check_init'
        SpinnerInfo.spinner_status['check_init'] = True
        ZstackSpinner(spinner_info)

        if os.path.isfile(UpgradeHACmd.conf_file) is not True:
            error("Didn't find HA config file %s, please contact support for upgrade" % UpgradeHACmd.conf_file)

        host_inventory = UpgradeHACmd.conf_dir + 'host'
        yum_repo = get_yum_repo_from_property()
        private_key_name = UpgradeHACmd.conf_dir+ "ha_key"

        # NOTE(review): --iso is declared required=True above, so this None
        # branch looks unreachable — confirm before relying on community_iso.
        if args.iso is None:
            community_iso = UpgradeHACmd.community_iso
        else:
            community_iso = args.iso

        # the HA config lists 2 or 3 management-node IPs
        mn_list = get_ha_mn_list(UpgradeHACmd.conf_file)
        host1_ip = mn_list[0]
        host2_ip = mn_list[1]
        if len(mn_list) > 2:
            host3_ip = mn_list[2]

        # init host1 parameter
        self.host1_post_info = HostPostInfo()
        self.host1_post_info.host = host1_ip
        self.host1_post_info.host_inventory = host_inventory
        self.host1_post_info.private_key = private_key_name
        self.host1_post_info.yum_repo = yum_repo
        self.host1_post_info.post_url = ""

        # init host2 parameter
        self.host2_post_info = HostPostInfo()
        self.host2_post_info.host = host2_ip
        self.host2_post_info.host_inventory = host_inventory
        self.host2_post_info.private_key = private_key_name
        self.host2_post_info.yum_repo = yum_repo
        self.host2_post_info.post_url = ""

        if len(mn_list) > 2:
            # init host3 parameter
            self.host3_post_info = HostPostInfo()
            self.host3_post_info.host = host3_ip
            self.host3_post_info.host_inventory = host_inventory
            self.host3_post_info.private_key = private_key_name
            self.host3_post_info.yum_repo = yum_repo
            self.host3_post_info.post_url = ""

        UpgradeHACmd.host_post_info_list = [self.host1_post_info, self.host2_post_info]
        if len(mn_list) > 2:
            UpgradeHACmd.host_post_info_list = [self.host1_post_info, self.host2_post_info, self.host3_post_info]

        for host in UpgradeHACmd.host_post_info_list:
            # to do check mn all running
            self.check_mn_running(host)

        for file in [args.mevoco_installer, community_iso]:
            self.check_file_exist(file, UpgradeHACmd.host_post_info_list)

        spinner_info = SpinnerInfo()
        spinner_info.output = "Starting to upgrade repo"
        spinner_info.name = "upgrade_repo"
        SpinnerInfo.spinner_status = reset_dict_value(SpinnerInfo.spinner_status,False)
        SpinnerInfo.spinner_status['upgrade_repo'] = True
        ZstackSpinner(spinner_info)
        # random mount point so concurrent runs cannot collide
        rand_dir_name = uuid.uuid4()
        tmp_iso = "/tmp/%s/iso/" % rand_dir_name
        for host_post_info in UpgradeHACmd.host_post_info_list:
            self.upgrade_repo(community_iso, tmp_iso, host_post_info)

        spinner_info = SpinnerInfo()
        spinner_info.output = "Stopping mevoco"
        spinner_info.name = "stop_mevoco"
        SpinnerInfo.spinner_status = reset_dict_value(SpinnerInfo.spinner_status,False)
        SpinnerInfo.spinner_status['stop_mevoco'] = True
        ZstackSpinner(spinner_info)
        for host_post_info in UpgradeHACmd.host_post_info_list:
            stop_mevoco(host_post_info)

        # backup db before upgrade
        spinner_info = SpinnerInfo()
        spinner_info.output = "Starting to backup database"
        spinner_info.name = "backup_db"
        SpinnerInfo.spinner_status = reset_dict_value(SpinnerInfo.spinner_status,False)
        SpinnerInfo.spinner_status['backup_db'] = True
        ZstackSpinner(spinner_info)
        # NOTE(review): the dump's exit status is captured but never checked —
        # a failed backup does not abort the upgrade; confirm this is intended.
        (status, output) = commands.getstatusoutput("zstack-ctl dump_mysql >> /dev/null 2>&1")

        spinner_info = SpinnerInfo()
        spinner_info.output = "Starting to upgrade mevoco"
        spinner_info.name = "upgrade_mevoco"
        SpinnerInfo.spinner_status = reset_dict_value(SpinnerInfo.spinner_status,False)
        SpinnerInfo.spinner_status['upgrade_mevoco'] = True
        ZstackSpinner(spinner_info)
        for host_post_info in UpgradeHACmd.host_post_info_list:
            self.upgrade_mevoco(args.mevoco_installer, host_post_info)

        spinner_info = SpinnerInfo()
        spinner_info.output = "Starting to upgrade database"
        spinner_info.name = "upgrade_db"
        SpinnerInfo.spinner_status = reset_dict_value(SpinnerInfo.spinner_status,False)
        SpinnerInfo.spinner_status['upgrade_db'] = True
        ZstackSpinner(spinner_info)
        (status, output) = commands.getstatusoutput("zstack-ctl upgrade_db")
        if status != 0:
            error("Upgrade mysql failed: %s" % output)
        else:
            logger.debug("SUCC: shell command: 'zstack-ctl upgrade_db' successfully" )

        spinner_info = SpinnerInfo()
        spinner_info.output = "Starting mevoco"
        spinner_info.name = "start_mevoco"
        SpinnerInfo.spinner_status = reset_dict_value(SpinnerInfo.spinner_status,False)
        SpinnerInfo.spinner_status['start_mevoco'] = True
        ZstackSpinner(spinner_info)
        for host_post_info in UpgradeHACmd.host_post_info_list:
            start_remote_mn(host_post_info)

        # stop the spinner before printing the final message
        SpinnerInfo.spinner_status['start_mevoco'] = False
        time.sleep(.2)
        info(colored("\nUpgrade HA successfully!","blue"))

class AddManagementNodeCmd(Command):
    # 'zstack-ctl add_multi_management': install, configure and start extra
    # management nodes (plus UI) on a list of remote hosts.
    SpinnerInfo.spinner_status = {'check_init':False,'add_key':False,'deploy':False,'config':False,'start':False,'install_ui':False}
    install_pkgs = ['openssl']
    logger_dir = '/var/log/zstack/'
    logger_file = 'zstack-ctl.log'

    def __init__(self):
        super(AddManagementNodeCmd, self).__init__()
        self.name = "add_multi_management"
        self.description = "add multi management node."
        ctl.register_command(self)

    def install_argparse_arguments(self, parser):
        parser.add_argument('--host-list','--hosts',nargs='+',
                            help="All hosts connect info follow below format: 'root:passwd1@host1_ip root:passwd2@host2_ip ...' ",
                            required=True)
        parser.add_argument('--force-reinstall','-f',action="store_true", default=False)
        parser.add_argument('--ssh-key',
                            help="the path of private key for SSH login $host; if provided, Ansible will use the "
                                 "specified key as private key to SSH login the $host, default will use zstack private key",
                            default=None)

    def add_public_key_to_host(self, key_path, host_info):
        # Push our public key with sshpass so later steps can SSH without a password.
        command ='timeout 10 sshpass -p %s ssh-copy-id -o UserKnownHostsFile=/dev/null -o PubkeyAuthentication=no' \
                 ' -o StrictHostKeyChecking=no -i %s root@%s' % (host_info.remote_pass, key_path, host_info.host)
        (status, output) = commands.getstatusoutput(command)
        if status != 0:
            error("Copy public key '%s' to host: '%s' failed:\n %s" % (key_path, host_info.host, output))

    def deploy_mn_on_host(self,args, host_info, key):
        # Delegate to 'zstack-ctl install_management_node' for the actual deployment.
        if args.force_reinstall is True:
            command = 'zstack-ctl install_management_node --host=%s --ssh-key="%s" --force-reinstall' % (host_info.host, key)
        else:
            command = 'zstack-ctl install_management_node --host=%s --ssh-key="%s"' % (host_info.host, key)
        (status, output) = commands.getstatusoutput(command)
        if status != 0:
            error("deploy mn on host %s failed:\n %s" % (host_info.host, output))

    def install_ui_on_host(self, key, host_info):
        command = 'zstack-ctl install_ui --host=%s --ssh-key=%s' % (host_info.host, key)
        (status, output) = commands.getstatusoutput(command)
        if status != 0:
            error("deploy ui on host %s failed:\n %s" % (host_info.host, output))

    def config_mn_on_host(self, key, host_info):
        # Copy our zstack.properties to the new node, then point its
        # management.server.ip at itself and persist the config.
        command = "scp -i %s %s root@%s:%s" % (key, ctl.properties_file_path, host_info.host, ctl.properties_file_path)
        (status, output) = commands.getstatusoutput(command)
        if status != 0:
            error("copy config to host %s failed:\n %s" % (host_info.host, output))
        command = "ssh -q -i %s -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no root@%s zstack-ctl configure " \
                  "management.server.ip=%s && zstack-ctl save_config" % (key, host_info.host, host_info.host)
        (status, output) = commands.getstatusoutput(command)
        if status != 0:
            error("config management server %s failed:\n %s" % (host_info.host, output))

    def start_mn_on_host(self, host_info, key):
        # Start the node, re-create the zstack-dvd symlink the UI serves, then
        # start the UI.
        command = "ssh -q -i %s -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no root@%s zstack-ctl " \
                  "start_node " % (key, host_info.host)
        (status, output) = commands.getstatusoutput(command)
        command = "ln -s /opt/zstack-dvd/ /usr/local/zstack/apache-tomcat/webapps/zstack/static/zstack-dvd"
        run_remote_command(command, host_info, True, True)
        if status != 0:
            error("start node on host %s failed:\n %s" % (host_info.host, output))
        command = "ssh -q -i %s -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no root@%s zstack-ctl " \
                  "start_ui" % (key, host_info.host)
        (status, output) = commands.getstatusoutput(command)
        if status != 0:
            error("start ui on host %s failed:\n %s" % (host_info.host, output))

    def install_packages(self, pkg_list, host_info):
        # Install prerequisite packages with the distro's native tool.
        distro = platform.dist()[0]
        if distro == "centos":
            for pkg in pkg_list:
                yum_install_package(pkg, host_info)
        elif distro == "Ubuntu":
            apt_install_packages(pkg_list, host_info)

    def run(self, args):
        create_log(AddManagementNodeCmd.logger_dir, AddManagementNodeCmd.logger_file)
        host_info_list = []
        if args.ssh_key is None:
            args.ssh_key = ctl.zstack_home + "/WEB-INF/classes/ansible/rsaKeys/id_rsa.pub"
        # NOTE(review): derives the private-key path by stripping after the
        # first '.', which breaks for paths containing other dots — confirm.
        private_key = args.ssh_key.split('.')[0]
        spinner_info = SpinnerInfo()
        spinner_info.output = "Checking system and init environment"
        spinner_info.name = 'check_init'
        SpinnerInfo.spinner_status = reset_dict_value(SpinnerInfo.spinner_status,False)
        SpinnerInfo.spinner_status['check_init'] = True
        ZstackSpinner(spinner_info)

        # parse every user:password@host entry and register it in the inventory
        for host in args.host_list:
            inventory_file = ctl.zstack_home + "/../../../ansible/hosts"
            host_info = HostPostInfo()
            host_info.private_key = private_key
            host_info.host_inventory = inventory_file
            (host_info.remote_user, host_info.remote_pass, host_info.host, host_info.remote_port) = check_host_info_format(host)
            check_host_password(host_info.remote_pass, host_info.host)
            # append the host to the inventory only if it is not already there
            command = "cat %s | grep %s || echo %s >> %s" % (inventory_file, host_info.host, host_info.host, inventory_file)
            (status, output) = commands.getstatusoutput(command)
            if status != 0 :
                error(output)
            host_info_list.append(host_info)

        for host_info in host_info_list:
            spinner_info = SpinnerInfo()
            spinner_info.output = "Add public key to host %s" % host_info.host
            spinner_info.name = 'add_key'
            SpinnerInfo.spinner_status = reset_dict_value(SpinnerInfo.spinner_status,False)
            SpinnerInfo.spinner_status['add_key'] = True
            ZstackSpinner(spinner_info)
            self.add_public_key_to_host(args.ssh_key, host_info)

            spinner_info = SpinnerInfo()
            spinner_info.output = "Deploy management node to host %s" % host_info.host
            spinner_info.name = 'deploy'
            SpinnerInfo.spinner_status = reset_dict_value(SpinnerInfo.spinner_status,False)
            SpinnerInfo.spinner_status['deploy'] = True
            ZstackSpinner(spinner_info)
            self.deploy_mn_on_host(args, host_info, private_key)
            self.install_packages(AddManagementNodeCmd.install_pkgs, host_info)

            spinner_info = SpinnerInfo()
            spinner_info.output = "Config management node on host %s" % host_info.host
            spinner_info.name = 'config'
            SpinnerInfo.spinner_status = reset_dict_value(SpinnerInfo.spinner_status,False)
            SpinnerInfo.spinner_status['config'] = True
            ZstackSpinner(spinner_info)
            self.config_mn_on_host(private_key, host_info)

            spinner_info = SpinnerInfo()
            spinner_info.output = "Install UI on host %s" % host_info.host
            spinner_info.name = 'install_ui'
            SpinnerInfo.spinner_status = reset_dict_value(SpinnerInfo.spinner_status,False)
            SpinnerInfo.spinner_status['install_ui'] = True
            ZstackSpinner(spinner_info)
            self.install_ui_on_host(private_key, host_info)

            spinner_info = SpinnerInfo()
            spinner_info.output = "Start management node on host %s" % host_info.host
            spinner_info.name = 'start'
            SpinnerInfo.spinner_status = reset_dict_value(SpinnerInfo.spinner_status,False)
            SpinnerInfo.spinner_status['start'] = True
            ZstackSpinner(spinner_info)
            self.start_mn_on_host(host_info,private_key)

        SpinnerInfo.spinner_status['start'] = False
        time.sleep(0.2)
        info(colored("\nAll management nodes add successfully",'blue'))

class RecoverHACmd(Command):
    '''This feature only support zstack offline image currently'''
    host_post_info_list = []
    current_dir = os.path.dirname(os.path.realpath(__file__))
    conf_dir = "/var/lib/zstack/ha/"
    conf_file = conf_dir + "ha.yaml"
    host_inventory = conf_dir + 'host'
    private_key = conf_dir + 'ha_key'
    logger_dir = "/var/log/zstack/"
    logger_file = "ha.log"
    bridge = ""
    # NOTE(review): 'cluster' appears twice in this dict literal; the
    # duplicate is harmless but is likely a copy-paste slip.
    SpinnerInfo.spinner_status = {'cluster':False, 'mysql':False,'mevoco':False, 'check_init':False, 'cluster':False}
    ha_config_content = None

    def __init__(self):
        super(RecoverHACmd, self).__init__()
        self.name = "recover_ha"
        self.description = "Recover high availability environment for Mevoco."
        ctl.register_command(self)
    def stop_mysql_service(self, host_post_info):
        # Stop MySQL on the given host, then force-kill anything still bound
        # to 4567 (the Galera replication port) so a later bootstrap can bind.
        command = "service mysql stop"
        run_remote_command(command, host_post_info)
        mysqld_status = run_remote_command("netstat -ltnp | grep :4567[[:space:]]", host_post_info, return_status=True)
        if mysqld_status is True:
            run_remote_command("lsof -i tcp:4567 | awk 'NR!=1 {print $2}' | xargs kill -9", host_post_info)
    def reboot_cluster_service(self, host_post_info):
        # Ensure the non-MySQL HA services are running on the host.
        service_status("haproxy", "state=started", host_post_info)
        service_status("keepalived", "state=started", host_post_info)
        service_status("rabbitmq-server", "state=started", host_post_info)
    def recover_mysql(self, host_post_info, host_post_info_list):
        # Try to re-form the Galera cluster using host_post_info as the
        # bootstrap donor: stop MySQL everywhere, bootstrap on the donor,
        # start the rest, then restart the donor. Returns True on success,
        # False as soon as any step fails.
        for host_info in host_post_info_list:
            self.stop_mysql_service(host_info)
        command = "service mysql bootstrap"
        status, output = run_remote_command(command,host_post_info,True,True)
        if status is False:
            return False
        for host_info in host_post_info_list:
            if host_info.host != host_post_info.host:
                command = "service mysql start"
                status, output = run_remote_command(command,host_info,True,True)
                if status is False:
                    return False
        # Restart the donor so it rejoins the cluster as a normal member.
        command = "service mysql restart"
        status, output = run_remote_command(command,host_post_info,True,True)
        return status
    def sync_prometheus(self, host_post_info):
        # sync prometheus data
        sync_arg = SyncArg()
        sync_arg.src = '/var/lib/zstack/prometheus/'
        sync_arg.dest = '/var/lib/zstack/prometheus/'
        sync(sync_arg, host_post_info)
    def run(self, args):
        # Entry point: recover an existing HA deployment described by
        # /var/lib/zstack/ha/ha.yaml — MySQL first, then the cluster
        # services, monitor data, and finally the management nodes.
        # NOTE(review): logs go to UpgradeHACmd's logger paths, not
        # RecoverHACmd's own logger_dir/logger_file — looks like a
        # copy-paste from UpgradeHACmd; confirm intent.
        create_log(UpgradeHACmd.logger_dir, UpgradeHACmd.logger_file)
        spinner_info = SpinnerInfo()
        spinner_info.output = "Checking system and init environment"
        spinner_info.name = 'check_init'
        SpinnerInfo.spinner_status['check_init'] = True
        ZstackSpinner(spinner_info)
        host3_exist = False
        if os.path.isfile(RecoverHACmd.conf_file) is not True:
            error("Didn't find HA config file %s, please use traditional 'zstack-ctl install_ha' to recover your cluster" % RecoverHACmd.conf_file)
        if os.path.exists(RecoverHACmd.conf_file):
            with open(RecoverHACmd.conf_file, 'r') as f:
                # NOTE(review): yaml.load without an explicit Loader executes
                # arbitrary tags; the file is root-owned local config so the
                # risk is low, but yaml.safe_load would be safer.
                RecoverHACmd.ha_config_content = yaml.load(f)
        if RecoverHACmd.ha_config_content['host_list'] is None:
            error("Didn't find host_list in config file %s" % RecoverHACmd.conf_file)
        host_list = RecoverHACmd.ha_config_content['host_list'].split(',')
        # NOTE(review): host1_ip/host2_ip are only bound when len == 2 or 3;
        # any other length leads to a NameError below — confirm the config
        # writer guarantees 2 or 3 entries.
        if len(host_list) == 2:
            host1_ip = host_list[0]
            host2_ip = host_list[1]
        if len(host_list) == 3:
            host3_exist = True
            host3_ip = host_list[2]
        if os.path.exists(RecoverHACmd.conf_file) and RecoverHACmd.ha_config_content is not None :
            if "bridge_name" in RecoverHACmd.ha_config_content:
                RecoverHACmd.bridge = RecoverHACmd.ha_config_content['bridge_name']
            else:
                error("Didn't find 'bridge_name' in config file %s" % RecoverHACmd.conf_file)
        local_ip = get_ip_by_interface(RecoverHACmd.bridge)
        host_post_info_list = []
        # init host1 parameter
        host1_post_info = HostPostInfo()
        host1_post_info.host = host1_ip
        host1_post_info.host_inventory = RecoverHACmd.host_inventory
        host1_post_info.private_key = RecoverHACmd.private_key
        host_post_info_list.append(host1_post_info)
        host2_post_info = HostPostInfo()
        host2_post_info.host = host2_ip
        host2_post_info.host_inventory = RecoverHACmd.host_inventory
        host2_post_info.private_key = RecoverHACmd.private_key
        host_post_info_list.append(host2_post_info)
        if host3_exist is True:
            host3_post_info = HostPostInfo()
            host3_post_info.host = host3_ip
            host3_post_info.host_inventory = RecoverHACmd.host_inventory
            host3_post_info.private_key = RecoverHACmd.private_key
            host_post_info_list.append(host3_post_info)
        spinner_info = SpinnerInfo()
        spinner_info.output = "Starting to recovery mysql"
        spinner_info.name = "mysql"
        SpinnerInfo.spinner_status = reset_dict_value(SpinnerInfo.spinner_status, False)
        SpinnerInfo.spinner_status['mysql'] = True
        ZstackSpinner(spinner_info)
        # Try each member in turn as the bootstrap donor until one succeeds.
        mysql_recover_status = False
        for host_post_info in host_post_info_list:
            recover_status = self.recover_mysql(host_post_info, host_post_info_list)
            if recover_status is True:
                mysql_recover_status = True
                break
        if mysql_recover_status is False:
            error("Recover mysql failed! Please check log /var/log/zstack/ha.log")
        spinner_info = SpinnerInfo()
        spinner_info.output = "Starting to recovery cluster"
        spinner_info.name = "cluster"
        SpinnerInfo.spinner_status = reset_dict_value(SpinnerInfo.spinner_status, False)
        SpinnerInfo.spinner_status['cluster'] = True
        ZstackSpinner(spinner_info)
        for host_post_info in host_post_info_list:
            self.reboot_cluster_service(host_post_info)
        spinner_info = SpinnerInfo()
        spinner_info.output = "Starting to sync monitor data"
        spinner_info.name = "prometheus"
        # NOTE(review): 'prometheus' is not a key of the spinner_status dict
        # declared on this class (it only has cluster/mysql/mevoco/check_init)
        # — confirm reset_dict_value/ZstackSpinner tolerate new keys.
        SpinnerInfo.spinner_status = reset_dict_value(SpinnerInfo.spinner_status, False)
        SpinnerInfo.spinner_status['prometheus'] = True
        ZstackSpinner(spinner_info)
        # Push local monitor data to every *other* member.
        for host_post_info in host_post_info_list:
            if host_post_info.host != local_ip:
                self.sync_prometheus(host_post_info)
        spinner_info = SpinnerInfo()
        spinner_info.output = "Starting Mevoco"
        spinner_info.name = "mevoco"
        SpinnerInfo.spinner_status = reset_dict_value(SpinnerInfo.spinner_status, False)
        SpinnerInfo.spinner_status['mevoco'] = True
        ZstackSpinner(spinner_info)
        for host_post_info in host_post_info_list:
            start_remote_mn(host_post_info)
        SpinnerInfo.spinner_status = reset_dict_value(SpinnerInfo.spinner_status, False)
        time.sleep(.3)
        info(colored("The cluster has been recovered successfully!", "blue"))

class InstallHACmd(Command):
    '''This feature only support zstack offline image currently'''
    # Class-level defaults/state shared with the sub-deployers
    # (HaproxyKeepalived, MysqlHA, RabbitmqHA) which read
    # InstallHACmd.host_post_info_list.
    host_post_info_list = []
    current_dir = os.path.dirname(os.path.realpath(__file__))
    conf_dir = "/var/lib/zstack/ha/"
    conf_file = conf_dir + "ha.yaml"
    logger_dir = "/var/log/zstack/"
    logger_file = "ha.log"
    bridge = ""
    SpinnerInfo.spinner_status = {'mysql':False,'rabbitmq':False, 'haproxy_keepalived':False, 'Mevoco':False, 'stop_mevoco':False, 'check_init':False, 'recovery_cluster':False}
    # NOTE(review): run() reads InstallHACmd.ha_config_content before any
    # assignment when conf_file does not exist — a class-level
    # "ha_config_content = None" initializer (as RecoverHACmd has) appears
    # to be missing here; confirm against the full file.
    def __init__(self):
        super(InstallHACmd, self).__init__()
        self.name = "install_ha"
        self.description = "install high availability environment for Mevoco."
        ctl.register_command(self)

    def install_argparse_arguments(self, parser):
        # CLI surface of install_ha: two mandatory hosts, an optional third,
        # the VIP/gateway/bridge network settings, the three service
        # passwords (all default "zstack123"), and a few mode switches.
        parser.add_argument('--host1-info','--h1',
                            help="The first host connect info follow below format: 'root:password@ip_address' ",
                            required=True)
        parser.add_argument('--host2-info','--h2',
                            help="The second host connect info follow below format: 'root:password@ip_address' ",
                            required=True)
        parser.add_argument('--host3-info','--h3',
                            help="The third host connect info follow below format: 'root:password@ip_address' ",
                            default=False)
        parser.add_argument('--vip',
                            help="The virtual IP address for HA setup",
                            default=None)
        parser.add_argument('--gateway',
                            help="The gateway IP address for HA setup",
                            default=None)
        parser.add_argument('--bridge',
                            help="The bridge device name, default is br_eth0",
                            )
        parser.add_argument('--mysql-root-password','--root-pass',
                            help="Password of MySQL root user", default="zstack123")
        parser.add_argument('--mysql-user-password','--user-pass',
                            help="Password of MySQL user zstack", default="zstack123")
        parser.add_argument('--rabbit-password','--rabbit-pass',
                            help="RabbitMQ password; if set, the password will be created on RabbitMQ for username "
                                 "specified by --rabbit-username. [DEFAULT] rabbitmq default password",
                            default="zstack123")
        parser.add_argument('--drop', action='store_true', default=False,
                            help="Force delete mysql data for re-deploy HA")
        parser.add_argument('--keep-db', action='store_true', default=False,
                            help='keep existing zstack database and not raise error')
        parser.add_argument('--recovery-from-this-host','--recover',
                            action='store_true', default=False,
                            help="This argument for admin to recovery mysql from the last shutdown mysql server")
        parser.add_argument('--perfect-mode', action='store_true', default=False,
                            help="This mode will re-connect mysql faster")

    def get_formatted_netmask(self, device_name):
        '''This function will return formatted netmask. eg. 172.20.12.16/24 will return 24'''
        # Query the interface netmask via ioctl; 35099 == 0x891B, the Linux
        # SIOCGIFNETMASK request. Bytes 20:24 of the returned ifreq hold the
        # packed IPv4 netmask.
        netmask = socket.inet_ntoa(fcntl.ioctl(socket.socket(socket.AF_INET, socket.SOCK_DGRAM),
                                               35099, struct.pack('256s', device_name))[20:24])
        # Count set bits per octet to convert dotted-quad to prefix length.
        formatted_netmask = sum([bin(int(x)).count('1') for x in netmask.split('.')])
        return formatted_netmask

    def run(self, args):
        # Entry point for install_ha. First half (this block): validate all
        # inputs and the local environment, generate the HA SSH keypair and
        # inventory, and build per-host HostPostInfo records.
        spinner_info = SpinnerInfo()
        spinner_info.output = "Checking system and init environment"
        spinner_info.name = 'check_init'
        SpinnerInfo.spinner_status['check_init'] = True
        ZstackSpinner(spinner_info)
        if args.bridge is None:
            InstallHACmd.bridge = 'br_eth0'
        else:
            InstallHACmd.bridge = args.bridge
        if os.path.exists(InstallHACmd.conf_file):
            with open(InstallHACmd.conf_file, 'r') as f:
                # NOTE(review): yaml.load without an explicit Loader — prefer
                # yaml.safe_load for config files.
                InstallHACmd.ha_config_content = yaml.load(f)
        if args.vip is None and args.recovery_from_this_host is False:
            error("Install HA must assign a vip")
        # check gw ip is available
        if args.gateway is None:
            if get_default_gateway_ip() is None:
                error("Can't get the gateway IP address from system, please check your route table or pass specific " \
                      "gateway through \"--gateway\" argument")
            else:
                gateway_ip = get_default_gateway_ip()
        else:
            gateway_ip = args.gateway
        (status, output) = commands.getstatusoutput('ping -c 1 %s' % gateway_ip)
        if status != 0:
            error("The gateway %s unreachable!" % gateway_ip)
        # check input host info
        host1_info = args.host1_info
        host1_connect_info_list = check_host_info_format(host1_info)
        args.host1 = host1_connect_info_list[2]
        args.host1_password = host1_connect_info_list[1]
        host2_info = args.host2_info
        host2_connect_info_list = check_host_info_format(host2_info)
        args.host2 = host2_connect_info_list[2]
        args.host2_password = host2_connect_info_list[1]
        if args.host3_info is not False:
            host3_info = args.host3_info
            host3_connect_info_list = check_host_info_format(host3_info)
            args.host3 = host3_connect_info_list[2]
            args.host3_password = host3_connect_info_list[1]
        # check root password is available
        if args.host1_password != args.host2_password:
            error("Host1 password and Host2 password must be the same, Please change one of them!")
        elif args.host3_info is not False:
            if not args.host1_password == args.host2_password == args.host3_password:
                error("All hosts root password must be the same. Please check your host password!")
        check_host_password(args.host1_password, args.host1)
        check_host_password(args.host2_password, args.host2)
        if args.host3_info is not False:
            check_host_password(args.host3_password, args.host3)
        # check image type
        zstack_local_repo = os.path.isfile("/etc/yum.repos.d/zstack-local.repo")
        galera_repo = os.path.isfile("/etc/yum.repos.d/galera.repo")
        if zstack_local_repo is False or galera_repo is False:
            error("This feature only support ZStack community CentOS 7 image")
        # check network configuration
        interface_list = os.listdir('/sys/class/net/')
        if InstallHACmd.bridge not in interface_list and args.recovery_from_this_host is False:
            error("Make sure you have already run the 'zs-network-setting' to setup the network environment, or set the"
                  " bridge name with --bridge, default bridge name is br_eth0 ")
        # NOTE(review): split('br_')[1] raises IndexError if the bridge name
        # does not contain 'br_' — a user-supplied --bridge like "mybridge"
        # crashes here instead of producing the error message; confirm.
        if InstallHACmd.bridge.split('br_')[1] not in interface_list:
            error("bridge %s should add the interface %s, make sure you have setup the interface or specify the right"
                  " bridge name" % (InstallHACmd.bridge, InstallHACmd.bridge.split('br_')[1]))
        # check keepalived label should not longer than 15 characters
        # NOTE(review): comment says 15 but the check rejects >= 13 — the
        # message says "13"; presumably 13 leaves room for a suffix. Confirm.
        if len(InstallHACmd.bridge) >= 13:
            error("bridge name length cannot be longer than 13 characters")
        # check user start this command on host1
        if args.recovery_from_this_host is False:
            local_ip = get_ip_by_interface(InstallHACmd.bridge)
            if args.host1 != local_ip:
                error("Please run this command at host1 %s, or change your host1 ip to local host ip" % args.host1)
        # check user input wrong host2 ip
        if args.host2 == args.host1:
            error("The host1 and host2 should not be the same ip address!")
        elif args.host3_info is not False:
            if args.host2 == args.host3 or args.host1 == args.host3:
                error("The host1, host2 and host3 should not be the same ip address!")
        # create log
        create_log(InstallHACmd.logger_dir, InstallHACmd.logger_file)
        # create config
        if not os.path.exists(InstallHACmd.conf_dir):
            os.makedirs(InstallHACmd.conf_dir)
        yum_repo = get_yum_repo_from_property()
        private_key_name = InstallHACmd.conf_dir+ "ha_key"
        public_key_name = InstallHACmd.conf_dir+ "ha_key.pub"
        # Generate the dedicated HA SSH keypair once; reuse on re-runs.
        if os.path.isfile(public_key_name) is not True:
            command = "echo -e 'y\n'|ssh-keygen -q -t rsa -N \"\" -f %s" % private_key_name
            (status, output) = commands.getstatusoutput(command)
            if status != 0:
                error("Generate private key %s failed! Generate manually or rerun the process!" % private_key_name)
        with open(public_key_name) as public_key_file:
            public_key = public_key_file.read()
        # create inventory file
        # The 3-host branch below rewrites the same file with all three
        # hosts, superseding the 2-host write.
        with open('%s/host' % InstallHACmd.conf_dir,'w') as f:
            f.writelines([args.host1+'\n', args.host2+'\n'])
        if args.host3_info is not False:
            with open('%s/host' % InstallHACmd.conf_dir,'w') as f:
                f.writelines([args.host1+'\n', args.host2+'\n', args.host3+'\n'])
        #host_inventory = '%s,%s' % (args.host1, args.host2)
        host_inventory = InstallHACmd.conf_dir + 'host'
        # init host1 parameter
        self.host1_post_info = HostPostInfo()
        self.host1_post_info.host = args.host1
        self.host1_post_info.host_inventory = host_inventory
        self.host1_post_info.private_key = private_key_name
        self.host1_post_info.yum_repo = yum_repo
        self.host1_post_info.vip = args.vip
        self.host1_post_info.gateway_ip = gateway_ip
        self.host1_post_info.rabbit_password = args.rabbit_password
        self.host1_post_info.mysql_password = args.mysql_root_password
        self.host1_post_info.mysql_userpassword = args.mysql_user_password
        self.host1_post_info.post_url = ""
        self.host_post_info_list.append(self.host1_post_info)
        # init host2 parameter
        self.host2_post_info = HostPostInfo()
        self.host2_post_info.host = args.host2
        self.host2_post_info.host_inventory = host_inventory
        self.host2_post_info.private_key = private_key_name
        self.host2_post_info.yum_repo = yum_repo
        self.host2_post_info.vip = args.vip
        self.host2_post_info.gateway_ip = gateway_ip
        self.host2_post_info.rabbit_password = args.rabbit_password
        self.host2_post_info.mysql_password = args.mysql_root_password
        self.host2_post_info.mysql_userpassword = args.mysql_user_password
        self.host2_post_info.post_url = ""
        self.host_post_info_list.append(self.host2_post_info)
        if args.host3_info is not False:
            # init host3 parameter
            self.host3_post_info = HostPostInfo()
            self.host3_post_info.host = args.host3
            self.host3_post_info.host_inventory = host_inventory
            self.host3_post_info.private_key = private_key_name
            self.host3_post_info.yum_repo = yum_repo
            self.host3_post_info.vip = args.vip
            self.host3_post_info.gateway_ip = gateway_ip
            self.host3_post_info.rabbit_password = args.rabbit_password
            self.host3_post_info.mysql_password = args.mysql_root_password
            self.host3_post_info.mysql_userpassword = args.mysql_user_password
            self.host3_post_info.post_url = ""
            self.host_post_info_list.append(self.host3_post_info)
        # init all variables in map
        local_map = {
            "mysql_connect_timeout" : 60000,
            "mysql_socket_timeout" : 60000
        }
        if args.perfect_mode is True:
            local_map['mysql_connect_timeout'] = 2000
            local_map['mysql_socket_timeout'] = 2000
        # Idempotent shell snippet that appends the HA public key to
        # ~/.ssh/authorized_keys only if it is not already present.
        add_public_key_command = 'if [ ! -d ~/.ssh ]; then mkdir -p ~/.ssh; chmod 700 ~/.ssh; fi && if [ ! -f ~/.ssh/authorized_keys ]; ' \
                                 'then touch ~/.ssh/authorized_keys; chmod 600 ~/.ssh/authorized_keys; fi && pub_key="%s";grep ' \
                                 '"%s" ~/.ssh/authorized_keys > /dev/null; if [ $? -eq 1 ]; ' \
                                 'then echo "%s" >> ~/.ssh/authorized_keys; fi && exit 0;'\
                                 % (public_key.strip('\n'), public_key.strip('\n'), public_key.strip('\n'))
        # add ha public key to host1
        ssh_add_public_key_command = "sshpass -p %s ssh -q -o UserKnownHostsFile=/dev/null -o " \
                                     "PubkeyAuthentication=no -o StrictHostKeyChecking=no root@%s '%s'" % \
                                     (args.host1_password, args.host1, add_public_key_command)
        (status, output) = commands.getstatusoutput(ssh_add_public_key_command)
        if status != 0:
            error(output)
        # add ha public key to host2
        ssh_add_public_key_command = "sshpass -p %s ssh -q -o UserKnownHostsFile=/dev/null -o " \
                                     "PubkeyAuthentication=no -o StrictHostKeyChecking=no root@%s '%s' " % \
                                     (args.host2_password, args.host2, add_public_key_command)
        (status, output) = commands.getstatusoutput(ssh_add_public_key_command)
        if status != 0:
            error(output)
        # add ha public key to host3
        if args.host3_info is not False:
            ssh_add_public_key_command = "sshpass -p %s ssh -q -o UserKnownHostsFile=/dev/null -o " \
                                         "PubkeyAuthentication=no -o StrictHostKeyChecking=no root@%s '%s' " % \
                                         (args.host3_password, args.host3, add_public_key_command)
            (status,
             output) = commands.getstatusoutput(ssh_add_public_key_command)
            if status != 0:
                error(output)
        # sync ansible key in two host
        copy_arg = CopyArg()
        copy_arg.src = ctl.zstack_home + "/WEB-INF/classes/ansible/rsaKeys/"
        copy_arg.dest = ctl.zstack_home + "/WEB-INF/classes/ansible/rsaKeys/"
        copy(copy_arg,self.host2_post_info)
        command = "chmod 600 %s" % copy_arg.src + "id_rsa"
        run_remote_command(command, self.host2_post_info)
        if args.host3_info is not False:
            copy(copy_arg,self.host3_post_info)
            run_remote_command(command, self.host3_post_info)
        # check whether to recovery the HA cluster
        if args.recovery_from_this_host is True:
            # Recovery path: re-bootstrap Galera from the local host, bring
            # all HA services back, resync monitor data, restart Mevoco,
            # then exit without reinstalling anything.
            if os.path.exists(InstallHACmd.conf_file) and InstallHACmd.ha_config_content is not None and args.bridge is None:
                if "bridge_name" in InstallHACmd.ha_config_content:
                    InstallHACmd.bridge = InstallHACmd.ha_config_content['bridge_name']
            local_ip = get_ip_by_interface(InstallHACmd.bridge)
            if local_ip != args.host1 and local_ip != args.host2:
                if args.host3_info is not False:
                    if local_ip != args.host3:
                        error("Make sure you are running the 'zs-network-setting' command on host1 or host2 or host3")
                else:
                    error("Make sure you are running the 'zs-network-setting' command on host1 or host2")
            # stop mevoco
            spinner_info = SpinnerInfo()
            spinner_info.output = "Stop Mevoco on all management nodes"
            spinner_info.name = "stop_mevoco"
            SpinnerInfo.spinner_status = reset_dict_value(SpinnerInfo.spinner_status, False)
            SpinnerInfo.spinner_status['stop_mevoco'] = True
            ZstackSpinner(spinner_info)
            for host_info in self.host_post_info_list:
                stop_mevoco(host_info)
            spinner_info = SpinnerInfo()
            spinner_info.output = "Starting to recovery mysql from this host"
            spinner_info.name = "recovery_cluster"
            SpinnerInfo.spinner_status = reset_dict_value(SpinnerInfo.spinner_status,False)
            SpinnerInfo.spinner_status['recovery_cluster'] = True
            ZstackSpinner(spinner_info)
            # kill mysql process to make sure mysql bootstrap can work
            # (4567 is the Galera replication port — anything still bound
            # there blocks the bootstrap).
            service_status("mysql", "state=stopped", self.host1_post_info)
            mysqld_status = run_remote_command("netstat -ltnp | grep :4567[[:space:]]", self.host1_post_info, return_status=True)
            if mysqld_status is True:
                run_remote_command("lsof -i tcp:4567 | awk 'NR!=1 {print $2}' | xargs kill -9", self.host1_post_info)
            service_status("mysql", "state=stopped", self.host2_post_info)
            mysqld_status = run_remote_command("netstat -ltnp | grep :4567[[:space:]] ", self.host2_post_info, return_status=True)
            if mysqld_status is True:
                run_remote_command("lsof -i tcp:4567 | awk 'NR!=1 {print $2}' | xargs kill -9", self.host2_post_info)
            if args.host3_info is not False:
                service_status("mysql", "state=stopped", self.host3_post_info)
                mysqld_status = run_remote_command("netstat -ltnp | grep :4567[[:space:]]", self.host3_post_info, return_status=True)
                if mysqld_status is True:
                    run_remote_command("lsof -i tcp:4567 | awk 'NR!=1 {print $2}' | xargs kill -9", self.host3_post_info)
            # Bootstrap Galera on the local host, then start the remaining
            # members and finally restart the local node so it rejoins as a
            # normal member. Services are ordered so the VIP lands here
            # (haproxy/keepalived first on the local host).
            command = "service mysql bootstrap"
            (status, output) = commands.getstatusoutput(command)
            if status != 0:
                error(output)
            else:
                #command = "service mysql start"
                if local_ip == self.host1_post_info.host:
                    # make sure vip will be on this host, so start haproxy firstly
                    service_status("haproxy","state=started", self.host1_post_info)
                    service_status("keepalived","state=started", self.host1_post_info)
                    service_status("rabbitmq-server","state=started", self.host1_post_info)
                    #run_remote_command(command, self.host2_post_info)
                    service_status("mysql","state=started", self.host2_post_info)
                    service_status("haproxy","state=started", self.host2_post_info)
                    service_status("keepalived","state=started", self.host2_post_info)
                    service_status("rabbitmq-server","state=started", self.host2_post_info)
                    if args.host3_info is not False:
                        #run_remote_command(command, self.host3_post_info)
                        service_status("mysql","state=started", self.host3_post_info)
                        service_status("haproxy","state=started", self.host3_post_info)
                        service_status("keepalived","state=started", self.host3_post_info)
                        service_status("rabbitmq-server","state=started", self.host3_post_info)
                    #command = "service mysql restart"
                    #run_remote_command(command, self.host1_post_info)
                    service_status("mysql","state=restarted", self.host1_post_info)
                elif local_ip == self.host2_post_info.host:
                    service_status("haproxy","state=started", self.host2_post_info)
                    service_status("keepalived","state=started", self.host2_post_info)
                    service_status("rabbitmq-server","state=started", self.host2_post_info)
                    #run_remote_command(command, self.host1_post_info)
                    service_status("mysql","state=started", self.host1_post_info)
                    service_status("haproxy","state=started", self.host1_post_info)
                    service_status("keepalived","state=started", self.host1_post_info)
                    service_status("rabbitmq-server","state=started", self.host1_post_info)
                    if args.host3_info is not False:
                        #run_remote_command(command, self.host3_post_info)
                        service_status("mysql","state=started", self.host3_post_info)
                        service_status("haproxy","state=started", self.host3_post_info)
                        service_status("keepalived","state=started", self.host3_post_info)
                        service_status("rabbitmq-server","state=started", self.host3_post_info)
                    #command = "service mysql restart"
                    #run_remote_command(command, self.host2_post_info)
                    service_status("mysql","state=restarted", self.host2_post_info)
                else:
                    # localhost must be host3
                    service_status("haproxy","state=started", self.host3_post_info)
                    service_status("keepalived","state=started", self.host3_post_info)
                    service_status("rabbitmq-server","state=started", self.host3_post_info)
                    #run_remote_command(command, self.host1_post_info)
                    service_status("mysql","state=started", self.host1_post_info)
                    service_status("haproxy","state=started", self.host1_post_info)
                    service_status("keepalived","state=started", self.host1_post_info)
                    service_status("rabbitmq-server","state=started", self.host1_post_info)
                    service_status("mysql","state=started", self.host2_post_info)
                    service_status("haproxy","state=started", self.host2_post_info)
                    service_status("keepalived","state=started", self.host2_post_info)
                    service_status("rabbitmq-server","state=started", self.host2_post_info)
                    #command = "service mysql restart"
                    #run_remote_command(command, self.host2_post_info)
                    service_status("mysql","state=restarted", self.host3_post_info)
            # sync prometheus data
            sync_arg = SyncArg()
            sync_arg.src = '/var/lib/zstack/prometheus/'
            sync_arg.dest = '/var/lib/zstack/prometheus/'
            sync(sync_arg, self.host2_post_info)
            if args.host3_info is not False:
                sync(sync_arg, self.host3_post_info)
            # start mevoco
            spinner_info.output = "Starting Mevoco"
            spinner_info.name = "mevoco"
            SpinnerInfo.spinner_status = reset_dict_value(SpinnerInfo.spinner_status,False)
            SpinnerInfo.spinner_status['mevoco'] = True
            ZstackSpinner(spinner_info)
            for host_info in self.host_post_info_list:
                start_mevoco(host_info)
            SpinnerInfo.spinner_status['mevoco'] = False
            time.sleep(.2)
            info("The cluster has been recovered!")
            # Recovery done — skip the fresh-install path entirely.
            sys.exit(0)
        # generate ha config
        host_list = "%s,%s" % (self.host1_post_info.host, self.host2_post_info.host)
        if args.host3_info is not False:
            host_list = "%s,%s,%s" % (self.host1_post_info.host, self.host2_post_info.host, self.host3_post_info.host)
        # NOTE(review): ha_conf_file is opened with 'w' and never explicitly
        # closed — relies on interpreter cleanup; a with-block would be safer.
        ha_conf_file = open(InstallHACmd.conf_file, 'w')
        ha_info = {'vip':args.vip, 'gateway':self.host1_post_info.gateway_ip, 'bridge_name':InstallHACmd.bridge,
                   'mevoco_url':'http://' + args.vip + ':8888', 'cluster_url':'http://'+ args.vip +':9132/zstack', 'host_list':host_list}
        yaml.dump(ha_info, ha_conf_file, default_flow_style=False)
        # Replicate the HA conf dir (including the private key) to the
        # other members.
        command = "mkdir -p %s" % InstallHACmd.conf_dir
        run_remote_command(command, self.host2_post_info)
        if len(self.host_post_info_list) == 3:
            run_remote_command(command, self.host3_post_info)
        copy_arg = CopyArg()
        copy_arg.src = InstallHACmd.conf_dir
        copy_arg.dest = InstallHACmd.conf_dir
        copy(copy_arg,self.host2_post_info)
        command = "chmod 600 %s" % InstallHACmd.conf_dir + "ha_key"
        run_remote_command(command, self.host2_post_info)
        if len(self.host_post_info_list) == 3:
            copy(copy_arg,self.host3_post_info)
            run_remote_command(command, self.host3_post_info)
        # get iptables from system config
        service_status("iptables","state=restarted",self.host1_post_info)
        service_status("iptables","state=restarted",self.host2_post_info)
        if args.host3_info is not False:
            service_status("iptables","state=restarted",self.host3_post_info)
        # remove mariadb for avoiding conflict with mevoco install process
        command = "rpm -q mariadb | grep 'not installed' || yum remove -y mariadb"
        run_remote_command(command, self.host1_post_info)
        run_remote_command(command, self.host2_post_info)
        if args.host3_info is not False:
            run_remote_command(command, self.host3_post_info)
        # Fixed hostnames zstack-1/2/3 are required by the Galera/RabbitMQ
        # cluster configs generated later.
        command = "hostnamectl set-hostname zstack-1"
        run_remote_command(command, self.host1_post_info)
        command = "hostnamectl set-hostname zstack-2"
        run_remote_command(command, self.host2_post_info)
        if args.host3_info is not False:
            command = "hostnamectl set-hostname zstack-3"
            run_remote_command(command, self.host3_post_info)
        # remove old zstack-1 and zstack-2 in hosts file
        update_file("/etc/hosts", "regexp='\.*zstack\.*' state=absent", self.host1_post_info)
        update_file("/etc/hosts", "regexp='\.*zstack\.*' state=absent", self.host2_post_info)
        update_file("/etc/hosts", "line='%s zstack-1'" % args.host1, self.host1_post_info)
        update_file("/etc/hosts", "line='%s zstack-2'" % args.host2, self.host1_post_info)
        if args.host3_info is not False:
            update_file("/etc/hosts", "line='%s zstack-3'" % args.host3, self.host1_post_info)
        update_file("/etc/hosts", "line='%s zstack-1'" % args.host1, self.host2_post_info)
        update_file("/etc/hosts", "line='%s zstack-2'" % args.host2, self.host2_post_info)
        if args.host3_info is not False:
            update_file("/etc/hosts", "line='%s zstack-3'" % args.host3, self.host2_post_info)
        if args.host3_info is not False:
            update_file("/etc/hosts", "line='%s zstack-1'" % args.host1, self.host3_post_info)
            update_file("/etc/hosts", "line='%s zstack-2'" % args.host2, self.host3_post_info)
            update_file("/etc/hosts", "line='%s zstack-3'" % args.host3, self.host3_post_info)
        #save iptables at last
        # Whitelist every peer's IP on every member (check-then-insert keeps
        # the rule idempotent across re-runs).
        command = " iptables -C INPUT -s %s/32 -j ACCEPT >/dev/null 2>&1 || iptables -I INPUT -s %s/32 -j ACCEPT" % (self.host2_post_info.host, self.host2_post_info.host)
        run_remote_command(command, self.host1_post_info)
        if args.host3_info is not False:
            command = " iptables -C INPUT -s %s/32 -j ACCEPT >/dev/null 2>&1 || iptables -I INPUT -s %s/32 -j ACCEPT" % (self.host3_post_info.host, self.host3_post_info.host)
            run_remote_command(command, self.host1_post_info)
        command = " iptables -C INPUT -s %s/32 -j ACCEPT >/dev/null 2>&1 || iptables -I INPUT -s %s/32 -j ACCEPT" % (self.host1_post_info.host, self.host1_post_info.host)
        run_remote_command(command, self.host2_post_info)
        if args.host3_info is not False:
            command = " iptables -C INPUT -s %s/32 -j ACCEPT >/dev/null 2>&1 || iptables -I INPUT -s %s/32 -j ACCEPT" % (self.host3_post_info.host, self.host3_post_info.host)
            run_remote_command(command, self.host2_post_info)
        if args.host3_info is not False:
            command = " iptables -C INPUT -s %s/32 -j ACCEPT >/dev/null 2>&1 || iptables -I INPUT -s %s/32 -j ACCEPT" % (self.host1_post_info.host, self.host1_post_info.host)
            run_remote_command(command, self.host3_post_info)
            command = " iptables -C INPUT -s %s/32 -j ACCEPT >/dev/null 2>&1 || iptables -I INPUT -s %s/32 -j ACCEPT" % (self.host2_post_info.host, self.host2_post_info.host)
            run_remote_command(command, self.host3_post_info)
        # stop haproxy and keepalived service for avoiding terminal status disturb
        command = "service keepalived stop && service haproxy stop || echo True"
        run_remote_command(command, self.host1_post_info)
        run_remote_command(command, self.host2_post_info)
        if args.host3_info is not False:
            run_remote_command(command, self.host3_post_info)
        #pass all the variables to other HA deploy process
        InstallHACmd.host_post_info_list = [self.host1_post_info, self.host2_post_info]
        if args.host3_info is not False:
            InstallHACmd.host_post_info_list = [self.host1_post_info, self.host2_post_info, self.host3_post_info]
        # setup mysql ha
        spinner_info = SpinnerInfo()
        spinner_info.output = "Starting to deploy Mysql HA"
        spinner_info.name = 'mysql'
        SpinnerInfo.spinner_status = reset_dict_value(SpinnerInfo.spinner_status,False)
        SpinnerInfo.spinner_status['mysql'] = True
        ZstackSpinner(spinner_info)
        MysqlHA()()
        # setup rabbitmq ha
        spinner_info = SpinnerInfo()
        spinner_info.output ="Starting to deploy Rabbitmq HA"
        spinner_info.name = 'rabbitmq'
        SpinnerInfo.spinner_status = reset_dict_value(SpinnerInfo.spinner_status,False)
        SpinnerInfo.spinner_status['rabbitmq'] = True
        ZstackSpinner(spinner_info)
        RabbitmqHA()()
        # setup haproxy and keepalived
        spinner_info = SpinnerInfo()
        spinner_info.output = "Starting to deploy Haproxy and Keepalived"
        spinner_info.name = 'haproxy_keepalived'
        SpinnerInfo.spinner_status = reset_dict_value(SpinnerInfo.spinner_status,False)
        SpinnerInfo.spinner_status['haproxy_keepalived'] = True
        ZstackSpinner(spinner_info)
        HaproxyKeepalived()()
        #install database on local management node
        command = "zstack-ctl stop"
        run_remote_command(command, self.host1_post_info)
        run_remote_command(command, self.host2_post_info)
        if args.host3_info is not False:
            run_remote_command(command, self.host3_post_info)
        if args.keep_db is True:
            command = "zstack-ctl deploydb --keep-db --host=%s --port=3306 --zstack-password=%s --root-password=%s" \
                      % (args.host1, args.mysql_user_password, args.mysql_root_password)
            run_remote_command(command, self.host1_post_info)
        elif args.drop is True:
            command = "zstack-ctl deploydb --drop --host=%s --port=3306 --zstack-password=%s --root-password=%s" \
                      % (args.host1, args.mysql_user_password, args.mysql_root_password)
            run_remote_command(command, self.host1_post_info)
        else:
            command = "zstack-ctl deploydb --host=%s --port=3306 --zstack-password=%s --root-password=%s" \
                      % (args.host1, args.mysql_user_password, args.mysql_root_password)
            run_remote_command(command, self.host1_post_info)
        # Point the DB URL at the VIP through haproxy's 53306 frontend.
        command = "zstack-ctl configure DB.url=jdbc:mysql://%s:53306/{database}?connectTimeout=%d\&socketTimeout=%d"\
                  % (args.vip, local_map['mysql_connect_timeout'], local_map['mysql_socket_timeout'])
        run_remote_command(command, self.host1_post_info)
        # NOTE(review): rabbitmqPassword is set from args.mysql_user_password
        # here, but the update_file below writes args.rabbit_password — one of
        # the two is wrong (presumably this one should use rabbit_password);
        # confirm which value the broker is actually provisioned with.
        command = "zstack-ctl configure CloudBus.rabbitmqPassword=%s" % args.mysql_user_password
        run_remote_command(command, self.host1_post_info)
        # copy zstack-1 property to zstack-2 and update the management.server.ip
        # update zstack-1 firstly
        update_file("%s" % ctl.properties_file_path,
                    "regexp='^CloudBus\.serverIp\.0' line='CloudBus.serverIp.0=%s'" % args.vip, self.host1_post_info)
        update_file("%s" % ctl.properties_file_path,
                    "regexp='^CloudBus\.serverIp\.1' state=absent" , self.host1_post_info)
        update_file("%s" % ctl.properties_file_path,
                    "regexp='^CloudBus\.rabbitmqUsername' line='CloudBus.rabbitmqUsername=zstack'", self.host1_post_info)
        update_file("%s" % ctl.properties_file_path,
                    "regexp='^CloudBus\.rabbitmqPassword' line='CloudBus.rabbitmqPassword=%s'" % args.rabbit_password, self.host1_post_info)
        update_file("%s" % ctl.properties_file_path,
                    "regexp='^CloudBus\.rabbitmqHeartbeatTimeout' line='CloudBus.rabbitmqHeartbeatTimeout=10'", self.host1_post_info)
        update_file("%s" % ctl.properties_file_path,
                    "regexp='management\.server\.ip' line='management.server.ip = %s'" % args.host1, self.host1_post_info)
        copy_arg = CopyArg()
        copy_arg.src = ctl.properties_file_path
        copy_arg.dest = ctl.properties_file_path
        copy(copy_arg, self.host2_post_info)
        update_file("%s" % ctl.properties_file_path,
                    "regexp='management\.server\.ip' line='management.server.ip = %s'" % args.host2, self.host2_post_info)
        if args.host3_info is not False:
            copy(copy_arg, self.host3_post_info)
            update_file("%s" % ctl.properties_file_path,
                        "regexp='management\.server\.ip' line='management.server.ip = %s'" % args.host3, self.host3_post_info)
        #finally, start zstack-1 and zstack-2
        spinner_info = SpinnerInfo()
        spinner_info.output = "Starting Mevoco"
        spinner_info.name = "mevoco"
        SpinnerInfo.spinner_status = reset_dict_value(SpinnerInfo.spinner_status,False)
        SpinnerInfo.spinner_status['mevoco'] = True
        ZstackSpinner(spinner_info)
        # Add zstack-ctl start to rc.local for auto recovery when system reboot
        command = "service iptables save"
        run_remote_command(command, self.host1_post_info)
        run_remote_command(command, self.host2_post_info)
        if args.host3_info is not False:
            run_remote_command(command, self.host3_post_info)
        command = "zstack-ctl install_ui"
        run_remote_command(command, self.host1_post_info)
        run_remote_command(command, self.host2_post_info)
        if args.host3_info is not False:
            run_remote_command(command, self.host3_post_info)
        command = "zstack-ctl start"
        (status, output)= commands.getstatusoutput("ssh -o StrictHostKeyChecking=no -i %s root@%s '%s'"
                                                   % (private_key_name, args.host1, command))
        if status != 0:
            error("Something wrong on host: %s\n %s" % (args.host1, output))
        (status, output)= commands.getstatusoutput("ssh -o StrictHostKeyChecking=no -i %s root@%s '%s'"
                                                   % (private_key_name, args.host2, command))
        if status != 0:
            error("Something wrong on host: %s\n %s" % (args.host2, output))
        if args.host3_info is not False:
            (status, output)= commands.getstatusoutput("ssh -o StrictHostKeyChecking=no -i %s root@%s '%s'"
                                                       % (private_key_name, args.host3, command))
            if status != 0:
                error("Something wrong on host: %s\n %s" % (args.host3, output))
        SpinnerInfo.spinner_status['mevoco'] = False
        time.sleep(0.2)
        #sync imagestore key
        copy_arg = CopyArg()
        copy_arg.src = ctl.zstack_home + "/../../../imagestore/bin/certs/"
        copy_arg.dest = ctl.zstack_home + "/../../../imagestore/bin/certs/"
        copy(copy_arg, self.host2_post_info)
        if args.host3_info is not False:
            # NOTE(review): this branch copies to host2_post_info again —
            # almost certainly should be self.host3_post_info, otherwise
            # host3 never receives the imagestore certs. Confirm and fix.
            copy(copy_arg, self.host2_post_info)
        print '''HA deploy finished!
Mysql user 'root' password: %s
Mysql user 'zstack' password: %s
Rabbitmq user 'zstack' password: %s
Mevoco is running, visit %s in Chrome or Firefox with default user/password : %s
You can check the cluster status at %s with user/passwd : %s
''' % (args.mysql_root_password, args.mysql_user_password, args.rabbit_password,
       colored('http://%s:8888' % args.vip, 'blue'), colored('admin/password', 'yellow'),
       colored('http://%s:9132/zstack' % args.vip, 'blue'), colored('zstack/zstack123', 'yellow'))

class HaproxyKeepalived(InstallHACmd):
    # Sub-deployer invoked by InstallHACmd.run(); reads the shared
    # InstallHACmd.host_post_info_list populated by the parent command.
    def __init__(self):
        super(HaproxyKeepalived, self).__init__()
        self.name = "haproxy and keepalived init"
        self.description = "haproxy and keepalived setup"
        self.host_post_info_list = InstallHACmd.host_post_info_list
        self.host1_post_info = self.host_post_info_list[0]
        self.host2_post_info = self.host_post_info_list[1]
        if len(self.host_post_info_list) == 3:
            self.host3_post_info = self.host_post_info_list[2]
        self.yum_repo = self.host1_post_info.yum_repo
        self.vip = self.host1_post_info.vip
        self.gateway = self.host1_post_info.gateway_ip
    def __call__(self):
        # Install haproxy/keepalived from the offline repo on every member
        # (only the packages rpm reports as "not installed"), then enable
        # remote syslog reception so haproxy can log via rsyslog.
        command = ("yum clean --enablerepo=zstack-local metadata && pkg_list=`rpm -q haproxy keepalived"
                   " | grep \"not installed\" | awk '{ print $2 }'` && for pkg in $pkg_list; do yum "
                   "--disablerepo=* --enablerepo=%s install -y $pkg; done;") % self.yum_repo
        run_remote_command(command, self.host1_post_info)
        run_remote_command(command, self.host2_post_info)
        if len(self.host_post_info_list) == 3:
            run_remote_command(command, self.host3_post_info)
        update_file("/etc/sysconfig/rsyslog","regexp='^SYSLOGD_OPTIONS=\"\"' line='SYSLOGD_OPTIONS=\"-r -m 0\"'", self.host1_post_info)
        update_file("/etc/sysconfig/rsyslog","regexp='^SYSLOGD_OPTIONS=\"\"' line='SYSLOGD_OPTIONS=\"-r -m 0\"'", self.host2_post_info)
        if len(self.host_post_info_list) == 3:
            update_file("/etc/sysconfig/rsyslog","regexp='^SYSLOGD_OPTIONS=\"\"' line='SYSLOGD_OPTIONS=\"-r -m 0\"'", self.host3_post_info)
        # Make rsyslog listen on UDP 514 and route haproxy's local2 facility to
        # /var/log/haproxy.log on every management node (haproxy logs via syslog).
        update_file("/etc/rsyslog.conf","line='$ModLoad imudp'", self.host1_post_info)
        update_file("/etc/rsyslog.conf","line='$UDPServerRun 514'", self.host1_post_info)
        update_file("/etc/rsyslog.conf","line='local2.* /var/log/haproxy.log'", self.host1_post_info)
        update_file("/etc/rsyslog.conf","line='$ModLoad imudp'", self.host2_post_info)
        update_file("/etc/rsyslog.conf","line='$UDPServerRun 514'", self.host2_post_info)
        update_file("/etc/rsyslog.conf","line='local2.* /var/log/haproxy.log'", self.host2_post_info)
        if len(self.host_post_info_list) == 3:
            update_file("/etc/rsyslog.conf","line='$ModLoad imudp'", self.host3_post_info)
            update_file("/etc/rsyslog.conf","line='$UDPServerRun 514'", self.host3_post_info)
            update_file("/etc/rsyslog.conf","line='local2.* /var/log/haproxy.log'", self.host3_post_info)
        # Pre-create the haproxy log file and hand it to the haproxy user so
        # logging works immediately after the service starts.
        command = "touch /var/log/haproxy.log"
        run_remote_command(command, self.host1_post_info)
        run_remote_command(command, self.host2_post_info)
        if len(self.host_post_info_list) == 3:
            run_remote_command(command, self.host3_post_info)
        file_operation("/var/log/haproxy.log","owner=haproxy group=haproxy", self.host1_post_info)
        file_operation("/var/log/haproxy.log","owner=haproxy group=haproxy", self.host2_post_info)
        if len(self.host_post_info_list) == 3:
            file_operation("/var/log/haproxy.log","owner=haproxy group=haproxy", self.host3_post_info)
        service_status("rsyslog","state=restarted enabled=yes", self.host1_post_info)
        service_status("rsyslog","state=restarted enabled=yes", self.host2_post_info)
        if len(self.host_post_info_list) == 3:
            service_status("rsyslog","state=restarted enabled=yes", self.host3_post_info)
        # jinja2 template for haproxy.cfg (2-node cluster): load-balances mysql
        # (53306), rabbitmq (55672) and the UI (8888), and exposes a stats page
        # on 9132. mysql/rabbitmq on host2 are "backup" so writes stick to host1.
        haproxy_raw_conf = '''
global
    log         127.0.0.1 local2 emerg alert crit err warning notice info debug
    chroot      /var/lib/haproxy
    pidfile     /var/run/haproxy.pid
    maxconn     4000
    user        haproxy
    group       haproxy
    daemon
    # turn on stats unix socket
    stats socket /var/lib/haproxy/stats
#---------------------------------------------------------------------
# common defaults that all the 'listen' and 'backend' sections will
# use if not designated in their block
#---------------------------------------------------------------------
defaults
    mode                    http
    log                     global
    option                  httplog
    option                  dontlognull
    option http-server-close
    option forwardfor       except 127.0.0.0/8
    option                  redispatch
    retries                 3
    timeout http-request    10s
    timeout queue           1m
    timeout connect         10s
    timeout client          1m
    timeout server          1m
    timeout http-keep-alive 1m
    timeout check           1m
    timeout tunnel          60m
    maxconn                 6000

listen  admin_stats 0.0.0.0:9132
    mode        http
    stats uri   /zstack
    stats realm     Global\ statistics
    stats auth  zstack:zstack123

listen  proxy-mysql 0.0.0.0:53306
    mode tcp
    option tcplog
    balance source
    option httpchk OPTIONS * HTTP/1.1\\r\\nHost:\ www
    server zstack-1 {{ host1 }}:3306 weight 10 check port 6033 inter 3s rise 2 fall 2
    server zstack-2 {{ host2 }}:3306 backup weight 10 check port 6033 inter 3s rise 2 fall 2
    option tcpka

listen  proxy-rabbitmq 0.0.0.0:55672
    mode tcp
    balance source
    timeout client  3h
    timeout server  3h
    server zstack-1 {{ host1 }}:5672 weight 10 check inter 3s rise 2 fall 2
    server zstack-2 {{ host2 }}:5672 backup weight 10 check inter 3s rise 2 fall 2
    option tcpka

# dashboard not installed, so haproxy will report error
listen  proxy-ui 0.0.0.0:8888
    mode http
    option http-server-close
    balance source
    server zstack-1 {{ host1 }}:5000 weight 10 check inter 3s rise 2 fall 2
    server zstack-2 {{ host2 }}:5000 weight 10 check inter 3s rise 2 fall 2
    option tcpka
'''
        if len(self.host_post_info_list) == 3:
            # 3-node cluster: replace the template wholesale with one that also
            # lists zstack-3 backends.
            haproxy_raw_conf = '''
global
    log         127.0.0.1 local2 emerg alert crit err warning notice info debug
    chroot      /var/lib/haproxy
    pidfile     /var/run/haproxy.pid
    maxconn     4000
    user        haproxy
    group       haproxy
    daemon
    # turn on stats unix socket
    stats socket /var/lib/haproxy/stats
#---------------------------------------------------------------------
# common defaults that all the 'listen' and 'backend' sections will
# use if not designated in their block
#---------------------------------------------------------------------
defaults
    mode                    http
    log                     global
    option                  httplog
    option                  dontlognull
    option http-server-close
    option forwardfor       except 127.0.0.0/8
    option                  redispatch
    retries                 3
    timeout http-request    10s
    timeout queue           1m
    timeout connect         10s
    timeout client          1m
    timeout server          1m
    timeout http-keep-alive 1m
    timeout check           1m
    timeout tunnel          60m
    maxconn                 6000

listen  admin_stats 0.0.0.0:9132
    mode        http
    stats uri   /zstack
    stats realm     Global\ statistics
    stats auth  zstack:zstack123

listen  proxy-mysql 0.0.0.0:53306
    mode tcp
    option tcplog
    balance source
    option httpchk OPTIONS * HTTP/1.1\\r\\nHost:\ www
    server zstack-1 {{ host1 }}:3306 weight 10 check port 6033 inter 3s rise 2 fall 2
    server zstack-2 {{ host2 }}:3306 backup weight 10 check port 6033 inter 3s rise 2 fall 2
    server zstack-3 {{ host3 }}:3306 backup weight 10 check port 6033 inter 3s rise 2 fall 2
    option tcpka

listen  proxy-rabbitmq 0.0.0.0:55672
    mode tcp
    balance source
    timeout client  3h
    timeout server  3h
    server zstack-1 {{ host1 }}:5672 weight 10 check inter 3s rise 2 fall 2
    server zstack-2 {{ host2 }}:5672 backup weight 10 check inter 3s rise 2 fall 2
    server zstack-3 {{ host3 }}:5672 backup weight 10 check inter 3s rise 2 fall 2
    option tcpka

# dashboard not installed, so haproxy will report error
listen  proxy-ui 0.0.0.0:8888
    mode http
    option http-server-close
    balance source
    server zstack-1 {{ host1 }}:5000 weight 10 check inter 3s rise 2 fall 2
    server zstack-2 {{ host2 }}:5000 weight 10 check inter 3s rise 2 fall 2
    server zstack-3 {{ host3 }}:5000 weight 10 check inter 3s rise 2 fall 2
    option tcpka
'''
        # Render once; the 3-node render overwrites the 2-node result.
        haproxy_conf_template = jinja2.Template(haproxy_raw_conf)
        haproxy_host1_conf = haproxy_conf_template.render({
            'host1' : self.host1_post_info.host,
            'host2' : self.host2_post_info.host
        })
        if len(self.host_post_info_list) == 3:
            haproxy_host1_conf = haproxy_conf_template.render({
                'host1' : self.host1_post_info.host,
                'host2' : self.host2_post_info.host,
                'host3' : self.host3_post_info.host
            })
        # The host1 and host2 and host3 use the same config file
host1_config, haproxy_host1_conf_file = tempfile.mkstemp() f1 = os.fdopen(host1_config, 'w') f1.write(haproxy_host1_conf) f1.close() def cleanup_haproxy_config_file(): os.remove(haproxy_host1_conf_file) self.install_cleanup_routine(cleanup_haproxy_config_file) copy_arg = CopyArg() copy_arg.src = haproxy_host1_conf_file copy_arg.dest = "/etc/haproxy/haproxy.cfg" copy(copy_arg,self.host1_post_info) copy(copy_arg,self.host2_post_info) if len(self.host_post_info_list) == 3: copy(copy_arg,self.host3_post_info) #config haproxy firewall command = "iptables -C INPUT -p tcp -m tcp --dport 53306 -j ACCEPT > /dev/null 2>&1 || iptables -I INPUT -p tcp -m tcp --dport 53306 -j ACCEPT; " \ "iptables -C INPUT -p tcp -m tcp --dport 58080 -j ACCEPT > /dev/null 2>&1 || iptables -I INPUT -p tcp -m tcp --dport 58080 -j ACCEPT ; " \ "iptables -C INPUT -p tcp -m tcp --dport 55672 -j ACCEPT > /dev/null 2>&1 || iptables -I INPUT -p tcp -m tcp --dport 55672 -j ACCEPT ; " \ "iptables -C INPUT -p tcp -m tcp --dport 80 -j ACCEPT > /dev/null 2>&1 || iptables -I INPUT -p tcp -m tcp --dport 80 -j ACCEPT ; " \ "iptables -C INPUT -p tcp -m tcp --dport 9132 -j ACCEPT > /dev/null 2>&1 || iptables -I INPUT -p tcp -m tcp --dport 9132 -j ACCEPT ; " \ "iptables -C INPUT -p tcp -m tcp --dport 8888 -j ACCEPT > /dev/null 2>&1 || iptables -I INPUT -p tcp -m tcp --dport 8888 -j ACCEPT ; " \ "iptables -C INPUT -p tcp -m tcp --dport 6033 -j ACCEPT > /dev/null 2>&1 || iptables -I INPUT -p tcp -m tcp --dport 6033 -j ACCEPT; service iptables save " run_remote_command(command, self.host1_post_info) run_remote_command(command, self.host2_post_info) if len(self.host_post_info_list) == 3: run_remote_command(command, self.host3_post_info) #config keepalived keepalived_raw_config = ''' ! 
Configuration File for keepalived global_defs { router_id HAPROXY_LOAD } vrrp_script Monitor_Haproxy { script "/usr/local/bin/keepalived-kill.sh" interval 2 weight 5 } vrrp_instance VI_1 { # use the same state with host2, so no master node, recovery will not race to control the vip state BACKUP interface {{ bridge }} virtual_router_id {{ vrouter_id }} priority {{ priority }} nopreempt advert_int 1 authentication { auth_type PASS auth_pass {{ auth_passwd }} } track_script { Monitor_Haproxy } virtual_ipaddress { {{ vip }}/{{ netmask }} label {{ bridge }}:0 } } ''' virtual_router_id = random.randint(1, 255) auth_pass = ''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(15)) master_priority = 92 slave_priority = 91 second_slave_priority = 90 keepalived_template = jinja2.Template(keepalived_raw_config) keepalived_host1_config = keepalived_template.render({ 'bridge' : InstallHACmd.bridge, 'vrouter_id': virtual_router_id, 'priority': master_priority, 'auth_passwd': auth_pass, 'vip': self.vip, 'netmask': self.get_formatted_netmask(InstallHACmd.bridge) }) keepalived_host2_config = keepalived_template.render({ 'bridge' : InstallHACmd.bridge, 'vrouter_id': virtual_router_id, 'priority': slave_priority, 'auth_passwd': auth_pass, 'vip': self.vip, 'netmask': self.get_formatted_netmask(InstallHACmd.bridge) }) if len(self.host_post_info_list) == 3: keepalived_host3_config = keepalived_template.render({ 'vrouter_id': virtual_router_id, 'priority': second_slave_priority, 'auth_passwd': auth_pass, 'vip': self.vip, 'netmask': self.get_formatted_netmask(InstallHACmd.bridge) }) host1_config, keepalived_host1_config_file = tempfile.mkstemp() f1 = os.fdopen(host1_config, 'w') f1.write(keepalived_host1_config) f1.close() host2_config, keepalived_host2_config_file = tempfile.mkstemp() f2 = os.fdopen(host1_config, 'w') f2.write(keepalived_host2_config) f2.close() if len(self.host_post_info_list) == 3: host3_config, keepalived_host3_config_file = tempfile.mkstemp() 
            f3 = os.fdopen(host3_config, 'w')
            f3.write(keepalived_host3_config)
            f3.close()
        # Remove all keepalived temp files when the install flow finishes.
        def cleanup_keepalived_config_file():
            os.remove(keepalived_host1_config_file)
            os.remove(keepalived_host2_config_file)
            if len(self.host_post_info_list) == 3:
                os.remove(keepalived_host3_config_file)
        self.install_cleanup_routine(cleanup_keepalived_config_file)
        # Push each node its own keepalived.conf.
        copy_arg = CopyArg()
        copy_arg.src = keepalived_host1_config_file
        copy_arg.dest = "/etc/keepalived/keepalived.conf"
        copy(copy_arg, self.host1_post_info)
        copy_arg = CopyArg()
        copy_arg.src = keepalived_host2_config_file
        copy_arg.dest = "/etc/keepalived/keepalived.conf"
        copy(copy_arg, self.host2_post_info)
        if len(self.host_post_info_list) == 3:
            copy_arg = CopyArg()
            copy_arg.src = keepalived_host3_config_file
            copy_arg.dest = "/etc/keepalived/keepalived.conf"
            copy(copy_arg, self.host3_post_info)
        # copy keepalived-kill.sh to host
        # (the script run by vrrp_script Monitor_Haproxy; must be executable)
        copy_arg = CopyArg()
        copy_arg.src = "%s/conf/keepalived-kill.sh" % InstallHACmd.current_dir
        copy_arg.dest = "/usr/local/bin/keepalived-kill.sh"
        copy_arg.args = "mode='u+x,g+x,o+x'"
        copy(copy_arg, self.host1_post_info)
        copy(copy_arg, self.host2_post_info)
        if len(self.host_post_info_list) == 3:
            copy(copy_arg, self.host3_post_info)
        # restart haproxy and keepalived
        service_status("keepalived", "state=restarted enabled=yes", self.host1_post_info)
        service_status("keepalived", "state=restarted enabled=yes", self.host2_post_info)
        service_status("haproxy", "state=restarted enabled=yes", self.host1_post_info)
        service_status("haproxy", "state=restarted enabled=yes", self.host2_post_info)
        if len(self.host_post_info_list) == 3:
            service_status("keepalived", "state=restarted enabled=yes", self.host3_post_info)
            service_status("haproxy", "state=restarted enabled=yes", self.host3_post_info)


class MysqlHA(InstallHACmd):
    # Sets up a MariaDB Galera cluster (2 or 3 nodes) for the HA deployment.
    # NOTE(review): unlike the sibling steps, __init__ does not set
    # self.name/self.description — presumably intentional; confirm.
    def __init__(self):
        super(MysqlHA, self).__init__()
        self.host_post_info_list = InstallHACmd.host_post_info_list
        self.host1_post_info = self.host_post_info_list[0]
        self.host2_post_info = self.host_post_info_list[1]
        if len(self.host_post_info_list) == 3:
            self.host3_post_info = self.host_post_info_list[2]
        self.yum_repo = self.host1_post_info.yum_repo
        self.mysql_password = self.host1_post_info.mysql_password

    def __call__(self):
        # Install Galera server + xinetd/rsync (used for the health check and
        # SST) from the local repos only where missing.
        command = ("yum clean --enablerepo=zstack-local metadata && pkg_list=`rpm -q MariaDB-Galera-server xinetd rsync openssl-libs "
                   " | grep \"not installed\" | awk '{ print $2 }'` && for pkg in $pkg_list; do yum "
                   "--disablerepo=* --enablerepo=%s,mariadb install -y $pkg; done;") % self.yum_repo
        run_remote_command(command, self.host1_post_info)
        run_remote_command(command, self.host2_post_info)
        if len(self.host_post_info_list) == 3:
            run_remote_command(command, self.host3_post_info)
        # Generate galera config file and copy to host1 host2
        # Template convention: {{ host1 }} is the node the file is written to
        # (bind_address); the remaining hosts form the gcomm:// cluster address.
        galera_raw_config= '''[mysqld]
skip-name-resolve=1
character-set-server=utf8
binlog_format=ROW
default-storage-engine=innodb
innodb_autoinc_lock_mode=2
innodb_locks_unsafe_for_binlog=1
max_connections=2048
query_cache_size=0
query_cache_type=0
bind_address= {{ host1 }}
wsrep_provider=/usr/lib64/galera/libgalera_smm.so
wsrep_cluster_name="galera_cluster"
wsrep_cluster_address="gcomm://{{ host2 }},{{ host1 }}"
wsrep_slave_threads=1
wsrep_certify_nonPK=1
wsrep_max_ws_rows=131072
wsrep_max_ws_size=1073741824
wsrep_debug=0
wsrep_convert_LOCK_to_trx=0
wsrep_retry_autocommit=1
wsrep_auto_increment_control=1
wsrep_drupal_282555_workaround=0
wsrep_causal_reads=0
wsrep_notify_cmd=
wsrep_sst_method=rsync
'''
        if len(self.host_post_info_list) == 3:
            # Generate galera config file and copy to host1 host2 host3
            galera_raw_config= '''[mysqld]
skip-name-resolve=1
character-set-server=utf8
binlog_format=ROW
default-storage-engine=innodb
innodb_autoinc_lock_mode=2
innodb_locks_unsafe_for_binlog=1
max_connections=2048
query_cache_size=0
query_cache_type=0
bind_address= {{ host1 }}
wsrep_provider=/usr/lib64/galera/libgalera_smm.so
wsrep_cluster_name="galera_cluster"
wsrep_cluster_address="gcomm://{{ host3 }},{{ host2 }},{{ host1 }}"
wsrep_slave_threads=1
wsrep_certify_nonPK=1
wsrep_max_ws_rows=131072
wsrep_max_ws_size=1073741824
wsrep_debug=0
wsrep_convert_LOCK_to_trx=0
wsrep_retry_autocommit=1
wsrep_auto_increment_control=1
wsrep_drupal_282555_workaround=0
wsrep_causal_reads=0
wsrep_notify_cmd=
wsrep_sst_method=rsync
'''
        # Render one config per node, rotating which host is "local".
        galera_config_template = jinja2.Template(galera_raw_config)
        galera_config_host1 = galera_config_template.render({
            'host1' : self.host1_post_info.host,
            'host2' : self.host2_post_info.host
        })
        if len(self.host_post_info_list) == 3:
            galera_config_host1 = galera_config_template.render({
                'host1' : self.host1_post_info.host,
                'host2' : self.host2_post_info.host,
                'host3' : self.host3_post_info.host
            })
        galera_config_host2 = galera_config_template.render({
            'host1' : self.host2_post_info.host,
            'host2' : self.host1_post_info.host
        })
        if len(self.host_post_info_list) == 3:
            galera_config_host2 = galera_config_template.render({
                'host1' : self.host2_post_info.host,
                'host2' : self.host3_post_info.host,
                'host3' : self.host1_post_info.host
            })
        if len(self.host_post_info_list) == 3:
            galera_config_host3 = galera_config_template.render({
                'host1' : self.host3_post_info.host,
                'host2' : self.host1_post_info.host,
                'host3' : self.host2_post_info.host
            })
        # Write the rendered configs to temp files for copying.
        host1_config, galera_config_host1_file = tempfile.mkstemp()
        f1 = os.fdopen(host1_config, 'w')
        f1.write(galera_config_host1)
        f1.close()
        host2_config, galera_config_host2_file = tempfile.mkstemp()
        f2 = os.fdopen(host2_config, 'w')
        f2.write(galera_config_host2)
        f2.close()
        if len(self.host_post_info_list) == 3:
            host3_config, galera_config_host3_file = tempfile.mkstemp()
            f3 = os.fdopen(host3_config, 'w')
            f3.write(galera_config_host3)
            f3.close()
        def cleanup_galera_config_file():
            os.remove(galera_config_host1_file)
            os.remove(galera_config_host2_file)
            if len(self.host_post_info_list) == 3:
                os.remove(galera_config_host3_file)
        self.install_cleanup_routine(cleanup_galera_config_file)
        copy_arg = CopyArg()
        copy_arg.src = galera_config_host1_file
        copy_arg.dest = "/etc/my.cnf.d/galera.cnf"
        copy(copy_arg, self.host1_post_info)
        copy_arg = CopyArg()
        copy_arg.src = galera_config_host2_file
        copy_arg.dest = "/etc/my.cnf.d/galera.cnf"
        copy(copy_arg, self.host2_post_info)
        if len(self.host_post_info_list) == 3:
            copy_arg = CopyArg()
            copy_arg.src = galera_config_host3_file
            copy_arg.dest = "/etc/my.cnf.d/galera.cnf"
            copy(copy_arg, self.host3_post_info)
        # restart mysql service to enable galera config
        command = "service mysql stop || true"
        #service_status("mysql", "state=stopped", self.host1_post_info)
        run_remote_command(command, self.host2_post_info)
        #last stop node should be the first node to do bootstrap
        run_remote_command(command, self.host1_post_info)
        if len(self.host_post_info_list) == 3:
            run_remote_command(command, self.host3_post_info)
        # Bootstrap the Galera cluster from host1, then join the other nodes.
        command = "service mysql bootstrap"
        run_remote_command(command, self.host1_post_info)
        run_remote_command("service mysql start && chkconfig mysql on", self.host2_post_info)
        if len(self.host_post_info_list) == 3:
            run_remote_command("service mysql start && chkconfig mysql on", self.host3_post_info)
        run_remote_command("service mysql restart && chkconfig mysql on", self.host1_post_info)
        # Fresh install is detected by root still having an empty password.
        init_install = run_remote_command("mysql -u root --password='' -e 'exit' ", self.host1_post_info, return_status=True)
        if init_install is True:
            #command = "mysql -u root --password='' -Bse \"show status like 'wsrep_%%';\""
            #galera_status = run_remote_command(command, self.host2_post_info)
            #create zstack user
            command =" mysql -u root --password='' -Bse 'grant ALL PRIVILEGES on *.* to zstack@\"localhost\" Identified by \"%s\"; " \
                     "grant ALL PRIVILEGES on *.* to zstack@\"zstack-1\" Identified by \"%s\"; " \
                     "grant ALL PRIVILEGES on *.* to zstack@\"%%\" Identified by \"%s\"; " \
                     "grant ALL PRIVILEGES on *.* to root@\"%%\" Identified by \"%s\";" \
                     "grant ALL PRIVILEGES on *.* to root@\"localhost\" Identified by \"%s\"; " \
                     "grant ALL PRIVILEGES ON *.* TO root@\"%%\" IDENTIFIED BY \"%s\" WITH GRANT OPTION; " \
                     "flush privileges;'" % (self.host1_post_info.mysql_userpassword, self.host1_post_info.mysql_userpassword,
                                             self.host1_post_info.mysql_userpassword, self.host1_post_info.mysql_password,
                                             self.host1_post_info.mysql_password, self.host1_post_info.mysql_password)
            # Retry once after 5s: Galera may still be settling right after restart.
            (status, output) = run_remote_command(command, self.host1_post_info, True, True)
            if status is False:
                time.sleep(5)
                (status, output) = run_remote_command(command, self.host1_post_info, True, True)
                if status is False:
                    error("Failed to set mysql 'zstack' and 'root' password, the reason is %s" % output)
        # config mysqlchk_status.sh on zstack-1 and zstack-2
        # Tiny HTTP responder (served via xinetd on 6033) used by haproxy's
        # "check port 6033" to probe mysql health.
        mysqlchk_raw_script = '''#!/bin/sh
MYSQL_HOST="{{ host1 }}"
MYSQL_PORT="3306"
MYSQL_USERNAME="{{ mysql_username }}"
MYSQL_PASSWORD="{{ mysql_password }}"
/usr/bin/mysql -h$MYSQL_HOST -u$MYSQL_USERNAME -p$MYSQL_PASSWORD -e "show databases;" > /dev/null
if [ "$?" -eq 0 ]
then
    # mysql is fine, return http 200
    /bin/echo -e "HTTP/1.1 200 OK"
    /bin/echo -e "Content-Type: Content-Type: text/plain"
    /bin/echo -e "MySQL is running."
else
    # mysql is fine, return http 503
    /bin/echo -e "HTTP/1.1 503 Service Unavailable"
    /bin/echo -e "Content-Type: Content-Type: text/plain"
    /bin/echo -e "MySQL is *down*."
fi
'''
        mysqlchk_template = jinja2.Template(mysqlchk_raw_script)
        mysqlchk_script_host1 = mysqlchk_template.render({
            'host1' : self.host1_post_info.host,
            'mysql_username' : "zstack",
            'mysql_password' : self.host1_post_info.mysql_userpassword
        })
        mysqlchk_script_host2 = mysqlchk_template.render({
            'host1' : self.host2_post_info.host,
            'mysql_username' : "zstack",
            'mysql_password' : self.host2_post_info.mysql_userpassword
        })
        if len(self.host_post_info_list) == 3:
            mysqlchk_script_host3 = mysqlchk_template.render({
                'host1' : self.host3_post_info.host,
                'mysql_username' : "zstack",
                'mysql_password' : self.host3_post_info.mysql_userpassword
            })
        host1_config, mysqlchk_script_host1_file = tempfile.mkstemp()
        f1 = os.fdopen(host1_config, 'w')
        f1.write(mysqlchk_script_host1)
        f1.close()
        host2_config, mysqlchk_script_host2_file = tempfile.mkstemp()
        f2 = os.fdopen(host2_config, 'w')
        f2.write(mysqlchk_script_host2)
        f2.close()
        if len(self.host_post_info_list) == 3:
            host3_config, mysqlchk_script_host3_file = tempfile.mkstemp()
            f3 = os.fdopen(host3_config, 'w')
            f3.write(mysqlchk_script_host3)
            f3.close()
        def cleanup_mysqlchk_script():
            os.remove(mysqlchk_script_host1_file)
            os.remove(mysqlchk_script_host2_file)
            if len(self.host_post_info_list) == 3:
                os.remove(mysqlchk_script_host3_file)
        self.install_cleanup_routine(cleanup_mysqlchk_script)
        # Install the health-check script (executable) on each node.
        copy_arg = CopyArg()
        copy_arg.src = mysqlchk_script_host1_file
        copy_arg.dest = "/usr/local/bin/mysqlchk_status.sh"
        copy_arg.args = "mode='u+x,g+x,o+x'"
        copy(copy_arg,self.host1_post_info)
        copy_arg = CopyArg()
        copy_arg.src = mysqlchk_script_host2_file
        copy_arg.dest = "/usr/local/bin/mysqlchk_status.sh"
        copy_arg.args = "mode='u+x,g+x,o+x'"
        copy(copy_arg,self.host2_post_info)
        if len(self.host_post_info_list) == 3:
            copy_arg = CopyArg()
            copy_arg.src = mysqlchk_script_host3_file
            copy_arg.dest = "/usr/local/bin/mysqlchk_status.sh"
            copy_arg.args = "mode='u+x,g+x,o+x'"
            copy(copy_arg,self.host3_post_info)
        # check network
        # Split-brain guard (2-node only, via cron): if the partner ($1) is
        # unreachable, check the gateway ($2); if the gateway is also down,
        # kill mysql, otherwise re-bootstrap Galera's primary component.
        check_network_raw_script='''#!/bin/bash
MYSQL_HOST="{{ host }}"
MYSQL_PORT="3306"
MYSQL_USERNAME="root"
MYSQL_PASSWORD="{{ mysql_root_password }}"
# Checking partner ...
ping -c 4 -w 4 $1 > /dev/null 2>&1
if [ $? -ne 0 ]; then
    # Checking gateway ...
    ping -c 4 -w 4 $2 > /dev/null 2>&1
    if [ $? -ne 0 ]; then
        echo "Network ERROR! Kill MySQL NOW!" >> /var/log/check-network.log
        pgrep -f mysql | xargs kill -9
    else
        echo "Setting the primary of Galera." >> /var/log/check-network.log
        /usr/bin/mysql -h$MYSQL_HOST -u$MYSQL_USERNAME -p$MYSQL_PASSWORD -e "SET GLOBAL wsrep_provider_options='pc.bootstrap=YES';" > /dev/null
    fi
fi
TIMEST=`date`
echo $TIMEST >> /var/log/check-network.log
'''
        galera_check_network = jinja2.Template(check_network_raw_script)
        galera_check_network_host1 = galera_check_network.render({
            'host' : self.host1_post_info.host,
            'mysql_root_password' : self.host1_post_info.mysql_password
        })
        galera_check_network_host2 = galera_check_network.render({
            'host' : self.host2_post_info.host,
            # NOTE(review): uses host1's mysql_password for host2 — presumably
            # the root password is identical cluster-wide; confirm.
            'mysql_root_password' : self.host1_post_info.mysql_password
        })
        host1_config, galera_check_network_host1_file = tempfile.mkstemp()
        f1 = os.fdopen(host1_config, 'w')
        f1.write(galera_check_network_host1)
        f1.close()
        host2_config, galera_check_network_host2_file = tempfile.mkstemp()
        f2 = os.fdopen(host2_config, 'w')
        f2.write(galera_check_network_host2)
        f2.close()
        def cleanup_gelerachk_script():
            os.remove(galera_check_network_host1_file)
            os.remove(galera_check_network_host2_file)
        self.install_cleanup_routine(cleanup_gelerachk_script)
        copy_arg = CopyArg()
        copy_arg.src = galera_check_network_host1_file
        copy_arg.dest = "/usr/local/zstack/check-network.sh"
        copy_arg.args = "mode='u+x,g+x,o+x'"
        copy(copy_arg,self.host1_post_info)
        copy_arg = CopyArg()
        copy_arg.src = galera_check_network_host2_file
        copy_arg.dest = "/usr/local/zstack/check-network.sh"
        copy_arg.args = "mode='u+x,g+x,o+x'"
        copy(copy_arg,self.host2_post_info)
        # set cron task for network status
        # Two staggered entries per node give a ~30s check interval.
        cron("check_node_2_status1","job=\"/usr/local/zstack/check-network.sh %s %s\"" % (self.host2_post_info.host, self.host2_post_info.gateway_ip), self.host1_post_info)
        cron("check_node_2_status2","job=\"sleep 30;/usr/local/zstack/check-network.sh %s %s\"" % (self.host2_post_info.host, self.host2_post_info.gateway_ip), self.host1_post_info)
        cron("check_node_1_status1","job=\"/usr/local/zstack/check-network.sh %s %s\"" % (self.host1_post_info.host, self.host1_post_info.gateway_ip), self.host2_post_info)
        cron("check_node_1_status2","job=\"sleep 30;/usr/local/zstack/check-network.sh %s %s\"" % (self.host1_post_info.host, self.host1_post_info.gateway_ip), self.host2_post_info)
        if len(self.host_post_info_list) == 3:
            # 3-node cluster has real quorum, so the 2-node split-brain cron
            # entries are removed (state=absent).
            cron("check_node_1_status1","job=\"/usr/local/zstack/check-network.sh %s %s\" state=absent" % (self.host1_post_info.host, self.host1_post_info.gateway_ip), self.host2_post_info)
            cron("check_node_1_status2","job=\"sleep 30;/usr/local/zstack/check-network.sh %s %s\" state=absent" % (self.host1_post_info.host, self.host1_post_info.gateway_ip), self.host2_post_info)
            cron("check_node_2_status1","job=\"/usr/local/zstack/check-network.sh %s %s\" state=absent" % (self.host2_post_info.host, self.host2_post_info.gateway_ip), self.host1_post_info)
            cron("check_node_2_status2","job=\"sleep 30;/usr/local/zstack/check-network.sh %s %s\" state=absent" % (self.host2_post_info.host, self.host2_post_info.gateway_ip), self.host1_post_info)
        #config xinetd for service check
        copy_arg = CopyArg()
        copy_arg.src = "%s/conf/mysql-check" % InstallHACmd.current_dir
        copy_arg.dest = "/etc/xinetd.d/mysql-check"
        copy(copy_arg,self.host1_post_info)
        copy(copy_arg,self.host2_post_info)
        if len(self.host_post_info_list) == 3:
            copy(copy_arg,self.host3_post_info)
        # add service name
        # Registers port 6033 so xinetd can serve mysqlchk_status.sh on it.
        update_file("/etc/services", "line='mysqlcheck 6033/tcp #MYSQL status check'", self.host1_post_info)
        update_file("/etc/services", "line='mysqlcheck 6033/tcp #MYSQL status check'", self.host2_post_info)
        if len(self.host_post_info_list) == 3:
            update_file("/etc/services", "line='mysqlcheck 6033/tcp #MYSQL status check'", self.host3_post_info)
        # start service
        command = "systemctl daemon-reload"
        run_remote_command(command,self.host1_post_info)
        run_remote_command(command,self.host2_post_info)
        if len(self.host_post_info_list) == 3:
            run_remote_command(command,self.host3_post_info)
        service_status("xinetd","state=restarted enabled=yes",self.host1_post_info)
        service_status("xinetd","state=restarted enabled=yes",self.host2_post_info)
        if len(self.host_post_info_list) == 3:
            service_status("xinetd","state=restarted enabled=yes",self.host3_post_info)
        # add crontab for backup mysql
        # Dump schedules are staggered across nodes so backups never coincide.
        cron("backup_zstack_db","minute='0' hour='1,13' job='/usr/bin/zstack-ctl dump_mysql >>"
             " /var/log/zstack/ha.log 2>&1' ", self.host1_post_info)
        cron("backup_zstack_db","minute='0' hour='7,19' job='/usr/bin/zstack-ctl dump_mysql >>"
             " /var/log/zstack/ha.log 2>&1' ", self.host2_post_info)
        if len(self.host_post_info_list) == 3:
            cron("backup_zstack_db","minute='0' hour='1' job='/usr/bin/zstack-ctl dump_mysql >>"
                 " /var/log/zstack/ha.log 2>&1' ", self.host1_post_info)
            cron("backup_zstack_db","minute='0' hour='9' job='/usr/bin/zstack-ctl dump_mysql >>"
                 " /var/log/zstack/ha.log 2>&1' ", self.host2_post_info)
            cron("backup_zstack_db","minute='0' hour='17' job='/usr/bin/zstack-ctl dump_mysql >>"
                 " /var/log/zstack/ha.log 2>&1' ", self.host3_post_info)
        service_status("crond","state=started enabled=yes",self.host1_post_info)
        service_status("crond","state=started enabled=yes",self.host2_post_info)
        if len(self.host_post_info_list) == 3:
            service_status("crond","state=started enabled=yes",self.host3_post_info)


class RabbitmqHA(InstallHACmd):
    # Installs rabbitmq-server on every HA node and provisions the 'zstack'
    # administrator account used by the management nodes.
    def __init__(self):
        super(RabbitmqHA, self).__init__()
        self.name = "rabbitmq ha"
        self.description = "rabbitmq HA setup"
        self.host_post_info_list = InstallHACmd.host_post_info_list
        self.host1_post_info = self.host_post_info_list[0]
        self.host2_post_info = self.host_post_info_list[1]
        if len(self.host_post_info_list) == 3:
            self.host3_post_info = self.host_post_info_list[2]
self.yum_repo = self.host1_post_info.yum_repo self.rabbit_password= self.host1_post_info.rabbit_password def __call__(self): command = ("yum clean --enablerepo=zstack-local metadata && pkg_list=`rpm -q rabbitmq-server" " | grep \"not installed\" | awk '{ print $2 }'` && for pkg in $pkg_list; do yum " "--disablerepo=* --enablerepo=%s,mariadb install -y $pkg; done;") % self.yum_repo run_remote_command(command, self.host1_post_info) run_remote_command(command, self.host2_post_info) if len(self.host_post_info_list) == 3: run_remote_command(command, self.host3_post_info) # clear erlang process for new deploy command = "echo True || pkill -f .*erlang.* > /dev/null 2>&1 && rm -rf /var/lib/rabbitmq/* " run_remote_command(command, self.host1_post_info) run_remote_command(command, self.host2_post_info) if len(self.host_post_info_list) == 3: run_remote_command(command, self.host3_post_info) # to stop rabbitmq-server for new installation service_status("rabbitmq-server","state=stopped", self.host1_post_info, True) service_status("rabbitmq-server", "state=stopped", self.host2_post_info, True) if len(self.host_post_info_list) == 3: service_status("rabbitmq-server", "state=stopped", self.host3_post_info, True) # to start rabbitmq-server service_status("rabbitmq-server","state=started enabled=yes", self.host1_post_info) service_status("rabbitmq-server", "state=started enabled=yes", self.host2_post_info) if len(self.host_post_info_list) == 3: service_status("rabbitmq-server", "state=started enabled=yes", self.host3_post_info) # add zstack user in this cluster command = "rabbitmqctl add_user zstack %s" % self.rabbit_password run_remote_command(command, self.host1_post_info) run_remote_command(command, self.host2_post_info) if len(self.host_post_info_list) == 3: run_remote_command(command, self.host3_post_info) command = "rabbitmqctl set_user_tags zstack administrator" run_remote_command(command, self.host1_post_info) run_remote_command(command, self.host2_post_info) if 
len(self.host_post_info_list) == 3: run_remote_command(command, self.host3_post_info) command = "rabbitmqctl change_password zstack %s" % self.rabbit_password run_remote_command(command, self.host1_post_info) run_remote_command(command, self.host2_post_info) if len(self.host_post_info_list) == 3: run_remote_command(command, self.host3_post_info) command = 'rabbitmqctl set_permissions -p \/ zstack ".*" ".*" ".*"' run_remote_command(command, self.host1_post_info) run_remote_command(command, self.host2_post_info) if len(self.host_post_info_list) == 3: run_remote_command(command, self.host3_post_info) command = "rabbitmq-plugins enable rabbitmq_management" run_remote_command(command, self.host1_post_info) run_remote_command(command, self.host2_post_info) if len(self.host_post_info_list) == 3: run_remote_command(command, self.host3_post_info) service_status("rabbitmq-server","state=restarted enabled=yes", self.host1_post_info) service_status("rabbitmq-server", "state=restarted enabled=yes", self.host2_post_info) if len(self.host_post_info_list) == 3: service_status("rabbitmq-server", "state=restarted enabled=yes", self.host3_post_info) class ResetRabbitCmd(Command): def __init__(self): super(ResetRabbitCmd, self).__init__() self.name = "reset_rabbitmq" self.description = "Reinstall RabbitMQ message broker on local machine based on current configuration in zstack.properties." ctl.register_command(self) def install_argparse_arguments(self, parser): parser.add_argument('--yum', help="Use ZStack predefined yum repositories. The valid options include: alibase,aliepel,163base,ustcepel,zstack-local. 
# ResetRabbitCmd.run: reads the current broker settings from zstack.properties,
# removes rabbitmq-server and its data dir, then re-runs install_rabbitmq with
# the same host/username/password (and optional --yum repo).
NOTE: only use it when you know exactly what it does.", default=None) pass def run(self, args): rabbitmq_ip = ctl.read_property('CloudBus.serverIp.0') rabbitmq_user = ctl.read_property('CloudBus.rabbitmqUsername') rabbitmq_passwd = ctl.read_property('CloudBus.rabbitmqPassword') shell("service rabbitmq-server stop; rpm -ev rabbitmq-server; rm -rf /var/lib/rabbitmq") if args.yum is not None: ctl.internal_run('install_rabbitmq', "--host=%s --rabbit-username=%s --rabbit-password=%s --yum=%s" % (rabbitmq_ip, rabbitmq_user, rabbitmq_passwd, args.yum)) else: ctl.internal_run('install_rabbitmq', "--host=%s --rabbit-username=%s --rabbit-password=%s" % (rabbitmq_ip, rabbitmq_user, rabbitmq_passwd)) class InstallRabbitCmd(Command): def __init__(self): super(InstallRabbitCmd, self).__init__() self.name = "install_rabbitmq" self.description = "install RabbitMQ message broker on local or remote machine." ctl.register_command(self) def install_argparse_arguments(self, parser): parser.add_argument('--host', help='host IP, for example, 192.168.0.212, please specify the real IP rather than "localhost" or "127.0.0.1" when installing on local machine; otherwise management nodes on other machines cannot access the RabbitMQ.', required=True) parser.add_argument('--debug', help="open Ansible debug option", action="store_true", default=False) parser.add_argument('--no-update', help="don't update the IP address to 'CloudBus.serverIp.0' in zstack.properties", action="store_true", default=False) parser.add_argument('--ssh-key', help="the path of private key for SSH login $host; if provided, Ansible will use the specified key as private key to SSH login the $host", default=None) parser.add_argument('--rabbit-username', help="RabbitMQ username; if set, the username will be created on RabbitMQ. 
# NOTE(review): in InstallRabbitCmd.run below, the username/password validation
# repeats the same disjunct twice — '(password is None and username) or
# (username and password is None)'. Supplying --rabbit-password WITHOUT
# --rabbit-username is therefore NOT rejected; the second disjunct was
# presumably meant to be '(username is None and password)'. Confirm and fix.
# The yaml string is an Ansible playbook template ($host/$yum_repo/$pre_install_script
# placeholders substituted via string.Template further below).
[DEFAULT] rabbitmq default username", default=None) parser.add_argument('--rabbit-password', help="RabbitMQ password; if set, the password will be created on RabbitMQ for username specified by --rabbit-username. [DEFAULT] rabbitmq default password", default=None) parser.add_argument('--yum', help="Use ZStack predefined yum repositories. The valid options include: alibase,aliepel,163base,ustcepel,zstack-local. NOTE: only use it when you know exactly what it does.", default=None) def run(self, args): if (args.rabbit_password is None and args.rabbit_username) or (args.rabbit_username and args.rabbit_password is None): raise CtlError('--rabbit-username and --rabbit-password must be both set or not set') if not args.yum: args.yum = get_yum_repo_from_property() yaml = '''--- - hosts: $host remote_user: root vars: yum_repo: "$yum_repo" tasks: - name: pre-install script script: $pre_install_script - name: install RabbitMQ on RedHat OS from user defined yum repo when: ansible_os_family == 'RedHat' and yum_repo != 'false' shell: yum clean metadata; yum --disablerepo=* --enablerepo={{yum_repo}} --nogpgcheck install -y rabbitmq-server libselinux-python iptables-services - name: install RabbitMQ on RedHat OS from online when: ansible_os_family == 'RedHat' and yum_repo == 'false' shell: yum clean metadata; yum --nogpgcheck install -y rabbitmq-server libselinux-python iptables-services - name: install iptables-persistent for Ubuntu when: ansible_os_family == 'Debian' apt: pkg={{item}} update_cache=yes with_items: - iptables-persistent - name: install RabbitMQ on Ubuntu OS when: ansible_os_family == 'Debian' apt: pkg={{item}} update_cache=yes with_items: - rabbitmq-server - name: open 5672 port when: ansible_os_family != 'RedHat' shell: iptables-save | grep -- "-A INPUT -p tcp -m tcp --dport 5672 -j ACCEPT" > /dev/null || iptables -I INPUT -p tcp -m tcp --dport 5672 -j ACCEPT - name: open 5673 port when: ansible_os_family != 'RedHat' shell: iptables-save | grep -- "-A INPUT -p tcp 
# Playbook continues: idempotently opens AMQP ports 5672/5673 and management
# port 15672 on both Debian and RedHat families, persists the iptables rules,
# enables the management plugin and restarts the broker. The pre_script heredoc
# that follows provisions epel/aliyun/163 yum repo files on RedHat systems.
-m tcp --dport 5673 -j ACCEPT" > /dev/null || iptables -I INPUT -p tcp -m tcp --dport 5673 -j ACCEPT - name: open 15672 port when: ansible_os_family != 'RedHat' shell: iptables-save | grep -- "-A INPUT -p tcp -m tcp --dport 15672 -j ACCEPT" > /dev/null || iptables -I INPUT -p tcp -m tcp --dport 15672 -j ACCEPT - name: save iptables when: ansible_os_family != 'RedHat' shell: /etc/init.d/iptables-persistent save - name: open 5672 port when: ansible_os_family == 'RedHat' shell: iptables-save | grep -- "-A INPUT -p tcp -m tcp --dport 5672 -j ACCEPT" > /dev/null || iptables -I INPUT -p tcp -m tcp --dport 5672 -j ACCEPT - name: open 5673 port when: ansible_os_family == 'RedHat' shell: iptables-save | grep -- "-A INPUT -p tcp -m tcp --dport 5673 -j ACCEPT" > /dev/null || iptables -I INPUT -p tcp -m tcp --dport 5673 -j ACCEPT - name: open 15672 port when: ansible_os_family == 'RedHat' shell: iptables-save | grep -- "-A INPUT -p tcp -m tcp --dport 15672 -j ACCEPT" > /dev/null || iptables -I INPUT -p tcp -m tcp --dport 15672 -j ACCEPT - name: save iptables when: ansible_os_family == 'RedHat' shell: service iptables save - name: install rabbitmq management plugin shell: rabbitmq-plugins enable rabbitmq_management - name: enable RabbitMQ service: name=rabbitmq-server state=restarted enabled=yes - name: post-install script script: $post_install_script ''' pre_script = ''' if [ -f /etc/redhat-release ] ; then grep ' 7' /etc/redhat-release if [ $? -eq 0 ]; then [ -d /etc/yum.repos.d/ ] && [ ! -f /etc/yum.repos.d/epel.repo ] && echo -e "[epel]\nname=Extra Packages for Enterprise Linux \$releasever - \$basearce - mirrors.aliyun.com\nmirrorlist=https://mirrors.fedoraproject.org/metalink?repo=epel-7&arch=\$basearch\nfailovermethod=priority\nenabled=1\ngpgcheck=0\n" > /etc/yum.repos.d/epel.repo else [ -d /etc/yum.repos.d/ ] && [ ! 
# pre_script continues: writes the epel-6 repo on non-EL7 systems, then the
# aliyun/163/ustc mirror repo files (all enabled=0; selected explicitly via
# --enablerepo). The '\$basearce' spelling appears in the upstream repo names.
-f /etc/yum.repos.d/epel.repo ] && echo -e "[epel]\nname=Extra Packages for Enterprise Linux \$releasever - \$basearce - mirrors.aliyun.com\nmirrorlist=https://mirrors.fedoraproject.org/metalink?repo=epel-6&arch=\$basearch\nfailovermethod=priority\nenabled=1\ngpgcheck=0\n" > /etc/yum.repos.d/epel.repo fi [ -d /etc/yum.repos.d/ ] && echo -e "#aliyun base\n[alibase]\nname=CentOS-\$releasever - Base - mirrors.aliyun.com\nfailovermethod=priority\nbaseurl=http://mirrors.aliyun.com/centos/\$releasever/os/\$basearch/\ngpgcheck=0\nenabled=0\n \n#released updates \n[aliupdates]\nname=CentOS-\$releasever - Updates - mirrors.aliyun.com\nfailovermethod=priority\nbaseurl=http://mirrors.aliyun.com/centos/\$releasever/updates/\$basearch/\nenabled=0\ngpgcheck=0\n \n[aliextras]\nname=CentOS-\$releasever - Extras - mirrors.aliyun.com\nfailovermethod=priority\nbaseurl=http://mirrors.aliyun.com/centos/\$releasever/extras/\$basearch/\nenabled=0\ngpgcheck=0\n \n[aliepel]\nname=Extra Packages for Enterprise Linux \$releasever - \$basearce - mirrors.aliyun.com\nbaseurl=http://mirrors.aliyun.com/epel/\$releasever/\$basearch\nfailovermethod=priority\nenabled=0\ngpgcheck=0\n" > /etc/yum.repos.d/zstack-aliyun-yum.repo [ -d /etc/yum.repos.d/ ] && echo -e "#163 base\n[163base]\nname=CentOS-\$releasever - Base - mirrors.163.com\nfailovermethod=priority\nbaseurl=http://mirrors.163.com/centos/\$releasever/os/\$basearch/\ngpgcheck=0\nenabled=0\n \n#released updates \n[163updates]\nname=CentOS-\$releasever - Updates - mirrors.163.com\nfailovermethod=priority\nbaseurl=http://mirrors.163.com/centos/\$releasever/updates/\$basearch/\nenabled=0\ngpgcheck=0\n \n#additional packages that may be useful\n[163extras]\nname=CentOS-\$releasever - Extras - mirrors.163.com\nfailovermethod=priority\nbaseurl=http://mirrors.163.com/centos/\$releasever/extras/\$basearch/\nenabled=0\ngpgcheck=0\n \n[ustcepel]\nname=Extra Packages for Enterprise Linux \$releasever - \$basearch - ustc 
# pre_script tail: a DNS-hijacking sanity check — if the local hostname resolves
# to an IP that belongs to no local interface (and is not 127.x), abort with an
# explanatory message, since hijacked DNS breaks MySQL and RabbitMQ.
# The script is written to a tempfile and a cleanup routine is registered.
\nbaseurl=http://centos.ustc.edu.cn/epel/\$releasever/\$basearch\nfailovermethod=priority\nenabled=0\ngpgcheck=0\n" > /etc/yum.repos.d/zstack-163-yum.repo fi ################### #Check DNS hijacking ################### hostname=`hostname` pintret=`ping -c 1 -W 2 $hostname 2>/dev/null | head -n1` echo $pintret | grep 'PING' > /dev/null [ $? -ne 0 ] && exit 0 ip=`echo $pintret | cut -d' ' -f 3 | cut -d'(' -f 2 | cut -d')' -f 1` ip_1=`echo $ip | cut -d'.' -f 1` [ "127" = "$ip_1" ] && exit 0 ip addr | grep $ip > /dev/null [ $? -eq 0 ] && exit 0 echo "The hostname($hostname) of your machine is resolved to IP($ip) which is none of IPs of your machine. It's likely your DNS server has been hijacking, please try fixing it or add \"ip_of_your_host $hostname\" to /etc/hosts. DNS hijacking will cause MySQL and RabbitMQ not working." exit 1 ''' fd, pre_script_path = tempfile.mkstemp() os.fdopen(fd, 'w').write(pre_script) def cleanup_prescript(): os.remove(pre_script_path) self.install_cleanup_routine(cleanup_prescript) if args.rabbit_username and args.rabbit_password: post_script = '''set -x rabbitmqctl list_users|grep 'zstack' if [ $$? 
# run() tail: substitutes username/password into the post-install script (which
# creates the rabbit user only if missing), renders the playbook template, runs
# ansible, and finally persists serverIp/username/password into
# zstack.properties unless --no-update was given. Note 'yum_folder' is passed
# to substitute() but has no matching $yum_folder placeholder in the template.
-ne 0 ]; then set -e rabbitmqctl add_user $username $password rabbitmqctl set_user_tags $username administrator rabbitmqctl set_permissions -p / $username ".*" ".*" ".*" fi ''' t = string.Template(post_script) post_script = t.substitute({ 'username': args.rabbit_username, 'password': args.rabbit_password }) else: post_script = '' fd, post_script_path = tempfile.mkstemp() os.fdopen(fd, 'w').write(post_script) def cleanup_postscript(): os.remove(post_script_path) self.install_cleanup_routine(cleanup_postscript) t = string.Template(yaml) if args.yum: yum_repo = args.yum else: yum_repo = 'false' yaml = t.substitute({ 'host': args.host, 'pre_install_script': pre_script_path, 'yum_folder': ctl.zstack_home, 'yum_repo': yum_repo, 'post_install_script': post_script_path }) ansible(yaml, args.host, args.debug, args.ssh_key) if not args.no_update: ctl.write_property('CloudBus.serverIp.0', args.host) info('updated CloudBus.serverIp.0=%s in %s' % (args.host, ctl.properties_file_path)) if args.rabbit_username and args.rabbit_password: ctl.write_property('CloudBus.rabbitmqUsername', args.rabbit_username) info('updated CloudBus.rabbitmqUsername=%s in %s' % (args.rabbit_username, ctl.properties_file_path)) ctl.write_property('CloudBus.rabbitmqPassword', args.rabbit_password) info('updated CloudBus.rabbitmqPassword=%s in %s' % (args.rabbit_password, ctl.properties_file_path)) class ChangeMysqlPasswordCmd(Command): def __init__(self): super(ChangeMysqlPasswordCmd, self).__init__() self.name = "change_mysql_password" self.description = ( "Change mysql password for root or normal user" ) ctl.register_command(self) def install_argparse_arguments(self, parser): parser.add_argument('--root-password','-root', help="Current mysql root password", required=True) parser.add_argument('--user-name','-user', help="The user you want to change password", required=True) parser.add_argument('--new-password','-new', help="New mysql password of root or normal user", required=True) 
parser.add_argument('--remote-ip','-ip', help="Mysql ip address if didn't install on localhost", ) def check_username_password(self,args): if args.remote_ip is not None: status, output = commands.getstatusoutput("mysql -u root -p%s -h '%s' -e 'show databases;'" % (args.root_password, args.remote_ip)) else: status, output = commands.getstatusoutput("mysql -u root -p%s -e 'show databases;'" % args.root_password) if status != 0: error(output) def run(self, args): self.check_username_password(args) if args.user_name == 'zstack': if args.remote_ip is not None: sql = "mysql -u root -p'%s' -h '%s' -e \"UPDATE mysql.user SET Password=PASSWORD(\'%s\') , Host = \'%s\' WHERE USER=\'%s\';FLUSH PRIVILEGES;\"" % (args.root_password, args.remote_ip, args.new_password,args.remote_ip, args.user_name) else: sql = "mysql -u root -p'%s' -e \"UPDATE mysql.user SET Password=PASSWORD(\'%s\') WHERE USER=\'%s\';FLUSH PRIVILEGES;\"" % (args.root_password, args.new_password, args.user_name) status, output = commands.getstatusoutput(sql) if status != 0: error(output) info("Change mysql password for user '%s' successfully! " % args.user_name) info(colored("Please change 'DB.password' in 'zstack.properties' then restart zstack to make the changes effective" , 'yellow')) elif args.user_name == 'root': if args.remote_ip is not None: status, output = commands.getstatusoutput("mysqladmin -u %s -p'%s' password %s -h %s" % (args.user_name, args.root_password, args.new_password, args.remote_ip)) else: status, output = commands.getstatusoutput("mysqladmin -u %s -p'%s' password %s" % (args.user_name, args.root_password, args.new_password)) if status != 0: error(output) info("Change mysql password for user '%s' successfully!" 
% args.user_name) else: error("Only support change 'zstack' and 'root' password") class DumpMysqlCmd(Command): def __init__(self): super(DumpMysqlCmd, self).__init__() self.name = "dump_mysql" self.description = ( "Dump mysql database for backup" ) ctl.register_command(self) def install_argparse_arguments(self, parser): parser.add_argument('--file-name', help="The filename you want to save the database, default is 'zstack-backup-db'", default="zstack-backup-db") parser.add_argument('--keep-amount',type=int, help="The amount of backup files you want to keep, older backup files will be deleted, default number is 60", default=60) def run(self, args): (db_hostname, db_port, db_user, db_password) = ctl.get_live_mysql_portal() file_name = args.file_name keep_amount = args.keep_amount backup_timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S") db_backup_dir = "/var/lib/zstack/mysql-backup/" if os.path.exists(db_backup_dir) is False: os.mkdir(db_backup_dir) db_backup_name = db_backup_dir + file_name + "-" + backup_timestamp if db_hostname == "localhost" or db_hostname == "127.0.0.1": if db_password is None or db_password == "": db_connect_password = "" else: db_connect_password = "-p" + db_password command = "mysqldump --add-drop-database --databases -u %s %s -P %s zstack zstack_rest | gzip > %s "\ % (db_user, db_connect_password, db_port, db_backup_name + ".gz") (status, output) = commands.getstatusoutput(command) if status != 0: error(output) else: if db_password is None or db_password == "": db_connect_password = "" else: db_connect_password = "-p" + db_password command = "mysqldump --add-drop-database --databases -u %s %s --host %s -P %s zstack zstack_rest | gzip > %s " \ % (db_user, db_connect_password, db_hostname, db_port, db_backup_name + ".gz") (status, output) = commands.getstatusoutput(command) if status != 0: error(output) print "Backup mysql successful! 
You can check the file at %s.gz" % db_backup_name # remove old file if len(os.listdir(db_backup_dir)) > keep_amount: backup_files_list = [s for s in os.listdir(db_backup_dir) if os.path.isfile(os.path.join(db_backup_dir, s))] backup_files_list.sort(key=lambda s: os.path.getmtime(os.path.join(db_backup_dir, s))) for expired_file in backup_files_list: if expired_file not in backup_files_list[-keep_amount:]: os.remove(db_backup_dir + expired_file) class RestoreMysqlCmd(Command): status, all_local_ip = commands.getstatusoutput("ip a") def __init__(self): super(RestoreMysqlCmd, self).__init__() self.name = "restore_mysql" self.description = ( "Restore mysql data from backup file" ) self.hide = True ctl.register_command(self) def install_argparse_arguments(self, parser): parser.add_argument('--from-file', '-f', help="The backup filename under /var/lib/zstack/mysql-backup/ ", required=True) parser.add_argument('--mysql-root-password', help="mysql root password", default=None) def test_mysql_connection(self, db_connect_password, db_port, db_hostname): command = "mysql -uroot %s -P %s %s -e 'show databases' >> /dev/null 2>&1" \ % (db_connect_password, db_port, db_hostname) try: shell_no_pipe(command) except: error("Can't connect mysql with root password '%s', please specify databse root password with --mysql-root-password" % db_connect_password.split('-p')[1]) def run(self, args): (db_hostname, db_port, db_user, db_password) = ctl.get_live_mysql_portal() # only root user can restore database db_password = args.mysql_root_password db_backup_name = args.from_file if os.path.exists(db_backup_name) is False: error("Didn't find file: %s ! Stop recover database! 
" % db_backup_name) error_if_tool_is_missing('gunzip') info("Backup mysql before restore data ...") shell_no_pipe('zstack-ctl dump_mysql') shell_no_pipe('zstack-ctl stop_node') info("Starting recover data ...") if db_password is None or db_password == "": db_connect_password = "" else: db_connect_password = "-p" + db_password if db_hostname == "localhost" or db_hostname == "127.0.0.1" or (db_hostname in RestoreMysqlCmd.all_local_ip): db_hostname = "" else: db_hostname = "--host %s" % db_hostname self.test_mysql_connection(db_connect_password, db_port, db_hostname) for database in ['zstack','zstack_rest']: command = "mysql -uroot %s -P %s %s -e 'drop database if exists %s; create database %s' >> /dev/null 2>&1" \ % (db_connect_password, db_port, db_hostname, database, database) shell_no_pipe(command) command = "gunzip < %s | mysql -uroot %s %s -P %s %s" \ % (db_backup_name, db_connect_password, db_hostname, db_port, database) shell_no_pipe(command) #shell_no_pipe('zstack-ctl start_node') info("Recover data successfully! 
You can start node by: zstack-ctl start") class CollectLogCmd(Command): zstack_log_dir = "/var/log/zstack/" vrouter_log_dir = "/home/vyos/zvr/" host_log_list = ['zstack.log','zstack-kvmagent.log','zstack-iscsi-filesystem-agent.log', 'zstack-agent/collectd.log','zstack-agent/server.log'] bs_log_list = ['zstack-sftpbackupstorage.log','ceph-backupstorage.log','zstack-store/zstore.log', 'fusionstor-backupstorage.log'] ps_log_list = ['ceph-primarystorage.log','fusionstor-primarystorage.log'] # management-server.log is not in the same dir, will collect separately mn_log_list = ['deploy.log', 'ha.log', 'zstack-console-proxy.log', 'zstack.log', 'zstack-cli', 'zstack-ui.log', 'zstack-dashboard.log', 'zstack-ctl.log'] collect_lines = 100000 logger_dir = '/var/log/zstack/' logger_file = 'zstack-ctl.log' failed_flag = False def __init__(self): super(CollectLogCmd, self).__init__() self.name = "collect_log" self.description = ( "Collect log for diagnose" ) ctl.register_command(self) def install_argparse_arguments(self, parser): parser.add_argument('--db', help='collect database for diagnose ', action="store_true", default=False) parser.add_argument('--mn-only', help='only collect management log', action="store_true", default=False) parser.add_argument('--full', help='collect full management logs and host logs', action="store_true", default=False) parser.add_argument('--host', help='only collect management log and specific host log') def get_db(self, collect_dir): command = "cp `zstack-ctl dump_mysql | awk '{ print $10 }'` %s" % collect_dir shell(command, False) def compress_and_fetch_log(self, local_collect_dir, tmp_log_dir, host_post_info): command = "cd %s && tar zcf ../collect-log.tar.gz ." 
% tmp_log_dir run_remote_command(command, host_post_info) fetch_arg = FetchArg() fetch_arg.src = "%s/../collect-log.tar.gz " % tmp_log_dir fetch_arg.dest = local_collect_dir fetch_arg.args = "fail_on_missing=yes flat=yes" fetch(fetch_arg, host_post_info) command = "rm -rf %s %s/../collect-log.tar.gz" % (tmp_log_dir, tmp_log_dir) run_remote_command(command, host_post_info) (status, output) = commands.getstatusoutput("cd %s && tar zxf collect-log.tar.gz" % local_collect_dir) if status != 0: warn("Uncompress %s/collect-log.tar.gz meet problem: %s" % (local_collect_dir, output)) (status, output) = commands.getstatusoutput("rm -f %s/collect-log.tar.gz" % local_collect_dir) def get_system_log(self, host_post_info, tmp_log_dir): # collect uptime and last reboot log and dmesg host_info_log = tmp_log_dir + "host_info" command = "uptime > %s && last reboot >> %s && free -h >> %s && cat /proc/cpuinfo >> %s && ip addr >> %s && df -h >> %s" % \ (host_info_log, host_info_log, host_info_log, host_info_log, host_info_log, host_info_log) run_remote_command(command, host_post_info, True, True) command = "cp /var/log/dmesg* /var/log/messages %s" % tmp_log_dir run_remote_command(command, host_post_info) def get_pkg_list(self, host_post_info, tmp_log_dir): command = "rpm -qa | sort > %s/pkg_list" % tmp_log_dir run_remote_command(command, host_post_info) def get_vrouter_log(self, host_post_info, collect_dir): #current vrouter log is very small, so collect all logs for debug if check_host_reachable(host_post_info) is True: info("Collecting log from vrouter: %s ..." 
% host_post_info.host) local_collect_dir = collect_dir + 'vrouter-%s/' % host_post_info.host tmp_log_dir = "%s/tmp-log/" % CollectLogCmd.vrouter_log_dir command = "mkdir -p %s " % tmp_log_dir run_remote_command(command, host_post_info) command = "/opt/vyatta/sbin/vyatta-save-config.pl && cp /config/config.boot %s" % tmp_log_dir run_remote_command(command, host_post_info) command = "cp %s/*.log %s/*.json %s" % (CollectLogCmd.vrouter_log_dir, CollectLogCmd.vrouter_log_dir,tmp_log_dir) run_remote_command(command, host_post_info) self.compress_and_fetch_log(local_collect_dir, tmp_log_dir, host_post_info) else: warn("Vrouter %s is unreachable!" % host_post_info.host) def get_host_log(self, host_post_info, collect_dir, collect_full_log=False): if check_host_reachable(host_post_info) is True: info("Collecting log from host: %s ..." % host_post_info.host) tmp_log_dir = "%s/tmp-log/" % CollectLogCmd.zstack_log_dir local_collect_dir = collect_dir + 'host-%s/' % host_post_info.host try: # file system broken shouldn't block collect log process if not os.path.exists(local_collect_dir): os.makedirs(local_collect_dir) command = "mkdir -p %s " % tmp_log_dir run_remote_command(command, host_post_info) for log in CollectLogCmd.host_log_list: if 'zstack-agent' in log: command = "mkdir -p %s" % tmp_log_dir + '/zstack-agent/' run_remote_command(command, host_post_info) host_log = CollectLogCmd.zstack_log_dir + '/' + log collect_log = tmp_log_dir + '/' + log if file_dir_exist("path=%s" % host_log, host_post_info): if collect_full_log: for num in range(1, 16): log_name = "%s.%s.gz" % (host_log, num) command = "/bin/cp -rf %s %s/" % (log_name, tmp_log_dir) (status, output) = run_remote_command(command, host_post_info, True, True) command = "/bin/cp -rf %s %s/" % (host_log, tmp_log_dir) (status, output) = run_remote_command(command, host_post_info, True, True) else: command = "tail -n %d %s > %s " % (CollectLogCmd.collect_lines, host_log, collect_log) run_remote_command(command, 
host_post_info) except SystemExit: warn("collect log on host %s failed" % host_post_info.host) logger.warn("collect log on host %s failed" % host_post_info.host) command = 'rm -rf %s' % tmp_log_dir CollectLogCmd.failed_flag = True run_remote_command(command, host_post_info) return 1 command = 'test "$(ls -A "%s" 2>/dev/null)" || echo The directory is empty' % tmp_log_dir (status, output) = run_remote_command(command, host_post_info, return_status=True, return_output=True) if "The directory is empty" in output: warn("Didn't find log on host: %s " % (host_post_info.host)) command = 'rm -rf %s' % tmp_log_dir run_remote_command(command, host_post_info) return 0 self.get_system_log(host_post_info, tmp_log_dir) self.get_pkg_list(host_post_info, tmp_log_dir) self.compress_and_fetch_log(local_collect_dir,tmp_log_dir,host_post_info) else: warn("Host %s is unreachable!" % host_post_info.host) def get_storage_log(self, host_post_info, collect_dir, storage_type, collect_full_log=False): collect_log_list = [] if check_host_reachable(host_post_info) is True: info("Collecting log from %s storage: %s ..." 
% (storage_type, host_post_info.host)) tmp_log_dir = "%s/tmp-log/" % CollectLogCmd.zstack_log_dir local_collect_dir = collect_dir + storage_type + '-' + host_post_info.host+ '/' try: # file system broken shouldn't block collect log process if not os.path.exists(local_collect_dir): os.makedirs(local_collect_dir) command = "rm -rf %s && mkdir -p %s " % (tmp_log_dir, tmp_log_dir) run_remote_command(command, host_post_info) if '_ps' in storage_type: collect_log_list = CollectLogCmd.ps_log_list elif '_bs' in storage_type: collect_log_list = CollectLogCmd.bs_log_list else: warn("unknown storage type: %s" % storage_type) for log in collect_log_list: if 'zstack-store' in log: command = "mkdir -p %s" % tmp_log_dir + '/zstack-store/' run_remote_command(command, host_post_info) storage_agent_log = CollectLogCmd.zstack_log_dir + '/' + log collect_log = tmp_log_dir + '/' + log if file_dir_exist("path=%s" % storage_agent_log, host_post_info): if collect_full_log: for num in range(1, 16): log_name = "%s.%s.gz" % (storage_agent_log, num) command = "/bin/cp -rf %s %s/" % (log_name, tmp_log_dir) (status, output) = run_remote_command(command, host_post_info, True, True) command = "/bin/cp -rf %s %s/" % (storage_agent_log, tmp_log_dir) (status, output) = run_remote_command(command, host_post_info, True, True) else: command = "tail -n %d %s > %s " % (CollectLogCmd.collect_lines, storage_agent_log, collect_log) run_remote_command(command, host_post_info) except SystemExit: logger.warn("collect log on storage: %s failed" % host_post_info.host) command = 'rm -rf %s' % tmp_log_dir CollectLogCmd.failed_flag = True run_remote_command(command, host_post_info) command = 'test "$(ls -A "%s" 2>/dev/null)" || echo The directory is empty' % tmp_log_dir (status, output) = run_remote_command(command, host_post_info, return_status=True, return_output=True) if "The directory is empty" in output: warn("Didn't find log on storage host: %s " % host_post_info.host) command = 'rm -rf %s' % tmp_log_dir 
run_remote_command(command, host_post_info) return 0 self.get_system_log(host_post_info, tmp_log_dir) self.get_pkg_list(host_post_info, tmp_log_dir) self.compress_and_fetch_log(local_collect_dir,tmp_log_dir, host_post_info) else: warn("%s storage %s is unreachable!" % (storage_type, host_post_info.host)) def get_host_ssh_info(self, host_ip, type): db_hostname, db_port, db_user, db_password = ctl.get_live_mysql_portal() query = MySqlCommandLineQuery() query.host = db_hostname query.port = db_port query.user = db_user query.password = db_password query.table = 'zstack' if type == 'host': query.sql = "select * from HostVO where managementIp='%s'" % host_ip host_uuid = query.query()[0]['uuid'] query.sql = "select * from KVMHostVO where uuid='%s'" % host_uuid ssh_info = query.query()[0] username = ssh_info['username'] password = ssh_info['password'] ssh_port = ssh_info['port'] return (username, password, ssh_port) elif type == "sftp_bs": query.sql = "select * from SftpBackupStorageVO where hostname='%s'" % host_ip ssh_info = query.query()[0] username = ssh_info['username'] password = ssh_info['password'] ssh_port = ssh_info['sshPort'] return (username, password, ssh_port) elif type == "ceph_bs": query.sql = "select * from CephBackupStorageMonVO where hostname='%s'" % host_ip ssh_info = query.query()[0] username = ssh_info['sshUsername'] password = ssh_info['sshPassword'] ssh_port = ssh_info['sshPort'] return (username, password, ssh_port) elif type == "fusionStor_bs": query.sql = "select * from FusionstorPrimaryStorageMonVO where hostname='%s'" % host_ip ssh_info = query.query()[0] username = ssh_info['sshUsername'] password = ssh_info['sshPassword'] ssh_port = ssh_info['sshPort'] return (username, password, ssh_port) elif type == "imageStore_bs": query.sql = "select * from ImageStoreBackupStorageVO where hostname='%s'" % host_ip ssh_info = query.query()[0] username = ssh_info['username'] password = ssh_info['password'] ssh_port = ssh_info['sshPort'] return (username, 
password, ssh_port) elif type == "ceph_ps": query.sql = "select * from CephPrimaryStorageMonVO where hostname='%s'" % host_ip ssh_info = query.query()[0] username = ssh_info['sshUsername'] password = ssh_info['sshPassword'] ssh_port = ssh_info['sshPort'] return (username, password, ssh_port) elif type == "fusionStor_ps": query.sql = "select * from FusionstorPrimaryStorageMonVO where hostname='%s'" % host_ip ssh_info = query.query()[0] username = ssh_info['sshUsername'] password = ssh_info['sshPassword'] ssh_port = ssh_info['sshPort'] return (username, password, ssh_port) elif type == "vrouter": query.sql = "select value from GlobalConfigVO where name='vrouter.password'" password = query.query() username = "vyos" ssh_port = 22 return (username, password, ssh_port) else: warn("unknown target type: %s" % type) def get_management_node_log(self, collect_dir, host_post_info, collect_full_log=False): '''management.log maybe not exist, so collect latest files, maybe a tarball''' if check_host_reachable(host_post_info) is True: mn_ip = host_post_info.host info("Collecting log from management node %s ..." 
% mn_ip) local_collect_dir = collect_dir + "/management-node-%s/" % mn_ip + '/' if not os.path.exists(local_collect_dir): os.makedirs(local_collect_dir) tmp_log_dir = "%s/../../logs/tmp-log/" % ctl.zstack_home command = 'rm -rf %s && mkdir -p %s' % (tmp_log_dir, tmp_log_dir) run_remote_command(command, host_post_info) command = "mn_log=`find %s/../../logs/management-serve* -maxdepth 1 -type f -printf" \ " '%%T+\\t%%p\\n' | sort -r | awk '{print $2; exit}'`; /bin/cp -rf $mn_log %s" % (ctl.zstack_home, tmp_log_dir) (status, output) = run_remote_command(command, host_post_info, True, True) if status is not True: warn("get management-server log failed: %s" % output) if collect_full_log: for item in range(0, 15): log_name = "management-server-" + (datetime.today() - timedelta(days=item)).strftime("%Y-%m-%d") command = "/bin/cp -rf %s/../../logs/%s* %s/" % (ctl.zstack_home, log_name, tmp_log_dir) (status, output) = run_remote_command(command, host_post_info, True, True) for log in CollectLogCmd.mn_log_list: if file_dir_exist("path=%s/%s" % (CollectLogCmd.zstack_log_dir, log), host_post_info): command = "tail -n %d %s/%s > %s/%s " \ % (CollectLogCmd.collect_lines, CollectLogCmd.zstack_log_dir, log, tmp_log_dir, log) run_remote_command(command, host_post_info) self.get_system_log(host_post_info, tmp_log_dir) self.get_pkg_list(host_post_info, tmp_log_dir) self.compress_and_fetch_log(local_collect_dir, tmp_log_dir, host_post_info) else: warn("Management node %s is unreachable!" 
% host_post_info.host) def get_local_mn_log(self, collect_dir, collect_full_log=False): info("Collecting log from this management node ...") mn_log_dir = collect_dir + 'management-node-%s' % get_default_ip() if not os.path.exists(mn_log_dir): os.makedirs(mn_log_dir) command = "mn_log=`find %s/../..//logs/management-serve* -maxdepth 1 -type f -printf '%%T+\\t%%p\\n' | sort -r | " \ "awk '{print $2; exit}'`; /bin/cp -rf $mn_log %s/" % (ctl.zstack_home, mn_log_dir) (status, output) = commands.getstatusoutput(command) if status !=0: warn("get management-server log failed: %s" % output) if collect_full_log: for item in range(0, 15): log_name = "management-server-" + (datetime.today() - timedelta(days=item)).strftime("%Y-%m-%d") command = "/bin/cp -rf %s/../../logs/%s* %s/" % (ctl.zstack_home, log_name, mn_log_dir) (status, output) = commands.getstatusoutput(command) for log in CollectLogCmd.mn_log_list: if os.path.exists(CollectLogCmd.zstack_log_dir + log): command = ( "tail -n %d %s/%s > %s/%s " % (CollectLogCmd.collect_lines, CollectLogCmd.zstack_log_dir, log, mn_log_dir, log)) (status, output) = commands.getstatusoutput(command) if status != 0: warn("get %s failed: %s" % (log, output)) host_info_log = mn_log_dir + "/host_info" command = "uptime > %s && last reboot >> %s && free -h >> %s && cat /proc/cpuinfo >> %s && ip addr >> %s && df -h >> %s" % \ (host_info_log, host_info_log, host_info_log, host_info_log, host_info_log, host_info_log) commands.getstatusoutput(command) command = "cp /var/log/dmesg* /var/log/messages %s/" % mn_log_dir commands.getstatusoutput(command) command = "cp %s/*git-commit %s/" % (ctl.zstack_home, mn_log_dir) commands.getstatusoutput(command) command = " rpm -qa | sort > %s/pkg_list" % mn_log_dir commands.getstatusoutput(command) command = " rpm -qa | sort > %s/pkg_list" % mn_log_dir commands.getstatusoutput(command) def generate_tar_ball(self, run_command_dir, detail_version, time_stamp): (status, output) = commands.getstatusoutput("cd %s 
&& tar zcf collect-log-%s-%s.tar.gz collect-log-%s-%s" % (run_command_dir, detail_version, time_stamp, detail_version, time_stamp)) if status != 0: error("Generate tarball failed: %s " % output) def generate_host_post_info(self, host_ip, type): host_post_info = HostPostInfo() # update inventory with open(ctl.zstack_home + "/../../../ansible/hosts") as f: old_hosts = f.read() if host_ip not in old_hosts: with open(ctl.zstack_home + "/../../../ansible/hosts", "w") as f: new_hosts = host_ip + "\n" + old_hosts f.write(new_hosts) (host_user, host_password, host_port) = self.get_host_ssh_info(host_ip, type) if host_user != 'root' and host_password is not None: host_post_info.become = True host_post_info.remote_user = host_user host_post_info.remote_pass = host_password host_post_info.remote_port = host_port host_post_info.host = host_ip host_post_info.host_inventory = ctl.zstack_home + "/../../../ansible/hosts" host_post_info.private_key = ctl.zstack_home + "/WEB-INF/classes/ansible/rsaKeys/id_rsa" host_post_info.post_url = "" return host_post_info def run(self, args): run_command_dir = os.getcwd() time_stamp = datetime.now().strftime("%Y-%m-%d_%H-%M") # create log create_log(CollectLogCmd.logger_dir, CollectLogCmd.logger_file) if get_detail_version() is not None: detail_version = get_detail_version().replace(' ','_') else: hostname, port, user, password = ctl.get_live_mysql_portal() detail_version = get_zstack_version(hostname, port, user, password) # collect_dir used to store the collect-log collect_dir = run_command_dir + '/collect-log-%s-%s/' % (detail_version, time_stamp) if not os.path.exists(collect_dir): os.makedirs(collect_dir) if os.path.exists(InstallHACmd.conf_file) is not True: self.get_local_mn_log(collect_dir, args.full) else: # this only for HA due to db will lost mn info if mn offline mn_list = get_ha_mn_list(InstallHACmd.conf_file) for mn_ip in mn_list: host_post_info = HostPostInfo() host_post_info.remote_user = 'root' # this will be changed in the 
future host_post_info.remote_port = '22' host_post_info.host = mn_ip host_post_info.host_inventory = InstallHACmd.conf_dir + 'host' host_post_info.post_url = "" host_post_info.private_key = InstallHACmd.conf_dir + 'ha_key' self.get_management_node_log(collect_dir, host_post_info, args.full) if args.db is True: self.get_db(collect_dir) if args.mn_only is not True: host_vo = get_host_list("HostVO") #collect host log for host in host_vo: if args.host is not None: host_ip = args.host else: host_ip = host['managementIp'] host_type = host['hypervisorType'] if host_type == "KVM": self.get_host_log(self.generate_host_post_info(host_ip, "host"), collect_dir, args.full) else: warn("host %s is not a KVM host, skip..." % host_ip) if args.host is not None: break #collect vrouter log vrouter_ip_list = get_vrouter_list() for vrouter_ip in vrouter_ip_list: self.get_vrouter_log(self.generate_host_post_info(vrouter_ip, "vrouter"),collect_dir) #collect bs log sftp_bs_vo = get_host_list("SftpBackupStorageVO") for bs in sftp_bs_vo: bs_ip = bs['hostname'] self.get_storage_log(self.generate_host_post_info(bs_ip, "sftp_bs"), collect_dir, "sftp_bs") ceph_bs_vo = get_host_list("CephBackupStorageMonVO") for bs in ceph_bs_vo: bs_ip = bs['hostname'] self.get_storage_log(self.generate_host_post_info(bs_ip, "ceph_bs"), collect_dir, "ceph_bs") fusionStor_bs_vo = get_host_list("FusionstorBackupStorageMonVO") for bs in fusionStor_bs_vo: bs_ip = bs['hostname'] self.get_storage_log(self.generate_host_post_info(bs_ip, "fusionStor_bs"), collect_dir, "fusionStor_bs") imageStore_bs_vo = get_host_list("ImageStoreBackupStorageVO") for bs in imageStore_bs_vo: bs_ip = bs['hostname'] self.get_storage_log(self.generate_host_post_info(bs_ip, "imageStore_bs"), collect_dir, "imageStore_bs") #collect ps log ceph_ps_vo = get_host_list("CephPrimaryStorageMonVO") for ps in ceph_ps_vo: ps_ip = ps['hostname'] self.get_storage_log(self.generate_host_post_info(ps_ip,"ceph_ps"), collect_dir, "ceph_ps") fusionStor_ps_vo = 
get_host_list("FusionstorPrimaryStorageMonVO") for ps in fusionStor_ps_vo: ps_ip = ps['hostname'] self.get_storage_log(self.generate_host_post_info(ps_ip,"fusionStor_ps"), collect_dir, "fusionStor_ps") self.generate_tar_ball(run_command_dir, detail_version, time_stamp) if CollectLogCmd.failed_flag is True: info("The collect log generate at: %s/collect-log-%s-%s.tar.gz" % (run_command_dir, detail_version, time_stamp)) info(colored("Please check the reason of failed task in log: %s\n" % (CollectLogCmd.logger_dir + CollectLogCmd.logger_file), 'yellow')) else: info("The collect log generate at: %s/collect-log-%s-%s.tar.gz" % (run_command_dir, detail_version, time_stamp)) class ChangeIpCmd(Command): def __init__(self): super(ChangeIpCmd, self).__init__() self.name = "change_ip" self.description = ( "update new management ip address to zstack property file" ) ctl.register_command(self) def install_argparse_arguments(self, parser): parser.add_argument('--ip', help='The new IP address of management node.' 'This operation will update the new ip address to ' 'zstack config file' , required=True) parser.add_argument('--cloudbus_server_ip', help='The new IP address of CloudBus.serverIp.0, default will use value from --ip', required=False) parser.add_argument('--mysql_ip', help='The new IP address of DB.url, default will use value from --ip', required=False) parser.add_argument('--yum', help="Use ZStack predefined yum repositories. The valid options include: alibase,aliepel,163base,ustcepel,zstack-local. 
NOTE: only use it when you know exactly what it does.", default=None) def run(self, args): if args.ip == '0.0.0.0': raise CtlError('for your data safety, please do NOT use 0.0.0.0 as the listen address') if args.cloudbus_server_ip is not None: cloudbus_server_ip = args.cloudbus_server_ip else: cloudbus_server_ip = args.ip if args.mysql_ip is not None: mysql_ip = args.mysql_ip else: mysql_ip = args.ip zstack_conf_file = ctl.properties_file_path ip_check = re.compile('^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$') for input_ip in [cloudbus_server_ip, mysql_ip]: if not ip_check.match(input_ip): info("The ip address you input: %s seems not a valid ip" % input_ip) return 1 # Update /etc/hosts if os.path.isfile(zstack_conf_file): old_ip = ctl.read_property('management.server.ip') if old_ip is not None: if not ip_check.match(old_ip): info("The ip address[%s] read from [%s] seems not a valid ip" % (old_ip, zstack_conf_file)) return 1 # read from env other than /etc/hostname in case of impact of DHCP SERVER old_hostname = shell("hostname").replace("\n","") new_hostname = args.ip.replace(".","-") if old_hostname != "localhost" and old_hostname != "localhost.localdomain": new_hostname = old_hostname if old_ip != None: shell('sed -i "/^%s .*$/d" /etc/hosts' % old_ip) else: shell('sed -i "/^.* %s$/d" /etc/hosts' % new_hostname) shell('echo "%s %s" >> /etc/hosts' % (args.ip, new_hostname)) shell('hostnamectl set-hostname %s' % new_hostname) shell('export HOSTNAME=%s' % new_hostname) if old_ip != None: info("Update /etc/hosts, old_ip:%s, new_ip:%s" % (old_ip, args.ip)) else: info("Update /etc/hosts, new_ip:%s" % args.ip) else: info("Didn't find %s, skip update new ip" % zstack_conf_file ) return 1 # Update zstack config file if os.path.isfile(zstack_conf_file): shell("yes | cp %s %s.bak" % (zstack_conf_file, zstack_conf_file)) ctl.write_properties([ ('CloudBus.serverIp.0', cloudbus_server_ip), ]) info("Update cloudbus server ip %s in %s " % (cloudbus_server_ip, zstack_conf_file)) 
ctl.write_properties([ ('management.server.ip', args.ip), ]) info("Update management server ip %s in %s " % (args.ip, zstack_conf_file)) db_url = ctl.read_property('DB.url') db_old_ip = re.findall(r'[0-9]+(?:\.[0-9]{1,3}){3}', db_url) db_new_url = db_url.split(db_old_ip[0])[0] + mysql_ip + db_url.split(db_old_ip[0])[1] ctl.write_properties([ ('DB.url', db_new_url), ]) info("Update mysql new url %s in %s " % (db_new_url, zstack_conf_file)) else: info("Didn't find %s, skip update new ip" % zstack_conf_file ) return 1 # Reset RabbitMQ info("Starting reset rabbitmq...") if args.yum is not None: ret = shell_return("zstack-ctl reset_rabbitmq --yum=%s" % args.yum) else: ret = shell_return("zstack-ctl reset_rabbitmq") if ret == 0: info("Reset rabbitMQ successfully") info("Change ip successfully") else: error("Change ip failed") class InstallManagementNodeCmd(Command): def __init__(self): super(InstallManagementNodeCmd, self).__init__() self.name = "install_management_node" self.description = ( "install ZStack management node from current machine to a remote machine with zstack.properties." 
"\nNOTE: please configure current node before installing node on other machines" ) ctl.register_command(self) def install_argparse_arguments(self, parser): parser.add_argument('--host', help='target host IP, for example, 192.168.0.212, to install ZStack management node to a remote machine', required=True) parser.add_argument('--install-path', help='the path on remote machine where Apache Tomcat will be installed, which must be an absolute path; [DEFAULT]: /usr/local/zstack', default='/usr/local/zstack') parser.add_argument('--source-dir', help='the source folder containing Apache Tomcat package and zstack.war, if omitted, it will default to a path related to $ZSTACK_HOME') parser.add_argument('--debug', help="open Ansible debug option", action="store_true", default=False) parser.add_argument('--force-reinstall', help="delete existing Apache Tomcat and resinstall ZStack", action="store_true", default=False) parser.add_argument('--yum', help="Use ZStack predefined yum repositories. The valid options include: alibase,aliepel,163base,ustcepel,zstack-local. 
NOTE: only use it when you know exactly what it does.", default=None) parser.add_argument('--ssh-key', help="the path of private key for SSH login $host; if provided, Ansible will use the specified key as private key to SSH login the $host", default=None) def run(self, args): if not os.path.isabs(args.install_path): raise CtlError('%s is not an absolute path' % args.install_path) if not args.source_dir: args.source_dir = os.path.join(ctl.zstack_home, "../../../") if not os.path.isdir(args.source_dir): raise CtlError('%s is not an directory' % args.source_dir) if not args.yum: args.yum = get_yum_repo_from_property() apache_tomcat = None zstack = None apache_tomcat_zip_name = None for file in os.listdir(args.source_dir): full_path = os.path.join(args.source_dir, file) if file.startswith('apache-tomcat') and file.endswith('zip') and os.path.isfile(full_path): apache_tomcat = full_path apache_tomcat_zip_name = file if file == 'zstack.war': zstack = full_path if not apache_tomcat: raise CtlError('cannot find Apache Tomcat ZIP in %s, please use --source-dir to specify the directory containing the ZIP' % args.source_dir) if not zstack: raise CtlError('cannot find zstack.war in %s, please use --source-dir to specify the directory containing the WAR file' % args.source_dir) pypi_path = os.path.join(ctl.zstack_home, "static/pypi/") if not os.path.isdir(pypi_path): raise CtlError('cannot find %s, please make sure you have installed ZStack management node' % pypi_path) pypi_tar_path = os.path.join(ctl.zstack_home, "static/pypi.tar.bz") static_path = os.path.join(ctl.zstack_home, "static") shell('cd %s; tar jcf pypi.tar.bz pypi' % static_path) yaml = '''--- - hosts: $host remote_user: root vars: root: $install_path yum_repo: "$yum_repo" tasks: - name: check remote env on RedHat OS 6 when: ansible_os_family == 'RedHat' and ansible_distribution_version < '7' script: $pre_script_on_rh6 - name: prepare remote environment script: $pre_script - name: install dependencies on RedHat OS 
from user defined repo when: ansible_os_family == 'RedHat' and yum_repo != 'false' shell: yum clean metadata; yum --disablerepo=* --enablerepo={{yum_repo}} --nogpgcheck install -y dmidecode java-1.8.0-openjdk wget python-devel gcc autoconf tar gzip unzip python-pip openssh-clients sshpass bzip2 ntp ntpdate sudo libselinux-python python-setuptools iptables-services - name: install dependencies on RedHat OS from system repos when: ansible_os_family == 'RedHat' and yum_repo == 'false' shell: yum clean metadata; yum --nogpgcheck install -y dmidecode java-1.8.0-openjdk wget python-devel gcc autoconf tar gzip unzip python-pip openssh-clients sshpass bzip2 ntp ntpdate sudo libselinux-python python-setuptools iptables-services - name: set java 8 as default runtime when: ansible_os_family == 'RedHat' shell: update-alternatives --install /usr/bin/java java /usr/lib/jvm/jre-1.8.0/bin/java 0; update-alternatives --set java /usr/lib/jvm/jre-1.8.0/bin/java - name: add ppa source for openjdk-8 on Ubuntu 14.04 when: ansible_os_family == 'Debian' and ansible_distribution_version == '14.04' shell: add-apt-repository ppa:openjdk-r/ppa -y; apt-get update - name: install openjdk on Ubuntu 14.04 when: ansible_os_family == 'Debian' and ansible_distribution_version == '14.04' apt: pkg={{item}} update_cache=yes with_items: - openjdk-8-jdk - name: install openjdk on Ubuntu 16.04 when: ansible_os_family == 'Debian' and ansible_distribution_version == '16.04' apt: pkg={{item}} update_cache=yes with_items: - openjdk-8-jdk - name: set java 8 as default runtime when: ansible_os_family == 'Debian' and ansible_distribution_version == '14.04' shell: update-alternatives --install /usr/bin/java java /usr/lib/jvm/java-8-openjdk-amd64/jre/bin/java 0; update-alternatives --install /usr/bin/javac javac /usr/lib/jvm/java-8-openjdk-amd64/jre/bin/javac 0; update-alternatives --set java /usr/lib/jvm/java-8-openjdk-amd64/jre/bin/java; update-alternatives --set javac /usr/lib/jvm/java-8-openjdk-amd64/bin/javac 
- name: install dependencies Debian OS when: ansible_os_family == 'Debian' apt: pkg={{item}} update_cache=yes with_items: - wget - python-dev - gcc - autoconf - tar - gzip - unzip - python-pip - sshpass - bzip2 - ntp - ntpdate - sudo - python-setuptools - stat: path=/usr/bin/mysql register: mysql_path - name: install MySQL client for RedHat 6 from user defined repos when: ansible_os_family == 'RedHat' and ansible_distribution_version < '7' and yum_repo != 'false' and (mysql_path.stat.exists == False) shell: yum --disablerepo=* --enablerepo={{yum_repo}} --nogpgcheck install -y mysql - name: install MySQL client for RedHat 6 from system repo when: ansible_os_family == 'RedHat' and ansible_distribution_version < '7' and yum_repo == 'false' and (mysql_path.stat.exists == False) shell: yum --nogpgcheck install -y mysql - name: install MySQL client for RedHat 7 from user defined repos when: ansible_os_family == 'RedHat' and ansible_distribution_version >= '7' and yum_repo != 'false' and (mysql_path.stat.exists == False) shell: yum --disablerepo=* --enablerepo={{yum_repo}} --nogpgcheck install -y mariadb - name: install MySQL client for RedHat 7 from system repos when: ansible_os_family == 'RedHat' and ansible_distribution_version >= '7' and yum_repo == 'false' and (mysql_path.stat.exists == False) shell: yum --nogpgcheck install -y mariadb - name: install MySQL client for Ubuntu when: ansible_os_family == 'Debian' and (mysql_path.stat.exists == False) apt: pkg={{item}} with_items: - mysql-client - name: copy pypi tar file copy: src=$pypi_tar_path dest=$pypi_tar_path_dest - name: untar pypi shell: "cd /tmp/; tar jxf $pypi_tar_path_dest" - name: install pip from local source shell: "easy_install -i file://$pypi_path/simple --upgrade pip" - name: install ansible from local source pip: name="ansible" extra_args="-i file://$pypi_path/simple --ignore-installed --trusted-host localhost" - name: install virtualenv pip: name="virtualenv" extra_args="-i file://$pypi_path/simple 
--ignore-installed --trusted-host localhost" - name: copy Apache Tomcat copy: src=$apache_path dest={{root}}/$apache_tomcat_zip_name - name: copy zstack.war copy: src=$zstack_path dest={{root}}/zstack.war - name: install ZStack script: $post_script - name: copy zstack.properties copy: src=$properties_file dest={{root}}/apache-tomcat/webapps/zstack/WEB-INF/classes/zstack.properties - name: setup zstack account script: $setup_account ''' pre_script = ''' if [ -f /etc/redhat-release ] ; then grep ' 7' /etc/redhat-release if [ $$? -eq 0 ]; then [ -d /etc/yum.repos.d/ ] && [ ! -f /etc/yum.repos.d/epel.repo ] && echo -e "[epel]\nname=Extra Packages for Enterprise Linux \$$releasever - \$$basearce - mirrors.aliyun.com\nmirrorlist=https://mirrors.fedoraproject.org/metalink?repo=epel-7&arch=\$$basearch\nfailovermethod=priority\nenabled=1\ngpgcheck=0\n" > /etc/yum.repos.d/epel.repo else [ -d /etc/yum.repos.d/ ] && [ ! -f /etc/yum.repos.d/epel.repo ] && echo -e "[epel]\nname=Extra Packages for Enterprise Linux \$$releasever - \$$basearce - mirrors.aliyun.com\nmirrorlist=https://mirrors.fedoraproject.org/metalink?repo=epel-6&arch=\$$basearch\nfailovermethod=priority\nenabled=1\ngpgcheck=0\n" > /etc/yum.repos.d/epel.repo fi [ -d /etc/yum.repos.d/ ] && echo -e "#aliyun base\n[alibase]\nname=CentOS-\$$releasever - Base - mirrors.aliyun.com\nfailovermethod=priority\nbaseurl=http://mirrors.aliyun.com/centos/\$$releasever/os/\$$basearch/\ngpgcheck=0\nenabled=0\n \n#released updates \n[aliupdates]\nname=CentOS-\$$releasever - Updates - mirrors.aliyun.com\nfailovermethod=priority\nbaseurl=http://mirrors.aliyun.com/centos/\$$releasever/updates/\$$basearch/\nenabled=0\ngpgcheck=0\n \n[aliextras]\nname=CentOS-\$$releasever - Extras - mirrors.aliyun.com\nfailovermethod=priority\nbaseurl=http://mirrors.aliyun.com/centos/\$$releasever/extras/\$$basearch/\nenabled=0\ngpgcheck=0\n \n[aliepel]\nname=Extra Packages for Enterprise Linux \$$releasever - \$$basearce - 
mirrors.aliyun.com\nbaseurl=http://mirrors.aliyun.com/epel/\$$releasever/\$$basearch\nfailovermethod=priority\nenabled=0\ngpgcheck=0\n" > /etc/yum.repos.d/zstack-aliyun-yum.repo [ -d /etc/yum.repos.d/ ] && echo -e "#163 base\n[163base]\nname=CentOS-\$$releasever - Base - mirrors.163.com\nfailovermethod=priority\nbaseurl=http://mirrors.163.com/centos/\$$releasever/os/\$$basearch/\ngpgcheck=0\nenabled=0\n \n#released updates \n[163updates]\nname=CentOS-\$$releasever - Updates - mirrors.163.com\nfailovermethod=priority\nbaseurl=http://mirrors.163.com/centos/\$$releasever/updates/\$$basearch/\nenabled=0\ngpgcheck=0\n \n#additional packages that may be useful\n[163extras]\nname=CentOS-\$$releasever - Extras - mirrors.163.com\nfailovermethod=priority\nbaseurl=http://mirrors.163.com/centos/\$$releasever/extras/\$$basearch/\nenabled=0\ngpgcheck=0\n \n[ustcepel]\nname=Extra Packages for Enterprise Linux \$$releasever - \$$basearch - ustc \nbaseurl=http://centos.ustc.edu.cn/epel/\$$releasever/\$$basearch\nfailovermethod=priority\nenabled=0\ngpgcheck=0\n" > /etc/yum.repos.d/zstack-163-yum.repo fi whereis zstack-ctl if [ $$? -eq 0 ]; then zstack-ctl stop_node fi apache_path=$install_path/apache-tomcat if [[ -d $$apache_path ]] && [[ $force_resinstall -eq 0 ]]; then echo "found existing Apache Tomcat directory $$apache_path; please use --force-reinstall to delete it and re-install" exit 1 fi rm -rf $install_path mkdir -p $install_path ''' t = string.Template(pre_script) pre_script = t.substitute({ 'force_resinstall': int(args.force_reinstall), 'install_path': args.install_path }) fd, pre_script_path = tempfile.mkstemp(suffix='.sh') os.fdopen(fd, 'w').write(pre_script) pre_script_on_rh6 = ''' ZSTACK_INSTALL_LOG='/tmp/zstack_installation.log' rpm -qi python-crypto >/dev/null 2>&1 if [ $? -eq 0 ]; then echo "Management node remote installation failed. 
You need to manually remove python-crypto by \n\n \`rpm -ev python-crypto\` \n\n in remote management node; otherwise it will conflict with ansible's pycrypto." >>$ZSTACK_INSTALL_LOG exit 1 fi ''' t = string.Template(pre_script_on_rh6) fd, pre_script_on_rh6_path = tempfile.mkstemp(suffix='.sh') os.fdopen(fd, 'w').write(pre_script_on_rh6) def cleanup_pre_script(): os.remove(pre_script_path) os.remove(pre_script_on_rh6_path) self.install_cleanup_routine(cleanup_pre_script) post_script = ''' set -e filename=$apache_tomcat_zip_name foldername="$${filename%.*}" apache_path=$install_path/apache-tomcat unzip $apache -d $install_path ln -s $install_path/$$foldername $$apache_path unzip $zstack -d $$apache_path/webapps/zstack chmod a+x $$apache_path/bin/* cat >> $$apache_path/bin/setenv.sh <<EOF export CATALINA_OPTS=" -Djava.net.preferIPv4Stack=true -Dcom.sun.management.jmxremote=true" EOF install_script="$$apache_path/webapps/zstack/WEB-INF/classes/tools/install.sh" eval "bash $$install_script zstack-ctl" eval "bash $$install_script zstack-cli" set +e grep "ZSTACK_HOME" ~/.bashrc > /dev/null if [ $$? -eq 0 ]; then sed -i "s#export ZSTACK_HOME=.*#export ZSTACK_HOME=$$apache_path/webapps/zstack#" ~/.bashrc else echo "export ZSTACK_HOME=$$apache_path/webapps/zstack" >> ~/.bashrc fi which ansible-playbook &> /dev/null if [ $$? -ne 0 ]; then pip install -i file://$pypi_path/simple --trusted-host localhost ansible fi ''' t = string.Template(post_script) post_script = t.substitute({ 'install_path': args.install_path, 'apache': os.path.join(args.install_path, apache_tomcat_zip_name), 'zstack': os.path.join(args.install_path, 'zstack.war'), 'apache_tomcat_zip_name': apache_tomcat_zip_name, 'pypi_path': '/tmp/pypi/' }) fd, post_script_path = tempfile.mkstemp(suffix='.sh') os.fdopen(fd, 'w').write(post_script) def cleanup_post_script(): os.remove(post_script_path) self.install_cleanup_routine(cleanup_post_script) setup_account = '''id -u zstack >/dev/null 2>&1 if [ $$? 
class ShowConfiguration(Command):
    """zstack-ctl command that prints zstack.properties to the terminal."""

    def __init__(self):
        super(ShowConfiguration, self).__init__()
        self.name = "show_configuration"
        self.description = "a shortcut that prints contents of zstack.properties to screen"
        ctl.register_command(self)

    def run(self, args):
        # shell_no_pipe keeps the child's stdout attached to the terminal
        # so the file content goes straight to the user's screen.
        shell_no_pipe('cat %s' % ctl.properties_file_path)
class UnsetEnvironmentVariableCmd(Command):
    """zstack-ctl command that removes variables from the ctl env file."""

    NAME = 'unsetenv'

    def __init__(self):
        super(UnsetEnvironmentVariableCmd, self).__init__()
        self.name = self.NAME
        self.description = (
            'unset variables in %s' % SetEnvironmentVariableCmd.PATH
        )
        ctl.register_command(self)

    def run(self, args):
        env_path = SetEnvironmentVariableCmd.PATH
        # Nothing to unset when the env file was never created.
        if not os.path.exists(env_path):
            return
        if not ctl.extra_arguments:
            raise CtlError('please input a list of variable names you want to unset')
        prop_file = PropertyFile(env_path)
        prop_file.delete_properties(ctl.extra_arguments)
        info('unset zstack environment variables: %s' % ctl.extra_arguments)
env.read_property(key) if value: ret.append('%s=%s' % (key, value)) else: env = PropertyFile(SetEnvironmentVariableCmd.PATH) for k, v in env.read_all_properties(): ret.append('%s=%s' % (k, v)) info('\n'.join(ret)) class InstallWebUiCmd(Command): def __init__(self): super(InstallWebUiCmd, self).__init__() self.name = "install_ui" self.description = "install ZStack web UI" ctl.register_command(self) def install_argparse_arguments(self, parser): parser.add_argument('--host', help='target host IP, for example, 192.168.0.212, to install ZStack web UI; if omitted, it will be installed on local machine') parser.add_argument('--ssh-key', help="the path of private key for SSH login $host; if provided, Ansible will use the specified key as private key to SSH login the $host", default=None) parser.add_argument('--yum', help="Use ZStack predefined yum repositories. The valid options include: alibase,aliepel,163base,ustcepel,zstack-local. NOTE: only use it when you know exactly what it does.", default=None) parser.add_argument('--force', help="delete existing virtualenv and resinstall zstack ui and all dependencies", action="store_true", default=False) def _install_to_local(self, args): install_script = os.path.join(ctl.zstack_home, "WEB-INF/classes/tools/install.sh") if not os.path.isfile(install_script): raise CtlError('cannot find %s, please make sure you have installed ZStack management node' % install_script) info('found installation script at %s, start installing ZStack web UI' % install_script) if args.force: shell('bash %s zstack-dashboard force' % install_script) else: shell('bash %s zstack-dashboard' % install_script) def run(self, args): if not args.host: self._install_to_local(args) return if not args.yum: args.yum = get_yum_repo_from_property() tools_path = os.path.join(ctl.zstack_home, "WEB-INF/classes/tools/") if not os.path.isdir(tools_path): raise CtlError('cannot find %s, please make sure you have installed ZStack management node' % tools_path) ui_binary = 
None for l in os.listdir(tools_path): if l.startswith('zstack_dashboard'): ui_binary = l break if not ui_binary: raise CtlError('cannot find zstack-dashboard package under %s, please make sure you have installed ZStack management node' % tools_path) ui_binary_path = os.path.join(tools_path, ui_binary) pypi_path = os.path.join(ctl.zstack_home, "static/pypi/") if not os.path.isdir(pypi_path): raise CtlError('cannot find %s, please make sure you have installed ZStack management node' % pypi_path) pypi_tar_path = os.path.join(ctl.zstack_home, "static/pypi.tar.bz") if not os.path.isfile(pypi_tar_path): static_path = os.path.join(ctl.zstack_home, "static") os.system('cd %s; tar jcf pypi.tar.bz pypi' % static_path) yaml = '''--- - hosts: $host remote_user: root vars: virtualenv_root: /var/lib/zstack/virtualenv/zstack-dashboard yum_repo: "$yum_repo" tasks: - name: pre-install script when: ansible_os_family == 'RedHat' and yum_repo != 'false' script: $pre_install_script - name: install Python pip for RedHat OS from user defined repo when: ansible_os_family == 'RedHat' and yum_repo != 'false' shell: yum clean metadata; yum --disablerepo=* --enablerepo={{yum_repo}} --nogpgcheck install -y libselinux-python python-pip bzip2 python-devel gcc autoconf - name: install Python pip for RedHat OS from system repo when: ansible_os_family == 'RedHat' and yum_repo == 'false' shell: yum clean metadata; yum --nogpgcheck install -y libselinux-python python-pip bzip2 python-devel gcc autoconf - name: copy zstack-dashboard package copy: src=$src dest=$dest - name: copy pypi tar file copy: src=$pypi_tar_path dest=$pypi_tar_path_dest - name: untar pypi shell: "cd /tmp/; tar jxf $pypi_tar_path_dest" - name: install Python pip for Ubuntu when: ansible_os_family == 'Debian' apt: pkg={{item}} update_cache=yes with_items: - python-pip - iptables-persistent - name: install pip from local source shell: "cd $pypi_path/simple/pip/; pip install --ignore-installed pip*.tar.gz" - shell: virtualenv 
--version | grep "12.1.1" register: virtualenv_ret ignore_errors: True - name: install virtualenv pip: name=virtualenv version=12.1.1 extra_args="--ignore-installed --trusted-host localhost -i file://$pypi_path/simple" when: virtualenv_ret.rc != 0 - name: create virtualenv shell: "rm -rf {{virtualenv_root}} && virtualenv {{virtualenv_root}}" - name: install zstack-dashboard pip: name=$dest extra_args="--trusted-host localhost -i file://$pypi_path/simple" virtualenv="{{virtualenv_root}}" ''' pre_script = ''' if [ -f /etc/redhat-release ] ; then grep ' 7' /etc/redhat-release if [ $? -eq 0 ]; then [ -d /etc/yum.repos.d/ ] && [ ! -f /etc/yum.repos.d/epel.repo ] && echo -e "[epel]\nname=Extra Packages for Enterprise Linux \$releasever - \$basearce - mirrors.aliyun.com\nmirrorlist=https://mirrors.fedoraproject.org/metalink?repo=epel-7&arch=\$basearch\nfailovermethod=priority\nenabled=1\ngpgcheck=0\n" > /etc/yum.repos.d/epel.repo else [ -d /etc/yum.repos.d/ ] && [ ! -f /etc/yum.repos.d/epel.repo ] && echo -e "[epel]\nname=Extra Packages for Enterprise Linux \$releasever - \$basearce - mirrors.aliyun.com\nmirrorlist=https://mirrors.fedoraproject.org/metalink?repo=epel-6&arch=\$basearch\nfailovermethod=priority\nenabled=1\ngpgcheck=0\n" > /etc/yum.repos.d/epel.repo fi [ -d /etc/yum.repos.d/ ] && echo -e "#aliyun base\n[alibase]\nname=CentOS-\$releasever - Base - mirrors.aliyun.com\nfailovermethod=priority\nbaseurl=http://mirrors.aliyun.com/centos/\$releasever/os/\$basearch/\ngpgcheck=0\nenabled=0\n \n#released updates \n[aliupdates]\nname=CentOS-\$releasever - Updates - mirrors.aliyun.com\nfailovermethod=priority\nbaseurl=http://mirrors.aliyun.com/centos/\$releasever/updates/\$basearch/\nenabled=0\ngpgcheck=0\n \n[aliextras]\nname=CentOS-\$releasever - Extras - mirrors.aliyun.com\nfailovermethod=priority\nbaseurl=http://mirrors.aliyun.com/centos/\$releasever/extras/\$basearch/\nenabled=0\ngpgcheck=0\n \n[aliepel]\nname=Extra Packages for Enterprise Linux \$releasever - 
\$basearce - mirrors.aliyun.com\nbaseurl=http://mirrors.aliyun.com/epel/\$releasever/\$basearch\nfailovermethod=priority\nenabled=0\ngpgcheck=0\n" > /etc/yum.repos.d/zstack-aliyun-yum.repo [ -d /etc/yum.repos.d/ ] && echo -e "#163 base\n[163base]\nname=CentOS-\$releasever - Base - mirrors.163.com\nfailovermethod=priority\nbaseurl=http://mirrors.163.com/centos/\$releasever/os/\$basearch/\ngpgcheck=0\nenabled=0\n \n#released updates \n[163updates]\nname=CentOS-\$releasever - Updates - mirrors.163.com\nfailovermethod=priority\nbaseurl=http://mirrors.163.com/centos/\$releasever/updates/\$basearch/\nenabled=0\ngpgcheck=0\n \n#additional packages that may be useful\n[163extras]\nname=CentOS-\$releasever - Extras - mirrors.163.com\nfailovermethod=priority\nbaseurl=http://mirrors.163.com/centos/\$releasever/extras/\$basearch/\nenabled=0\ngpgcheck=0\n \n[ustcepel]\nname=Extra Packages for Enterprise Linux \$releasever - \$basearch - ustc \nbaseurl=http://centos.ustc.edu.cn/epel/\$releasever/\$basearch\nfailovermethod=priority\nenabled=0\ngpgcheck=0\n" > /etc/yum.repos.d/zstack-163-yum.repo fi ''' fd, pre_script_path = tempfile.mkstemp() os.fdopen(fd, 'w').write(pre_script) def cleanup_prescript(): os.remove(pre_script_path) self.install_cleanup_routine(cleanup_prescript) t = string.Template(yaml) if args.yum: yum_repo = args.yum else: yum_repo = 'false' yaml = t.substitute({ "src": ui_binary_path, "dest": os.path.join('/tmp', ui_binary), "host": args.host, 'pre_install_script': pre_script_path, 'pypi_tar_path': pypi_tar_path, 'pypi_tar_path_dest': '/tmp/pypi.tar.bz', 'pypi_path': '/tmp/pypi/', 'yum_folder': ctl.zstack_home, 'yum_repo': yum_repo }) ansible(yaml, args.host, ssh_key=args.ssh_key) class BootstrapCmd(Command): def __init__(self): super(BootstrapCmd, self).__init__() self.name = 'bootstrap' self.description = ( 'create user and group of "zstack" and add "zstack" to sudoers;' '\nthis command is only needed by installation script' ' and users that install ZStack 
manually' ) ctl.register_command(self) def need_zstack_user(self): return False def run(self, args): shell('id -u zstack 2>/dev/null || (useradd -d %s zstack -s /bin/false && mkdir -p %s && chown -R zstack.zstack %s)' % (ctl.USER_ZSTACK_HOME_DIR, ctl.USER_ZSTACK_HOME_DIR, ctl.USER_ZSTACK_HOME_DIR)) shell("grep 'zstack' /etc/sudoers || echo 'zstack ALL=(ALL) NOPASSWD: ALL' >> /etc/sudoers") shell('mkdir -p %s && chown zstack:zstack %s' % (ctl.USER_ZSTACK_HOME_DIR, ctl.USER_ZSTACK_HOME_DIR)) class UpgradeManagementNodeCmd(Command): def __init__(self): super(UpgradeManagementNodeCmd, self).__init__() self.name = "upgrade_management_node" self.description = 'upgrade the management node to a specified version' ctl.register_command(self) def install_argparse_arguments(self, parser): parser.add_argument('--host', help='IP or DNS name of the machine to upgrade the management node', default=None) parser.add_argument('--war-file', help='path to zstack.war. A HTTP/HTTPS url or a path to a local zstack.war', required=True) parser.add_argument('--debug', help="open Ansible debug option", action="store_true", default=False) parser.add_argument('--ssh-key', help="the path of private key for SSH login $host; if provided, Ansible will use the specified key as private key to SSH login the $host", default=None) def run(self, args): error_if_tool_is_missing('unzip') need_download = args.war_file.startswith('http') if need_download: error_if_tool_is_missing('wget') upgrade_tmp_dir = os.path.join(ctl.USER_ZSTACK_HOME_DIR, 'upgrade', time.strftime('%Y-%m-%d-%H-%M-%S', time.gmtime())) shell('mkdir -p %s' % upgrade_tmp_dir) property_file_backup_path = os.path.join(upgrade_tmp_dir, 'zstack.properties') class NewWarFilePath(object): self.path = None new_war = NewWarFilePath() if not need_download: new_war.path = expand_path(args.war_file) if not os.path.exists(new_war.path): raise CtlError('%s not found' % new_war.path) def local_upgrade(): def backup(): ctl.internal_run('save_config', 
'--save-to %s' % os.path.dirname(property_file_backup_path)) shell('cp -r %s %s' % (ctl.zstack_home, upgrade_tmp_dir)) info('backup %s to %s' % (ctl.zstack_home, upgrade_tmp_dir)) def download_war_if_needed(): if need_download: new_war.path = os.path.join(upgrade_tmp_dir, 'new', 'zstack.war') shell_no_pipe('wget --no-check-certificate %s -O %s' % (args.war_file, new_war.path)) info('downloaded new zstack.war to %s' % new_war.path) def stop_node(): info('start to stop the management node ...') ctl.internal_run('stop_node') def upgrade(): info('start to upgrade the management node ...') shell('rm -rf %s' % ctl.zstack_home) if ctl.zstack_home.endswith('/'): webapp_dir = os.path.dirname(os.path.dirname(ctl.zstack_home)) else: webapp_dir = os.path.dirname(ctl.zstack_home) shell('cp %s %s' % (new_war.path, webapp_dir)) ShellCmd('unzip %s -d zstack' % os.path.basename(new_war.path), workdir=webapp_dir)() #create local repo folder for possible zstack local yum repo zstack_dvd_repo = '%s/zstack/static/zstack-dvd' % webapp_dir shell('rm -f %s; ln -s /opt/zstack-dvd %s' % (zstack_dvd_repo, zstack_dvd_repo)) def restore_config(): info('restoring the zstack.properties ...') ctl.internal_run('restore_config', '--restore-from %s' % os.path.dirname(property_file_backup_path)) def install_tools(): info('upgrading zstack-cli, zstack-ctl; this may cost several minutes ...') install_script = os.path.join(ctl.zstack_home, "WEB-INF/classes/tools/install.sh") if not os.path.isfile(install_script): raise CtlError('cannot find %s, please make sure you have installed ZStack management node' % install_script) shell("bash %s zstack-cli" % install_script) shell("bash %s zstack-ctl" % install_script) info('successfully upgraded zstack-cli, zstack-ctl') def save_new_war(): sdir = os.path.join(ctl.zstack_home, "../../../") shell('yes | cp %s %s' % (new_war.path, sdir)) def chown_to_zstack(): info('change permission to user zstack') shell('chown -R zstack:zstack %s' % os.path.join(ctl.zstack_home, 
'../../')) backup() download_war_if_needed() stop_node() upgrade() restore_config() install_tools() save_new_war() chown_to_zstack() info('----------------------------------------------\n' 'Successfully upgraded the ZStack management node to a new version.\n' 'We backup the old zstack as follows:\n' '\tzstack.properties: %s\n' '\tzstack folder: %s\n' 'Please test your new ZStack. If everything is OK and stable, you can manually delete those backup by deleting %s.\n' 'Otherwise you can use them to rollback to the previous version\n' '-----------------------------------------------\n' % (property_file_backup_path, os.path.join(upgrade_tmp_dir, 'zstack'), upgrade_tmp_dir)) def remote_upgrade(): need_copy = 'true' src_war = new_war.path dst_war = '/tmp/zstack.war' if need_download: need_copy = 'false' src_war = args.war_file dst_war = args.war_file upgrade_script = ''' zstack-ctl upgrade_management_node --war-file=$war_file if [ $$? -ne 0 ]; then echo 'failed to upgrade the remote management node' exit 1 fi if [ "$need_copy" == "true" ]; then rm -f $war_file fi ''' t = string.Template(upgrade_script) upgrade_script = t.substitute({ 'war_file': dst_war, 'need_copy': need_copy }) fd, upgrade_script_path = tempfile.mkstemp(suffix='.sh') os.fdopen(fd, 'w').write(upgrade_script) def cleanup_upgrade_script(): os.remove(upgrade_script_path) self.install_cleanup_routine(cleanup_upgrade_script) yaml = '''--- - hosts: $host remote_user: root vars: need_copy: "$need_copy" tasks: - name: copy zstack.war to remote copy: src=$src_war dest=$dst_war when: need_copy == 'true' - name: upgrade management node script: $upgrade_script register: output ignore_errors: yes - name: failure fail: msg="failed to upgrade the remote management node. 
{{ output.stdout }} {{ output.stderr }}" when: output.rc != 0 ''' t = string.Template(yaml) yaml = t.substitute({ "src_war": src_war, "dst_war": dst_war, "host": args.host, "need_copy": need_copy, "upgrade_script": upgrade_script_path }) info('start to upgrade the remote management node; the process may cost several minutes ...') ansible(yaml, args.host, args.debug, ssh_key=args.ssh_key) info('upgraded the remote management node successfully') if args.host: remote_upgrade() else: local_upgrade() class UpgradeMultiManagementNodeCmd(Command): logger_dir = '/var/log/zstack' logger_file = 'zstack-ctl.log' SpinnerInfo.spinner_status = {'stop_local':False, 'upgrade_local':False , 'start_local':False, 'upgrade':False, 'stop':False, 'start':False} def __init__(self): super(UpgradeMultiManagementNodeCmd, self).__init__() self.name = "upgrade_multi_management_node" self.description = 'upgrade the management cluster' ctl.register_command(self) def start_mn(self, host_post_info): command = "zstack-ctl start_node && zstack-ctl start_ui" #Ansible finish command will lead mn stop, so use ssh native connection to start mn (status, output) = commands.getstatusoutput("ssh -o StrictHostKeyChecking=no -i %s root@%s '%s'" % (host_post_info.private_key, host_post_info.host, command)) if status != 0: error("Something wrong on host: %s\n %s" % (host_post_info.host, output)) logger.debug("[ HOST: %s ] SUCC: shell command: '%s' successfully" % (host_post_info.host, command)) def install_argparse_arguments(self, parser): parser.add_argument('--installer-bin','--bin', help="The new version installer package with absolute path", required=True) parser.add_argument('--force', '-F', help="Force upgrade when database upgrading dry-run failed", action='store_true', default=False) def run(self, args): if os.path.isfile(args.installer_bin) is not True: error("Didn't find install package %s" % args.installer_bin) create_log(UpgradeMultiManagementNodeCmd.logger_dir, 
UpgradeMultiManagementNodeCmd.logger_file) mn_vo = get_host_list("ManagementNodeVO") local_mn_ip = get_default_ip() mn_ip_list = [] cmd = create_check_mgmt_node_command() cmd(False) if 'true' not in cmd.stdout: error("Local management node status is not Running, can't make sure ZStack status is healthy") for mn in mn_vo: mn_ip_list.append(mn['hostName']) mn_ip_list.insert(0, mn_ip_list.pop(mn_ip_list.index(local_mn_ip))) all_mn_ip = ' '.join(mn_ip_list) info(" Will upgrade all 'Running' management nodes: %s" % colored(all_mn_ip,'green')) ssh_key = ctl.zstack_home + "/WEB-INF/classes/ansible/rsaKeys/id_rsa.pub" private_key = ssh_key.split('.')[0] inventory_file = ctl.zstack_home + "/../../../ansible/hosts" for mn_ip in mn_ip_list: if mn_ip != local_mn_ip: host_info = HostPostInfo() host_info.host = mn_ip host_info.private_key = private_key host_info.host_inventory = inventory_file host_reachable = check_host_reachable(host_info, True) if host_reachable is True: spinner_info = SpinnerInfo() spinner_info.output = "Stop remote management node %s" % mn_ip spinner_info.name = "stop_%s" % mn_ip SpinnerInfo.spinner_status['stop_%s' % mn_ip] = False SpinnerInfo.spinner_status = reset_dict_value(SpinnerInfo.spinner_status,False) SpinnerInfo.spinner_status['stop_%s' % mn_ip] = True ZstackSpinner(spinner_info) command = "zstack-ctl stop_node" run_remote_command(command, host_info) else: # running management node will block upgrade process error("Management node %s is unreachable, please sync public key %s to other management nodes" % (mn_ip, ssh_key)) else: spinner_info = SpinnerInfo() spinner_info.output = "Stop local management node %s" % mn_ip spinner_info.name = "stop_local" SpinnerInfo.spinner_status['stop_local'] = False SpinnerInfo.spinner_status = reset_dict_value(SpinnerInfo.spinner_status,False) SpinnerInfo.spinner_status['stop_local'] = True ZstackSpinner(spinner_info) command = "zstack-ctl stop_node" shell(command) for mn_ip in mn_ip_list: host_info = 
HostPostInfo() host_info.host = mn_ip host_info.private_key = private_key host_info.host_inventory = inventory_file if mn_ip == local_mn_ip: spinner_info = SpinnerInfo() spinner_info.output = "Upgrade management node on localhost(%s)" % local_mn_ip spinner_info.name = 'upgrade_local' SpinnerInfo.spinner_status = reset_dict_value(SpinnerInfo.spinner_status,False) SpinnerInfo.spinner_status['upgrade_local'] = True ZstackSpinner(spinner_info) if args.force is True: shell("rm -rf /tmp/zstack_upgrade.lock && bash %s -u -F" % args.installer_bin) else: shell("rm -rf /tmp/zstack_upgrade.lock && bash %s -u" % args.installer_bin) spinner_info = SpinnerInfo() spinner_info.output = "Start management node on localhost(%s)" % local_mn_ip spinner_info.name = 'start' SpinnerInfo.spinner_status = reset_dict_value(SpinnerInfo.spinner_status,False) SpinnerInfo.spinner_status['start_local'] = True ZstackSpinner(spinner_info) shell("zstack-ctl start_node && zstack-ctl start_ui") else: spinner_info = SpinnerInfo() spinner_info.output = "Upgrade management node on host %s" % mn_ip spinner_info.name = 'upgrade' SpinnerInfo.spinner_status = reset_dict_value(SpinnerInfo.spinner_status,False) SpinnerInfo.spinner_status['upgrade'] = True ZstackSpinner(spinner_info) war_file = ctl.zstack_home + "/../../../apache-tomcat-7.0.35/webapps/zstack.war" ssh_key = ctl.zstack_home + "/WEB-INF/classes/ansible/rsaKeys/id_rsa" status,output = commands.getstatusoutput("zstack-ctl upgrade_management_node --host %s --ssh-key %s --war-file %s" % (mn_ip, ssh_key, war_file)) if status != 0: error(output) spinner_info = SpinnerInfo() spinner_info.output = "Start management node on host %s" % mn_ip spinner_info.name = 'start' SpinnerInfo.spinner_status = reset_dict_value(SpinnerInfo.spinner_status,False) SpinnerInfo.spinner_status['start'] = True ZstackSpinner(spinner_info) self.start_mn(host_info) SpinnerInfo.spinner_status = reset_dict_value(SpinnerInfo.spinner_status, False) time.sleep(0.3) info(colored("All 
management nodes upgrade successfully!",'blue')) class UpgradeDbCmd(Command): def __init__(self): super(UpgradeDbCmd, self).__init__() self.name = 'upgrade_db' self.description = ( 'upgrade the database from current version to a new version' ) ctl.register_command(self) def install_argparse_arguments(self, parser): parser.add_argument('--force', help='bypass management nodes status check.' '\nNOTE: only use it when you know exactly what it does', action='store_true', default=False) parser.add_argument('--no-backup', help='do NOT backup the database. If the database is very large and you have manually backup it, using this option will fast the upgrade process. [DEFAULT] false', default=False) parser.add_argument('--dry-run', help='Check if db could be upgraded. [DEFAULT] not set', action='store_true', default=False) def run(self, args): error_if_tool_is_missing('mysqldump') error_if_tool_is_missing('mysql') db_url = ctl.get_db_url() db_url_params = db_url.split('//') db_url = db_url_params[0] + '//' + db_url_params[1].split('/')[0] if 'zstack' not in db_url: db_url = '%s/zstack' % db_url.rstrip('/') db_hostname, db_port, db_user, db_password = ctl.get_live_mysql_portal() flyway_path = os.path.join(ctl.zstack_home, 'WEB-INF/classes/tools/flyway-3.2.1/flyway') if not os.path.exists(flyway_path): raise CtlError('cannot find %s. Have you run upgrade_management_node?' % flyway_path) upgrading_schema_dir = os.path.join(ctl.zstack_home, 'WEB-INF/classes/db/upgrade/') if not os.path.exists(upgrading_schema_dir): raise CtlError('cannot find %s. Have you run upgrade_management_node?' % upgrading_schema_dir) ctl.check_if_management_node_has_stopped(args.force) if args.dry_run: info('Dry run finished. Database could be upgraded. 
') return True def backup_current_database(): if args.no_backup: return info('start to backup the database ...') db_backup_path = os.path.join(ctl.USER_ZSTACK_HOME_DIR, 'db_backup', time.strftime('%Y-%m-%d-%H-%M-%S', time.gmtime()), 'backup.sql') shell('mkdir -p %s' % os.path.dirname(db_backup_path)) if db_password: shell('mysqldump -u %s -p%s --host %s --port %s zstack > %s' % (db_user, db_password, db_hostname, db_port, db_backup_path)) else: shell('mysqldump -u %s --host %s --port %s zstack > %s' % (db_user, db_hostname, db_port, db_backup_path)) info('successfully backup the database to %s' % db_backup_path) def create_schema_version_table_if_needed(): if db_password: out = shell('''mysql -u %s -p%s --host %s --port %s -t zstack -e "show tables like 'schema_version'"''' % (db_user, db_password, db_hostname, db_port)) else: out = shell('''mysql -u %s --host %s --port %s -t zstack -e "show tables like 'schema_version'"''' % (db_user, db_hostname, db_port)) if 'schema_version' in out: return info('version table "schema_version" is not existing; initializing a new version table first') if db_password: shell_no_pipe('bash %s baseline -baselineVersion=0.6 -baselineDescription="0.6 version" -user=%s -password=%s -url=%s' % (flyway_path, db_user, db_password, db_url)) else: shell_no_pipe('bash %s baseline -baselineVersion=0.6 -baselineDescription="0.6 version" -user=%s -url=%s' % (flyway_path, db_user, db_url)) def migrate(): schema_path = 'filesystem:%s' % upgrading_schema_dir if db_password: shell_no_pipe('bash %s migrate -outOfOrder=true -user=%s -password=%s -url=%s -locations=%s' % (flyway_path, db_user, db_password, db_url, schema_path)) else: shell_no_pipe('bash %s migrate -outOfOrder=true -user=%s -url=%s -locations=%s' % (flyway_path, db_user, db_url, schema_path)) info('Successfully upgraded the database to the latest version.\n') backup_current_database() create_schema_version_table_if_needed() migrate() class UpgradeCtlCmd(Command): def __init__(self): 
super(UpgradeCtlCmd, self).__init__() self.name = 'upgrade_ctl' self.description = ( 'upgrade the zstack-ctl to a new version' ) ctl.register_command(self) def install_argparse_arguments(self, parser): parser.add_argument('--package', help='the path to the new zstack-ctl package', required=True) def run(self, args): error_if_tool_is_missing('pip') path = expand_path(args.package) if not os.path.exists(path): raise CtlError('%s not found' % path) pypi_path = os.path.join(ctl.zstack_home, "static/pypi/") if not os.path.isdir(pypi_path): raise CtlError('cannot find %s, please make sure you have installed ZStack management node' % pypi_path) install_script = '''set -e which virtualenv &>/dev/null if [ $$? != 0 ]; then pip install -i file://$pypi_path/simple --trusted-host localhost virtualenv fi CTL_VIRENV_PATH=/var/lib/zstack/virtualenv/zstackctl rm -rf $$CTL_VIRENV_PATH virtualenv $$CTL_VIRENV_PATH . $$CTL_VIRENV_PATH/bin/activate pip install -i file://$pypi_path/simple --trusted-host --ignore-installed $package || exit 1 chmod +x /usr/bin/zstack-ctl ''' script(install_script, {"pypi_path": pypi_path, "package": args.package}) info('successfully upgraded zstack-ctl to %s' % args.package) class RollbackManagementNodeCmd(Command): def __init__(self): super(RollbackManagementNodeCmd, self).__init__() self.name = "rollback_management_node" self.description = "rollback the management node to a previous version if the upgrade fails" ctl.register_command(self) def install_argparse_arguments(self, parser): parser.add_argument('--host', help='the IP or DNS name of machine to rollback the management node') parser.add_argument('--war-file', help='path to zstack.war. 
A HTTP/HTTPS url or a path to a local zstack.war', required=True) parser.add_argument('--debug', help="open Ansible debug option", action="store_true", default=False) parser.add_argument('--ssh-key', help="the path of private key for SSH login $host; if provided, Ansible will use the specified key as private key to SSH login the $host", default=None) parser.add_argument('--property-file', help="the path to zstack.properties. If omitted, the current zstack.properties will be used", default=None) def run(self, args): error_if_tool_is_missing('unzip') rollback_tmp_dir = os.path.join(ctl.USER_ZSTACK_HOME_DIR, 'rollback', time.strftime('%Y-%m-%d-%H-%M-%S', time.gmtime())) shell('mkdir -p %s' % rollback_tmp_dir) need_download = args.war_file.startswith('http') class Info(object): def __init__(self): self.war_path = None self.property_file = None rollbackinfo = Info() def local_rollback(): def backup_current_zstack(): info('start to backup the current zstack ...') shell('cp -r %s %s' % (ctl.zstack_home, rollback_tmp_dir)) info('backup %s to %s' % (ctl.zstack_home, rollback_tmp_dir)) info('successfully backup the current zstack to %s' % os.path.join(rollback_tmp_dir, os.path.basename(ctl.zstack_home))) def download_war_if_needed(): if need_download: rollbackinfo.war_path = os.path.join(rollback_tmp_dir, 'zstack.war') shell_no_pipe('wget --no-check-certificate %s -O %s' % (args.war_file, rollbackinfo.war_path)) info('downloaded zstack.war to %s' % rollbackinfo.war_path) else: rollbackinfo.war_path = expand_path(args.war_file) if not os.path.exists(rollbackinfo.war_path): raise CtlError('%s not found' % rollbackinfo.war_path) def save_property_file_if_needed(): if not args.property_file: ctl.internal_run('save_config', '--save-to %s' % rollback_tmp_dir) rollbackinfo.property_file = os.path.join(rollback_tmp_dir, 'zstack.properties') else: rollbackinfo.property_file = args.property_file if not os.path.exists(rollbackinfo.property_file): raise CtlError('%s not found' % 
rollbackinfo.property_file) def stop_node(): info('start to stop the management node ...') ctl.internal_run('stop_node') def rollback(): info('start to rollback the management node ...') shell('rm -rf %s' % ctl.zstack_home) shell('unzip %s -d %s' % (rollbackinfo.war_path, ctl.zstack_home)) def restore_config(): info('restoring the zstack.properties ...') ctl.internal_run('restore_config', '--restore-from %s' % rollbackinfo.property_file) def install_tools(): info('rollback zstack-cli, zstack-ctl to the previous version. This may cost several minutes ...') install_script = os.path.join(ctl.zstack_home, "WEB-INF/classes/tools/install.sh") if not os.path.isfile(install_script): raise CtlError('cannot find %s, please make sure you have installed ZStack management node' % install_script) shell("bash %s zstack-cli" % install_script) shell("bash %s zstack-ctl" % install_script) info('successfully upgraded zstack-cli, zstack-ctl') backup_current_zstack() download_war_if_needed() save_property_file_if_needed() stop_node() rollback() restore_config() install_tools() info('----------------------------------------------\n' 'Successfully rollback the ZStack management node to a previous version.\n' 'We backup the current zstack as follows:\n' '\tzstack.properties: %s\n' '\tzstack folder: %s\n' 'Please test your ZStack. If everything is OK and stable, you can manually delete those backup by deleting %s.\n' '-----------------------------------------------\n' % (rollbackinfo.property_file, os.path.join(rollback_tmp_dir, os.path.basename(ctl.zstack_home)), rollback_tmp_dir)) def remote_rollback(): error_if_tool_is_missing('wget') need_copy = 'true' src_war = rollbackinfo.war_path dst_war = '/tmp/zstack.war' if need_download: need_copy = 'false' src_war = args.war_file dst_war = args.war_file rollback_script = ''' zstack-ctl rollback_management_node --war-file=$war_file if [ $$? 
-ne 0 ]; then echo 'failed to rollback the remote management node' exit 1 fi if [ "$need_copy" == "true" ]; then rm -f $war_file fi ''' t = string.Template(rollback_script) rollback_script = t.substitute({ 'war_file': dst_war, 'need_copy': need_copy }) fd, rollback_script_path = tempfile.mkstemp(suffix='.sh') os.fdopen(fd, 'w').write(rollback_script) def cleanup_rollback_script(): os.remove(rollback_script_path) self.install_cleanup_routine(cleanup_rollback_script) yaml = '''--- - hosts: $host remote_user: root vars: need_copy: "$need_copy" tasks: - name: copy zstack.war to remote copy: src=$src_war dest=$dst_war when: need_copy == 'true' - name: rollback the management node script: $rollback_script register: output ignore_errors: yes - name: failure fail: msg="failed to rollback the remote management node. {{ output.stdout }} {{ output.stderr }}" when: output.rc != 0 ''' t = string.Template(yaml) yaml = t.substitute({ "src_war": src_war, "dst_war": dst_war, "host": args.host, "need_copy": need_copy, "rollback_script": rollback_script_path }) info('start to rollback the remote management node; the process may cost several minutes ...') ansible(yaml, args.host, args.debug, ssh_key=args.ssh_key) info('successfully rollback the remote management node') if args.host: remote_rollback() else: local_rollback() class RollbackDatabaseCmd(Command): def __init__(self): super(RollbackDatabaseCmd, self).__init__() self.name = 'rollback_db' self.description = "rollback the database to the previous version if the upgrade fails" ctl.register_command(self) def install_argparse_arguments(self, parser): parser.add_argument('--db-dump', help="the previous database dump file", required=True) parser.add_argument('--root-password', help="the password for mysql root user. [DEFAULT] empty password") parser.add_argument('--force', help='bypass management nodes status check.' 
'\nNOTE: only use it when you know exactly what it does', action='store_true', default=False) def run(self, args): error_if_tool_is_missing('mysql') ctl.check_if_management_node_has_stopped(args.force) if not os.path.exists(args.db_dump): raise CtlError('%s not found' % args.db_dump) host, port, _, _ = ctl.get_live_mysql_portal() if args.root_password: cmd = ShellCmd('mysql -u root -p%s --host %s --port %s -e "select 1"' % (args.root_password, host, port)) else: cmd = ShellCmd('mysql -u root --host %s --port %s -e "select 1"' % (host, port)) cmd(False) if cmd.return_code != 0: error_not_exit('failed to test the mysql server. You may have provided a wrong password of the root user. Please use --root-password to provide the correct password') cmd.raise_error() info('start to rollback the database ...') if args.root_password: shell('mysql -u root -p%s --host %s --port %s -t zstack < %s' % (args.root_password, host, port, args.db_dump)) else: shell('mysql -u root --host %s --port %s -t zstack < %s' % (host, port, args.db_dump)) info('successfully rollback the database to the dump file %s' % args.db_dump) class StopUiCmd(Command): def __init__(self): super(StopUiCmd, self).__init__() self.name = 'stop_ui' self.description = "stop UI server on the local or remote host" ctl.register_command(self) def install_argparse_arguments(self, parser): parser.add_argument('--host', help="UI server IP. 
[DEFAULT] localhost", default='localhost') def _remote_stop(self, host): cmd = '/etc/init.d/zstack-dashboard stop' ssh_run_no_pipe(host, cmd) def run(self, args): if args.host != 'localhost': self._remote_stop(args.host) return pidfile = '/var/run/zstack/zstack-dashboard.pid' if os.path.exists(pidfile): with open(pidfile, 'r') as fd: pid = fd.readline() pid = pid.strip(' \t\n\r') shell('kill %s >/dev/null 2>&1' % pid, is_exception=False) def stop_all(): pid = find_process_by_cmdline('zstack_dashboard') if pid: shell('kill -9 %s >/dev/null 2>&1' % pid) stop_all() else: return stop_all() info('successfully stopped the UI server') class UiStatusCmd(Command): def __init__(self): super(UiStatusCmd, self).__init__() self.name = "ui_status" self.description = "check the UI server status on the local or remote host." ctl.register_command(self) def install_argparse_arguments(self, parser): parser.add_argument('--host', help="UI server IP. [DEFAULT] localhost", default='localhost') parser.add_argument('--quiet', '-q', help='Do not log this action.', action='store_true', default=False) def _remote_status(self, host): cmd = '/etc/init.d/zstack-dashboard status' ssh_run_no_pipe(host, cmd) def run(self, args): self.quiet = args.quiet if args.host != 'localhost': self._remote_status(args.host) return ha_info_file = '/var/lib/zstack/ha/ha.yaml' pidfile = '/var/run/zstack/zstack-dashboard.pid' portfile = '/var/run/zstack/zstack-dashboard.port' if os.path.exists(pidfile): with open(pidfile, 'r') as fd: pid = fd.readline() pid = pid.strip(' \t\n\r') check_pid_cmd = ShellCmd('ps -p %s > /dev/null' % pid) check_pid_cmd(is_exception=False) if check_pid_cmd.return_code == 0: if os.path.exists(ha_info_file): with open(ha_info_file, 'r') as fd2: ha_conf = yaml.load(fd2) if check_ip_port(ha_conf['vip'], 8888): info('UI status: %s [PID:%s] http://%s:8888' % (colored('Running', 'green'), pid, ha_conf['vip'])) else: info('UI status: %s' % colored('Unknown', 'yellow')) return default_ip = 
get_default_ip() if not default_ip: info('UI status: %s [PID:%s]' % (colored('Running', 'green'), pid)) else: if os.path.exists(portfile): with open(portfile, 'r') as fd2: port = fd2.readline() port = port.strip(' \t\n\r') else: port = 5000 info('UI status: %s [PID:%s] http://%s:%s' % (colored('Running', 'green'), pid, default_ip, port)) return pid = find_process_by_cmdline('zstack_dashboard') if pid: info('UI status: %s [PID: %s]' % (colored('Zombie', 'yellow'), pid)) else: info('UI status: %s [PID: %s]' % (colored('Stopped', 'red'), pid)) class InstallLicenseCmd(Command): def __init__(self): super(InstallLicenseCmd, self).__init__() self.name = "install_license" self.description = "install zstack license" ctl.register_command(self) def install_argparse_arguments(self, parser): parser.add_argument('--license', '-f', help="path to the license file", required=True) parser.add_argument('--prikey', help="[OPTIONAL] the path to the private key used to generate license request") def run(self, args): lpath = expand_path(args.license) if not os.path.isfile(lpath): raise CtlError('cannot find the license file at %s' % args.license) ppath = None if args.prikey: ppath = expand_path(args.prikey) if not os.path.isfile(ppath): raise CtlError('cannot find the private key file at %s' % args.prikey) license_folder = '/var/lib/zstack/license' shell('''mkdir -p %s''' % license_folder) shell('''chown zstack:zstack %s''' % license_folder) shell('''yes | cp %s %s/license.txt''' % (lpath, license_folder)) shell('''chown zstack:zstack %s/license.txt''' % license_folder) info("successfully installed the license file to %s/license.txt" % license_folder) if ppath: shell('''yes | cp %s %s/pri.key''' % (ppath, license_folder)) shell('''chown zstack:zstack %s/pri.key''' % license_folder) info("successfully installed the private key file to %s/pri.key" % license_folder) class StartUiCmd(Command): PID_FILE = '/var/run/zstack/zstack-dashboard.pid' def __init__(self): super(StartUiCmd, 
self).__init__() self.name = "start_ui" self.description = "start UI server on the local or remote host" ctl.register_command(self) if not os.path.exists(os.path.dirname(self.PID_FILE)): shell("mkdir -p %s" % os.path.dirname(self.PID_FILE)) shell("mkdir -p /var/log/zstack") def install_argparse_arguments(self, parser): parser.add_argument('--host', help="UI server IP. [DEFAULT] localhost", default='localhost') parser.add_argument('--port', help="UI server port. [DEFAULT] 5000", default='5000') def _remote_start(self, host, params): cmd = '/etc/init.d/zstack-dashboard start --rabbitmq %s' % params ssh_run_no_pipe(host, cmd) info('successfully start the UI server on the remote host[%s]' % host) def _check_status(self, port): if os.path.exists(self.PID_FILE): with open(self.PID_FILE, 'r') as fd: pid = fd.readline() pid = pid.strip(' \t\n\r') check_pid_cmd = ShellCmd('ps -p %s > /dev/null' % pid) check_pid_cmd(is_exception=False) if check_pid_cmd.return_code == 0: default_ip = get_default_ip() if not default_ip: info('UI server is still running[PID:%s]' % pid) else: info('UI server is still running[PID:%s], http://%s:%s' % (pid, default_ip, port)) return False pid = find_process_by_cmdline('zstack_dashboard') if pid: info('found a zombie UI server[PID:%s], kill it and start a new one' % pid) shell('kill -9 %s > /dev/null' % pid) return True def run(self, args): ips = ctl.read_property_list("UI.vip.") if not ips: ips = ctl.read_property_list("CloudBus.serverIp.") if not ips: raise CtlError('no RabbitMQ IPs found in %s. The IPs should be configured as CloudBus.serverIp.0, CloudBus.serverIp.1 ... CloudBus.serverIp.N' % ctl.properties_file_path) ips = [v for k, v in ips] username = ctl.read_property("CloudBus.rabbitmqUsername") password = ctl.read_property("CloudBus.rabbitmqPassword") if username and not password: raise CtlError('CloudBus.rabbitmqUsername is configured but CloudBus.rabbitmqPassword is not. They must be both set or not set. 
Check %s' % ctl.properties_file_path) if not username and password: raise CtlError('CloudBus.rabbitmqPassword is configured but CloudBus.rabbitmqUsername is not. They must be both set or not set. Check %s' % ctl.properties_file_path) if username and password: urls = ["%s:%s@%s" % (username, password, ip) for ip in ips] else: urls = ips param = ','.join(urls) if args.host != 'localhost': self._remote_start(args.host, param) return virtualenv = '/var/lib/zstack/virtualenv/zstack-dashboard' if not os.path.exists(virtualenv): raise CtlError('%s not found. Are you sure the UI server is installed on %s?' % (virtualenv, args.host)) if not self._check_status(args.port): return distro = platform.dist()[0] if distro == 'centos': shell('iptables-save | grep -- "-A INPUT -p tcp -m tcp --dport %s -j ACCEPT" > /dev/null || (iptables -I INPUT -p tcp -m tcp --dport 5000 -j ACCEPT && service iptables save)' % args.port) elif distro == 'Ubuntu': shell('iptables-save | grep -- "-A INPUT -p tcp -m tcp --dport %s -j ACCEPT" > /dev/null || (iptables -I INPUT -p tcp -m tcp --dport 5000 -j ACCEPT && /etc/init.d/iptables-persistent save)' % args.port) else: shell('iptables-save | grep -- "-A INPUT -p tcp -m tcp --dport %s -j ACCEPT" > /dev/null || iptables -I INPUT -p tcp -m tcp --dport 5000 -j ACCEPT ' % args.port) scmd = '. %s/bin/activate\nZSTACK_DASHBOARD_PORT=%s nohup python -c "from zstack_dashboard import web; web.main()" --rabbitmq %s >/var/log/zstack/zstack-dashboard.log 2>&1 </dev/null &' % (virtualenv, args.port, param) script(scmd, no_pipe=True) @loop_until_timeout(5, 0.5) def write_pid(): pid = find_process_by_cmdline('zstack_dashboard') if pid: with open(self.PID_FILE, 'w') as fd: fd.write(str(pid)) return True else: return False write_pid() pid = find_process_by_cmdline('zstack_dashboard') if not pid: info('fail to start UI server on the local host. Use zstack-ctl start_ui to restart it. 
zstack UI log could be found in /var/log/zstack/zstack-dashboard.log') return False default_ip = get_default_ip() if not default_ip: info('successfully started UI server on the local host, PID[%s]' % pid) else: info('successfully started UI server on the local host, PID[%s], http://%s:%s' % (pid, default_ip, args.port)) os.system('mkdir -p /var/run/zstack/') with open('/var/run/zstack/zstack-dashboard.port', 'w') as fd: fd.write(args.port) def main(): AddManagementNodeCmd() BootstrapCmd() ChangeIpCmd() CollectLogCmd() ConfigureCmd() DumpMysqlCmd() ChangeMysqlPasswordCmd() DeployDBCmd() GetEnvironmentVariableCmd() InstallWebUiCmd() InstallHACmd() InstallDbCmd() InstallRabbitCmd() InstallManagementNodeCmd() InstallLicenseCmd() ShowConfiguration() SetEnvironmentVariableCmd() RollbackManagementNodeCmd() RollbackDatabaseCmd() ResetRabbitCmd() RestoreConfigCmd() RestartNodeCmd() RestoreMysqlCmd() RecoverHACmd() ShowStatusCmd() StartCmd() StopCmd() SaveConfigCmd() StartUiCmd() StopUiCmd() StartAllCmd() StopAllCmd() TailLogCmd() UiStatusCmd() UnsetEnvironmentVariableCmd() UpgradeManagementNodeCmd() UpgradeMultiManagementNodeCmd() UpgradeDbCmd() UpgradeCtlCmd() UpgradeHACmd() try: ctl.run() except CtlError as e: if ctl.verbose: error_not_exit(traceback.format_exc()) error(str(e)) if __name__ == '__main__': main()<|fim▁end|>
ha_config_content = None
<|file_name|>disjoint_set.py<|end_file_name|><|fim▁begin|>""" Disjoint set. Reference: https://en.wikipedia.org/wiki/Disjoint-set_data_structure """ class Node: def __init__(self, data: int) -> None: self.data = data self.rank: int self.parent: Node def make_set(x: Node) -> None: """ Make x as a set. """ # rank is the distance from x to its' parent # root's rank is 0 x.rank = 0 x.parent = x def union_set(x: Node, y: Node) -> None: """ Union of two sets. set with bigger rank should be parent, so that the disjoint set tree will be more flat. """ x, y = find_set(x), find_set(y) if x == y: return elif x.rank > y.rank: y.parent = x else: x.parent = y if x.rank == y.rank: y.rank += 1 def find_set(x: Node) -> Node: """ Return the parent of x """ if x != x.parent: x.parent = find_set(x.parent) return x.parent def find_python_set(node: Node) -> set: """ Return a Python Standard Library set that contains i. """ sets = ({0, 1, 2}, {3, 4, 5}) for s in sets: if node.data in s: return s raise ValueError(f"{node.data} is not in {sets}") def test_disjoint_set() -> None: """ >>> test_disjoint_set() """ vertex = [Node(i) for i in range(6)] for v in vertex: make_set(v) union_set(vertex[0], vertex[1]) union_set(vertex[1], vertex[2]) union_set(vertex[3], vertex[4]) union_set(vertex[3], vertex[5]) <|fim▁hole|> else: assert find_set(node0) == find_set(node1) if __name__ == "__main__": test_disjoint_set()<|fim▁end|>
for node0 in vertex: for node1 in vertex: if find_python_set(node0).isdisjoint(find_python_set(node1)): assert find_set(node0) != find_set(node1)
<|file_name|>lua_ability.cpp<|end_file_name|><|fim▁begin|>/* =========================================================================== Copyright (c) 2010-2015 Darkstar Dev Teams This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see http://www.gnu.org/licenses/ This file is part of DarkStar-server source code. =========================================================================== */ #include "lua_ability.h" /************************************************************************ * * * Конструктор * * * ************************************************************************/ CLuaAbility::CLuaAbility(lua_State *L) { if( !lua_isnil(L,-1) ) { m_PLuaAbility = (CAbility*)(lua_touserdata(L,-1)); lua_pop(L,1); }else{ m_PLuaAbility = nullptr; } } /************************************************************************ * * * Конструктор * * * ************************************************************************/ CLuaAbility::CLuaAbility(CAbility* PAbility) { m_PLuaAbility = PAbility; } inline int32 CLuaAbility::getID(lua_State *L) { DSP_DEBUG_BREAK_IF(m_PLuaAbility == nullptr); lua_pushinteger(L, m_PLuaAbility->getID()); return 1; } inline int32 CLuaAbility::getRecast(lua_State* L) { DSP_DEBUG_BREAK_IF(m_PLuaAbility == nullptr); lua_pushinteger(L, m_PLuaAbility->getRecastTime()); return 1; } inline int32 CLuaAbility::getRange(lua_State* L) { DSP_DEBUG_BREAK_IF(m_PLuaAbility == nullptr); lua_pushinteger(L, m_PLuaAbility->getRange()); return 1; } inline 
int32 CLuaAbility::setMsg(lua_State *L) { DSP_DEBUG_BREAK_IF(m_PLuaAbility == nullptr); DSP_DEBUG_BREAK_IF(lua_isnil(L,-1) || !lua_isnumber(L,-1)); m_PLuaAbility->setMessage(lua_tointeger(L,-1)); return 0; } inline int32 CLuaAbility::setAnimation(lua_State *L) { DSP_DEBUG_BREAK_IF(m_PLuaAbility == nullptr); DSP_DEBUG_BREAK_IF(lua_isnil(L, -1) || !lua_isnumber(L, -1)); m_PLuaAbility->setAnimationID(lua_tointeger(L, -1)); return 0; } inline int32 CLuaAbility::setRecast(lua_State* L) { DSP_DEBUG_BREAK_IF(m_PLuaAbility == nullptr); DSP_DEBUG_BREAK_IF(lua_isnil(L, -1) || !lua_isnumber(L, -1)); m_PLuaAbility->setRecastTime(lua_tointeger(L, -1)); return 0; } inline int32 CLuaAbility::setCE(lua_State* L) { DSP_DEBUG_BREAK_IF(m_PLuaAbility == nullptr); DSP_DEBUG_BREAK_IF(lua_isnil(L, -1) || !lua_isnumber(L, -1)); m_PLuaAbility->setCE(lua_tointeger(L, -1)); return 0; <|fim▁hole|>} inline int32 CLuaAbility::setVE(lua_State* L) { DSP_DEBUG_BREAK_IF(m_PLuaAbility == nullptr); DSP_DEBUG_BREAK_IF(lua_isnil(L, -1) || !lua_isnumber(L, -1)); m_PLuaAbility->setVE(lua_tointeger(L, -1)); return 0; } inline int32 CLuaAbility::setRange(lua_State *L) { DSP_DEBUG_BREAK_IF(m_PLuaAbility == nullptr); DSP_DEBUG_BREAK_IF(lua_isnil(L, -1) || !lua_isnumber(L, -1)); m_PLuaAbility->setRange(lua_tointeger(L, -1)); return 0; } /************************************************************************ * * * Инициализация методов в lua * * * ************************************************************************/ const int8 CLuaAbility::className[] = "CAbility"; Lunar<CLuaAbility>::Register_t CLuaAbility::methods[] = { LUNAR_DECLARE_METHOD(CLuaAbility,getID), LUNAR_DECLARE_METHOD(CLuaAbility,getRecast), LUNAR_DECLARE_METHOD(CLuaAbility,getRange), LUNAR_DECLARE_METHOD(CLuaAbility,setMsg), LUNAR_DECLARE_METHOD(CLuaAbility,setAnimation), LUNAR_DECLARE_METHOD(CLuaAbility,setRecast), LUNAR_DECLARE_METHOD(CLuaAbility,setCE), LUNAR_DECLARE_METHOD(CLuaAbility,setVE), 
LUNAR_DECLARE_METHOD(CLuaAbility,setRange), {nullptr,nullptr} };<|fim▁end|>
<|file_name|>test_init.py<|end_file_name|><|fim▁begin|>from unittest import TestCase # from unittest import mock from mwclient import Site from bookhelper import ExistingBook from . import PageMock, SiteMock, TESTBOOK1_TXT #Site.login=mock.MagicMock(return_value=True) class TestInit(TestCase):<|fim▁hole|> self.site = SiteMock() def test_book(self): book = ExistingBook(self.site, "Testbook1", "live") self.assertEqual(book.errors, []) self.assertEqual(book.book_page.friendly_title, "Testbook1") self.assertEqual(book.book_page.text.strip(), TESTBOOK1_TXT) self.assertEqual(book.toc[0].target,'Testbook1/Page1') self.assertEqual(book.toc[1].target,'Testbook1/Page2') self.assertEqual(book.toc[1].text,'Page2')<|fim▁end|>
def setUp(self):
<|file_name|>test_DelEntry.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python ''' ##BOILERPLATE_COPYRIGHT ##BOILERPLATE_COPYRIGHT_END ''' import unittest, copy from testRoot import RootClass from noink.user_db import UserDB from noink.entry_db import EntryDB class AddEntry(RootClass): def test_AddEntry(self):<|fim▁hole|> entryDB = EntryDB() u = userDB.add("jontest", "pass", "Jon Q. Testuser") title = 'Little Buttercup' entry = 'There once was a man from Nantucket,' + \ 'who kept his wife in a Bucket.' + \ "Wait... how'd she fit in that bucket anyway?" e = entryDB.add(copy.deepcopy(title), entry, u) self.assertTrue(e.title == title) if __name__ == '__main__': unittest.main()<|fim▁end|>
userDB = UserDB()
<|file_name|>LoadingIndicator.js<|end_file_name|><|fim▁begin|>import React from 'react' function LoadingIndicator () { return ( <div> Loading <div className='sk-fading-circle'> <div className='sk-circle1 sk-circle'></div> <div className='sk-circle2 sk-circle'></div> <div className='sk-circle3 sk-circle'></div> <div className='sk-circle4 sk-circle'></div> <div className='sk-circle5 sk-circle'></div> <div className='sk-circle6 sk-circle'></div> <div className='sk-circle7 sk-circle'></div> <div className='sk-circle8 sk-circle'></div> <div className='sk-circle9 sk-circle'></div> <div className='sk-circle10 sk-circle'></div> <div className='sk-circle11 sk-circle'></div> <div className='sk-circle12 sk-circle'></div> </div> </div> ) }<|fim▁hole|><|fim▁end|>
export default LoadingIndicator
<|file_name|>solution.py<|end_file_name|><|fim▁begin|>class Solution(object): def dfs(self,rooms): # get all gate position queue=[(i,j,0) for i,rows in enumerate(rooms) for j,v in enumerate(rows) if not v] while queue: i,j,depth=queue.pop() # has a min path to gate and update if depth<rooms[i][j]: rooms[i][j]=depth for newi,newj in (i+1,j),(i-1,j),(i,j-1),(i,j+1): if 0<=newi<len(rooms) and 0<=newj<len(rooms[0]) and depth<rooms[newi][newj]: queue.append((newi,newj,depth+1)) def bfs(self,rooms): # get all gate position queue=[(i,j) for i,rows in enumerate(rooms) for j,v in enumerate(rows) if not v] while queue: # pop the fist insert i,j=queue.pop(0) for newi,newj in (i+1,j),(i-1,j),(i,j-1),(i,j+1): if 0<=newi<len(rooms) and 0<=newj<len(rooms[0]) and rooms[newi][newj]==2147483647: rooms[newi][newj]=rooms[i][j]+1 queue.append((newi,newj))<|fim▁hole|> :type rooms: List[List[int]] :rtype: void Do not return anything, modify rooms in-place instead. """ self.bfs(rooms)<|fim▁end|>
def wallsAndGates(self, rooms): """
<|file_name|>evennia_launcher.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python """ EVENNIA SERVER LAUNCHER SCRIPT This is the start point for running Evennia. Sets the appropriate environmental variables and launches the server and portal through the evennia_runner. Run without arguments to get a menu. Run the script with the -h flag to see usage information. """ from __future__ import print_function from builtins import input, range import os import sys import signal import shutil import importlib from argparse import ArgumentParser from subprocess import Popen, check_output, call, CalledProcessError, STDOUT import django # Signal processing SIG = signal.SIGINT # Set up the main python paths to Evennia EVENNIA_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) import evennia EVENNIA_LIB = os.path.join(os.path.dirname(os.path.abspath(evennia.__file__))) EVENNIA_SERVER = os.path.join(EVENNIA_LIB, "server") EVENNIA_RUNNER = os.path.join(EVENNIA_SERVER, "evennia_runner.py") EVENNIA_TEMPLATE = os.path.join(EVENNIA_LIB, "game_template") EVENNIA_PROFILING = os.path.join(EVENNIA_SERVER, "profiling") EVENNIA_DUMMYRUNNER = os.path.join(EVENNIA_PROFILING, "dummyrunner.py") TWISTED_BINARY = "twistd" # Game directory structure SETTINGFILE = "settings.py" SERVERDIR = "server" CONFDIR = os.path.join(SERVERDIR, "conf") SETTINGS_PATH = os.path.join(CONFDIR, SETTINGFILE) SETTINGS_DOTPATH = "server.conf.settings" CURRENT_DIR = os.getcwd() GAMEDIR = CURRENT_DIR # Operational setup SERVER_LOGFILE = None PORTAL_LOGFILE = None HTTP_LOGFILE = None SERVER_PIDFILE = None PORTAL_PIDFILE = None SERVER_RESTART = None PORTAL_RESTART = None SERVER_PY_FILE = None PORTAL_PY_FILE = None PYTHON_MIN = '2.7' TWISTED_MIN = '16.0.0' DJANGO_MIN = '1.8' DJANGO_REC = '1.9' sys.path[1] = EVENNIA_ROOT <|fim▁hole|># # Messages # #------------------------------------------------------------ CREATED_NEW_GAMEDIR = \ """ Welcome to Evennia! 
Created a new Evennia game directory '{gamedir}'. You can now optionally edit your new settings file at {settings_path}. If you don't, the defaults will work out of the box. When ready to continue, 'cd' to your game directory and run: evennia migrate This initializes the database. To start the server for the first time, run: evennia start Make sure to create a superuser when asked for it (the email can be blank if you want). You should now be able to (by default) connect to your server on 'localhost', port 4000 using a telnet/mud client or http://localhost:8000 using your web browser. If things don't work, check so those ports are open. """ ERROR_INPUT = \ """ Command {args} {kwargs} raised an error: '{traceback}'. """ ERROR_NO_GAMEDIR = \ """ ERROR: No Evennia settings file was found. Evennia looks for the file in your game directory as server/conf/settings.py. You must run this command from somewhere inside a valid game directory first created with evennia --init mygamename If you are in a game directory but is missing a settings.py file, it may be because you have git-cloned an existing game directory. The settings.py file is not cloned by git (it's in .gitignore) since it can contain sensitive and/or server-specific information. You can create a new, empty settings file with evennia --initsettings If cloning the settings file is not a problem you could manually copy over the old settings file or remove its entry in .gitignore """ WARNING_MOVING_SUPERUSER = \ """ WARNING: Evennia expects a Player superuser with id=1. No such Player was found. However, another superuser ('{other_key}', id={other_id}) was found in the database. If you just created this superuser and still see this text it is probably due to the database being flushed recently - in this case the database's internal auto-counter might just start from some value higher than one. We will fix this by assigning the id 1 to Player '{other_key}'. Please confirm this is acceptable before continuing. 
""" WARNING_RUNSERVER = \ """ WARNING: There is no need to run the Django development webserver to test out Evennia web features (the web client will in fact not work since the Django test server knows nothing about MUDs). Instead, just start Evennia with the webserver component active (this is the default). """ ERROR_SETTINGS = \ """ ERROR: There was an error importing Evennia's config file {settingspath}. There is usually one of three reasons for this: 1) You are not running this command from your game directory. Change directory to your game directory and try again (or create a new game directory using evennia --init <dirname>) 2) The settings file contains a syntax error. If you see a traceback above, review it, resolve the problem and try again. 3) Django is not correctly installed. This usually shows as errors mentioning 'DJANGO_SETTINGS_MODULE'. If you run a virtual machine, it might be worth to restart it to see if this resolves the issue. """.format(settingsfile=SETTINGFILE, settingspath=SETTINGS_PATH) ERROR_INITSETTINGS = \ """ ERROR: 'evennia --initsettings' must be called from the root of your game directory, since it tries to (re)create the new settings.py file in a subfolder server/conf/. """ RECREATED_SETTINGS = \ """ (Re)created an empty settings file in server/conf/settings.py. Note that if you were using an existing database, the password salt of this new settings file will be different from the old one. This means that any existing players may not be able to log in to their accounts with their old passwords. """ ERROR_DATABASE = \ """ ERROR: Your database does not seem to be set up correctly. (error was '{traceback}') Standing in your game directory, run evennia migrate to initialize/update the database according to your settings. """ ERROR_WINDOWS_WIN32API = \ """ ERROR: Unable to import win32api, which Twisted requires to run. 
You may download it from: http://sourceforge.net/projects/pywin32/files/pywin32/ If you are running in a virtual environment, browse to the location of the latest win32api exe file for your computer and Python version and copy the url to it; then paste it into a call to easy_install: easy_install http://<url to win32api exe> """ INFO_WINDOWS_BATFILE = \ """ INFO: Since you are running Windows, a file 'twistd.bat' was created for you. This is a simple batch file that tries to call the twisted executable. Evennia determined this to be: {twistd_path} If you run into errors at startup you might need to edit twistd.bat to point to the actual location of the Twisted executable (usually called twistd.py) on your machine. This procedure is only done once. Run evennia.py again when you are ready to start the server. """ CMDLINE_HELP = \ """ Starts or operates the Evennia MU* server. Also allows for initializing a new game directory and manages the game's database. You can also pass most standard django-admin arguments and options. """ VERSION_INFO = \ """ Evennia {version} OS: {os} Python: {python} Twisted: {twisted} Django: {django}{about} """ ABOUT_INFO = \ """ Evennia MUD/MUX/MU* development system Licence: BSD 3-Clause Licence Web: http://www.evennia.com Irc: #evennia on FreeNode Forum: http://www.evennia.com/discussions Maintainer (2010-): Griatch (griatch AT gmail DOT com) Maintainer (2006-10): Greg Taylor Use -h for command line options. """ HELP_ENTRY = \ """ Enter 'evennia -h' for command-line options. Use option (1) in a production environment. During development (2) is usually enough, portal debugging is usually only useful if you are adding new protocols or are debugging Evennia itself. Reload with (5) to update the server with your changes without disconnecting any players. Note: Reload and stop are sometimes poorly supported in Windows. If you have issues, log into the game to stop or restart the server instead. 
""" MENU = \ """ +----Evennia Launcher-------------------------------------------+ | | +--- Starting --------------------------------------------------+ | | | 1) (normal): All output to logfiles | | 2) (server devel): Server logs to terminal (-i option) | | 3) (portal devel): Portal logs to terminal | | 4) (full devel): Both Server and Portal logs to terminal | | | +--- Restarting ------------------------------------------------+ | | | 5) Reload the Server | | 6) Reload the Portal (only works with portal/full debug) | | | +--- Stopping --------------------------------------------------+ | | | 7) Stopping both Portal and Server | | 8) Stopping only Server | | 9) Stopping only Portal | | | +---------------------------------------------------------------+ | h) Help i) About info q) Abort | +---------------------------------------------------------------+ """ ERROR_LOGDIR_MISSING = \ """ ERROR: One or more log-file directory locations could not be found: {logfiles} This is simple to fix: Just manually create the missing log directory (or directories) and re-launch the server (the log files will be created automatically). (Explanation: Evennia creates the log directory automatically when initializating a new game directory. This error usually happens if you used git to clone a pre-created game directory - since log files are in .gitignore they will not be cloned, which leads to the log directory also not being created.) """ ERROR_PYTHON_VERSION = \ """ ERROR: Python {pversion} used. Evennia requires version {python_min} or higher (but not 3.x). """ ERROR_TWISTED_VERSION = \ """ ERROR: Twisted {tversion} found. Evennia requires version {twisted_min} or higher. """ ERROR_NOTWISTED = \ """ ERROR: Twisted does not seem to be installed. """ ERROR_DJANGO_MIN = \ """ ERROR: Django {dversion} found. Evennia requires version {django_min} or higher. Install it with for example `pip install --upgrade django` or with `pip install django=={django_min}` to get a specific version. 
It's also a good idea to run `evennia migrate` after this upgrade. """ NOTE_DJANGO_MIN = \ """ NOTE: Django {dversion} found. This will work, but v{django_rec} is recommended for production. """ NOTE_DJANGO_NEW = \ """ NOTE: Django {dversion} found. This is newer than Evennia's recommended version (v{django_rec}). It might work, but may be new enough to not be fully tested yet. Report any issues. """ ERROR_NODJANGO = \ """ ERROR: Django does not seem to be installed. """ NOTE_KEYBOARDINTERRUPT = \ """ STOP: Caught keyboard interrupt while in interactive mode. """ #------------------------------------------------------------ # # Functions # #------------------------------------------------------------ def evennia_version(): """ Get the Evennia version info from the main package. """ version = "Unknown" try: import evennia version = evennia.__version__ except ImportError: pass try: rev = check_output( "git rev-parse --short HEAD", shell=True, cwd=EVENNIA_ROOT, stderr=STDOUT).strip() version = "%s (rev %s)" % (version, rev) except (IOError, CalledProcessError): pass return version EVENNIA_VERSION = evennia_version() def check_main_evennia_dependencies(): """ Checks and imports the Evennia dependencies. This must be done already before the paths are set up. Returns: not_error (bool): True if no dependency error was found. 
""" error = False # Python pversion = ".".join(str(num) for num in sys.version_info if type(num) == int) if pversion < PYTHON_MIN: print(ERROR_PYTHON_VERSION.format(pversion=pversion, python_min=PYTHON_MIN)) error = True # Twisted try: import twisted tversion = twisted.version.short() if tversion < TWISTED_MIN: print(ERROR_TWISTED_VERSION.format( tversion=tversion, twisted_min=TWISTED_MIN)) error = True except ImportError: print(ERROR_NOTWISTED) error = True # Django try: dversion = ".".join(str(num) for num in django.VERSION if type(num) == int) # only the main version (1.5, not 1.5.4.0) dversion_main = ".".join(dversion.split(".")[:2]) if dversion < DJANGO_MIN: print(ERROR_DJANGO_MIN.format( dversion=dversion_main, django_min=DJANGO_MIN)) error = True elif DJANGO_MIN <= dversion < DJANGO_REC: print(NOTE_DJANGO_MIN.format( dversion=dversion_main, django_rec=DJANGO_REC)) elif DJANGO_REC < dversion_main: print(NOTE_DJANGO_NEW.format( dversion=dversion_main, django_rec=DJANGO_REC)) except ImportError: print(ERROR_NODJANGO) error = True if error: sys.exit() # return True/False if error was reported or not return not error def set_gamedir(path): """ Set GAMEDIR based on path, by figuring out where the setting file is inside the directory tree. """ global GAMEDIR Ndepth = 10 settings_path = os.path.join("server", "conf", "settings.py") for i in range(Ndepth): gpath = os.getcwd() if "server" in os.listdir(gpath): if os.path.isfile(settings_path): GAMEDIR = gpath return os.chdir(os.pardir) print(ERROR_NO_GAMEDIR) sys.exit() def create_secret_key(): """ Randomly create the secret key for the settings file """ import random import string secret_key = list((string.letters + string.digits + string.punctuation).replace("\\", "")\ .replace("'", '"').replace("{","_").replace("}","-")) random.shuffle(secret_key) secret_key = "".join(secret_key[:40]) return secret_key def create_settings_file(init=True): """ Uses the template settings file to build a working settings file. 
Args: init (bool): This is part of the normal evennia --init operation. If false, this function will copy a fresh template file in (asking if it already exists). """ settings_path = os.path.join(GAMEDIR, "server", "conf", "settings.py") if not init: # if not --init mode, settings file may already exist from before if os.path.exists(settings_path): inp = raw_input("server/conf/settings.py already exists. " "Do you want to reset it? y/[N]> ") if not inp.lower() == 'y': print ("Aborted.") sys.exit() else: print ("Reset the settings file.") default_settings_path = os.path.join(EVENNIA_TEMPLATE, "server", "conf", "settings.py") shutil.copy(default_settings_path, settings_path) with open(settings_path, 'r') as f: settings_string = f.read() # tweak the settings setting_dict = { "settings_default": os.path.join(EVENNIA_LIB, "settings_default.py"), "servername": "\"%s\"" % GAMEDIR.rsplit(os.path.sep, 1)[1].capitalize(), "secret_key": "\'%s\'" % create_secret_key()} settings_string = settings_string.format(**setting_dict) with open(settings_path, 'w') as f: f.write(settings_string) def create_game_directory(dirname): """ Initialize a new game directory named dirname at the current path. This means copying the template directory from evennia's root. Args: dirname (str): The directory name to create. """ global GAMEDIR GAMEDIR = os.path.abspath(os.path.join(CURRENT_DIR, dirname)) if os.path.exists(GAMEDIR): print("Cannot create new Evennia game dir: '%s' already exists." % dirname) sys.exit() # copy template directory shutil.copytree(EVENNIA_TEMPLATE, GAMEDIR) # pre-build settings file in the new GAMEDIR create_settings_file() def create_superuser(): """ Create the superuser player """ print( "\nCreate a superuser below. The superuser is Player #1, the 'owner' " "account of the server.\n") django.core.management.call_command("createsuperuser", interactive=True) def check_database(): """ Check so the database exists. 
Returns: exists (bool): `True` if the database exists, otherwise `False`. """ # Check so a database exists and is accessible from django.db import connection tables = connection.introspection.get_table_list(connection.cursor()) if not tables or not isinstance(tables[0], basestring): # django 1.8+ tables = [tableinfo.name for tableinfo in tables] if tables and u'players_playerdb' in tables: # database exists and seems set up. Initialize evennia. import evennia evennia._init() # Try to get Player#1 from evennia.players.models import PlayerDB try: PlayerDB.objects.get(id=1) except django.db.utils.OperationalError as e: print(ERROR_DATABASE.format(traceback=e)) sys.exit() except PlayerDB.DoesNotExist: # no superuser yet. We need to create it. other_superuser = PlayerDB.objects.filter(is_superuser=True) if other_superuser: # Another superuser was found, but not with id=1. This may # happen if using flush (the auto-id starts at a higher # value). Wwe copy this superuser into id=1. To do # this we must deepcopy it, delete it then save the copy # with the new id. This allows us to avoid the UNIQUE # constraint on usernames. other = other_superuser[0] other_id = other.id other_key = other.username print(WARNING_MOVING_SUPERUSER.format( other_key=other_key, other_id=other_id)) res = "" while res.upper() != "Y": # ask for permission res = input("Continue [Y]/N: ") if res.upper() == "N": sys.exit() elif not res: break # continue with the from copy import deepcopy new = deepcopy(other) other.delete() new.id = 1 new.save() else: create_superuser() check_database() return True def getenv(): """ Get current environment and add PYTHONPATH. Returns: env (dict): Environment global dict. """ sep = ";" if os.name == 'nt' else ":" env = os.environ.copy() env['PYTHONPATH'] = sep.join(sys.path) return env def get_pid(pidfile): """ Get the PID (Process ID) by trying to access an PID file. Args: pidfile (str): The path of the pid file. Returns: pid (str): The process id. 
""" pid = None if os.path.exists(pidfile): f = open(pidfile, 'r') pid = f.read() return pid def del_pid(pidfile): """ The pidfile should normally be removed after a process has finished, but when sending certain signals they remain, so we need to clean them manually. Args: pidfile (str): The path of the pid file. """ if os.path.exists(pidfile): os.remove(pidfile) def kill(pidfile, signal=SIG, succmsg="", errmsg="", restart_file=SERVER_RESTART, restart=False): """ Send a kill signal to a process based on PID. A customized success/error message will be returned. If clean=True, the system will attempt to manually remove the pid file. Args: pidfile (str): The path of the pidfile to get the PID from. signal (int, optional): Signal identifier. succmsg (str, optional): Message to log on success. errmsg (str, optional): Message to log on failure. restart_file (str, optional): Restart file location. restart (bool, optional): Are we in restart mode or not. """ pid = get_pid(pidfile) if pid: if os.name == 'nt': os.remove(pidfile) # set restart/norestart flag if restart: django.core.management.call_command( 'collectstatic', interactive=False, verbosity=0) with open(restart_file, 'w') as f: f.write("reload") else: with open(restart_file, 'w') as f: f.write("shutdown") try: os.kill(int(pid), signal) except OSError: print("Process %(pid)s cannot be stopped. "\ "The PID file 'server/%(pidfile)s' seems stale. "\ "Try removing it." % {'pid': pid, 'pidfile': pidfile}) return print("Evennia:", succmsg) return print("Evennia:", errmsg) def show_version_info(about=False): """ Display version info. Args: about (bool): Include ABOUT info as well as version numbers. Returns: version_info (str): A complete version info string. 
""" import os import sys import twisted import django return VERSION_INFO.format( version=EVENNIA_VERSION, about=ABOUT_INFO if about else "", os=os.name, python=sys.version.split()[0], twisted=twisted.version.short(), django=django.get_version()) def error_check_python_modules(): """ Import settings modules in settings. This will raise exceptions on pure python-syntax issues which are hard to catch gracefully with exceptions in the engine (since they are formatting errors in the python source files themselves). Best they fail already here before we get any further. Raises: DeprecationWarning: For trying to access various modules (usually in `settings.py`) which are no longer supported. """ from django.conf import settings def imp(path, split=True): mod, fromlist = path, "None" if split: mod, fromlist = path.rsplit('.', 1) __import__(mod, fromlist=[fromlist]) # core modules imp(settings.COMMAND_PARSER) imp(settings.SEARCH_AT_RESULT) imp(settings.CONNECTION_SCREEN_MODULE) #imp(settings.AT_INITIAL_SETUP_HOOK_MODULE, split=False) for path in settings.LOCK_FUNC_MODULES: imp(path, split=False) # cmdsets deprstring = ("settings.%s should be renamed to %s. 
If defaults are used, " "their path/classname must be updated " "(see evennia/settings_default.py).") if hasattr(settings, "CMDSET_DEFAULT"): raise DeprecationWarning(deprstring % ( "CMDSET_DEFAULT", "CMDSET_CHARACTER")) if hasattr(settings, "CMDSET_OOC"): raise DeprecationWarning(deprstring % ("CMDSET_OOC", "CMDSET_PLAYER")) if settings.WEBSERVER_ENABLED and not isinstance(settings.WEBSERVER_PORTS[0], tuple): raise DeprecationWarning( "settings.WEBSERVER_PORTS must be on the form " "[(proxyport, serverport), ...]") if hasattr(settings, "BASE_COMM_TYPECLASS"): raise DeprecationWarning(deprstring % ( "BASE_COMM_TYPECLASS", "BASE_CHANNEL_TYPECLASS")) if hasattr(settings, "COMM_TYPECLASS_PATHS"): raise DeprecationWarning(deprstring % ( "COMM_TYPECLASS_PATHS", "CHANNEL_TYPECLASS_PATHS")) if hasattr(settings, "CHARACTER_DEFAULT_HOME"): raise DeprecationWarning( "settings.CHARACTER_DEFAULT_HOME should be renamed to " "DEFAULT_HOME. See also settings.START_LOCATION " "(see evennia/settings_default.py).") deprstring = ("settings.%s is now merged into settings.TYPECLASS_PATHS. 
" "Update your settings file.") if hasattr(settings, "OBJECT_TYPECLASS_PATHS"): raise DeprecationWarning(deprstring % "OBJECT_TYPECLASS_PATHS") if hasattr(settings, "SCRIPT_TYPECLASS_PATHS"): raise DeprecationWarning(deprstring % "SCRIPT_TYPECLASS_PATHS") if hasattr(settings, "PLAYER_TYPECLASS_PATHS"): raise DeprecationWarning(deprstring % "PLAYER_TYPECLASS_PATHS") if hasattr(settings, "CHANNEL_TYPECLASS_PATHS"): raise DeprecationWarning(deprstring % "CHANNEL_TYPECLASS_PATHS") from evennia.commands import cmdsethandler if not cmdsethandler.import_cmdset(settings.CMDSET_UNLOGGEDIN, None): print("Warning: CMDSET_UNLOGGED failed to load!") if not cmdsethandler.import_cmdset(settings.CMDSET_CHARACTER, None): print("Warning: CMDSET_CHARACTER failed to load") if not cmdsethandler.import_cmdset(settings.CMDSET_PLAYER, None): print("Warning: CMDSET_PLAYER failed to load") # typeclasses imp(settings.BASE_PLAYER_TYPECLASS) imp(settings.BASE_OBJECT_TYPECLASS) imp(settings.BASE_CHARACTER_TYPECLASS) imp(settings.BASE_ROOM_TYPECLASS) imp(settings.BASE_EXIT_TYPECLASS) imp(settings.BASE_SCRIPT_TYPECLASS) def init_game_directory(path, check_db=True): """ Try to analyze the given path to find settings.py - this defines the game directory and also sets PYTHONPATH as well as the django path. Args: path (str): Path to new game directory, including its name. check_db (bool, optional): Check if the databae exists. 
""" # set the GAMEDIR path set_gamedir(path) # Add gamedir to python path sys.path.insert(0, GAMEDIR) if sys.argv[1] == 'test': os.environ['DJANGO_SETTINGS_MODULE'] = 'evennia.settings_default' else: os.environ['DJANGO_SETTINGS_MODULE'] = SETTINGS_DOTPATH # required since django1.7 django.setup() # test existence of the settings module try: from django.conf import settings except Exception as ex: if not str(ex).startswith("No module named"): import traceback print(traceback.format_exc().strip()) print(ERROR_SETTINGS) sys.exit() # this will both check the database and initialize the evennia dir. if check_db: check_database() # set up the Evennia executables and log file locations global SERVER_PY_FILE, PORTAL_PY_FILE global SERVER_LOGFILE, PORTAL_LOGFILE, HTTP_LOGFILE global SERVER_PIDFILE, PORTAL_PIDFILE global SERVER_RESTART, PORTAL_RESTART global EVENNIA_VERSION SERVER_PY_FILE = os.path.join(EVENNIA_LIB, "server", "server.py") PORTAL_PY_FILE = os.path.join(EVENNIA_LIB, "portal", "portal", "portal.py") SERVER_PIDFILE = os.path.join(GAMEDIR, SERVERDIR, "server.pid") PORTAL_PIDFILE = os.path.join(GAMEDIR, SERVERDIR, "portal.pid") SERVER_RESTART = os.path.join(GAMEDIR, SERVERDIR, "server.restart") PORTAL_RESTART = os.path.join(GAMEDIR, SERVERDIR, "portal.restart") SERVER_LOGFILE = settings.SERVER_LOG_FILE PORTAL_LOGFILE = settings.PORTAL_LOG_FILE HTTP_LOGFILE = settings.HTTP_LOG_FILE # verify existence of log file dir (this can be missing e.g. # if the game dir itself was cloned since log files are in .gitignore) logdirs = [logfile.rsplit(os.path.sep, 1) for logfile in (SERVER_LOGFILE, PORTAL_LOGFILE, HTTP_LOGFILE)] if not all(os.path.isdir(pathtup[0]) for pathtup in logdirs): errstr = "\n ".join("%s (log file %s)" % (pathtup[0], pathtup[1]) for pathtup in logdirs if not os.path.isdir(pathtup[0])) print(ERROR_LOGDIR_MISSING.format(logfiles=errstr)) sys.exit() if os.name == 'nt': # We need to handle Windows twisted separately. 
We create a # batchfile in game/server, linking to the actual binary global TWISTED_BINARY # Windows requires us to use the absolute path for the bat file. server_path = os.path.dirname(os.path.abspath(__file__)) TWISTED_BINARY = os.path.join(server_path, "twistd.bat") # add path so system can find the batfile sys.path.insert(1, os.path.join(GAMEDIR, SERVERDIR)) try: importlib.import_module("win32api") except ImportError: print(ERROR_WINDOWS_WIN32API) sys.exit() batpath = os.path.join(EVENNIA_SERVER, TWISTED_BINARY) if not os.path.exists(batpath): # Test for executable twisted batch file. This calls the # twistd.py executable that is usually not found on the # path in Windows. It's not enough to locate # scripts.twistd, what we want is the executable script # C:\PythonXX/Scripts/twistd.py. Alas we cannot hardcode # this location since we don't know if user has Python in # a non-standard location. So we try to figure it out. twistd = importlib.import_module("twisted.scripts.twistd") twistd_dir = os.path.dirname(twistd.__file__) # note that we hope the twistd package won't change here, since we # try to get to the executable by relative path. twistd_path = os.path.abspath( os.path.join(twistd_dir, os.pardir, os.pardir, os.pardir, os.pardir, 'scripts', 'twistd.py')) with open(batpath, 'w') as bat_file: # build a custom bat file for windows bat_file.write("@\"%s\" \"%s\" %%*" % ( sys.executable, twistd_path)) print(INFO_WINDOWS_BATFILE.format(twistd_path=twistd_path)) def run_dummyrunner(number_of_dummies): """ Start an instance of the dummyrunner Args: number_of_dummies (int): The number of dummy players to start. Notes: The dummy players' behavior can be customized by adding a `dummyrunner_settings.py` config file in the game's conf/ directory. 
""" number_of_dummies = str(int(number_of_dummies)) if number_of_dummies else 1 cmdstr = [sys.executable, EVENNIA_DUMMYRUNNER, "-N", number_of_dummies] config_file = os.path.join(SETTINGS_PATH, "dummyrunner_settings.py") if os.path.exists(config_file): cmdstr.extend(["--config", config_file]) try: call(cmdstr, env=getenv()) except KeyboardInterrupt: pass def list_settings(keys): """ Display the server settings. We only display the Evennia specific settings here. The result will be printed to the terminal. Args: keys (str or list): Setting key or keys to inspect. """ from importlib import import_module from evennia.utils import evtable evsettings = import_module(SETTINGS_DOTPATH) if len(keys) == 1 and keys[0].upper() == "ALL": # show a list of all keys # a specific key table = evtable.EvTable() confs = [key for key in sorted(evsettings.__dict__) if key.isupper()] for i in range(0, len(confs), 4): table.add_row(*confs[i:i+4]) else: # a specific key table = evtable.EvTable(width=131) keys = [key.upper() for key in keys] confs = dict((key, var) for key, var in evsettings.__dict__.items() if key in keys) for key, val in confs.items(): table.add_row(key, str(val)) print(table) def run_menu(): """ This launches an interactive menu. 
""" while True: # menu loop print(MENU) inp = input(" option > ") # quitting and help if inp.lower() == 'q': return elif inp.lower() == 'h': print(HELP_ENTRY) input("press <return> to continue ...") continue elif inp.lower() in ('v', 'i', 'a'): print(show_version_info(about=True)) input("press <return> to continue ...") continue # options try: inp = int(inp) except ValueError: print("Not a valid option.") continue if inp == 1: # start everything, log to log files server_operation("start", "all", False, False) elif inp == 2: # start everything, server interactive start server_operation("start", "all", True, False) elif inp == 3: # start everything, portal interactive start server_operation("start", "server", False, False) server_operation("start", "portal", True, False) elif inp == 4: # start both server and portal interactively server_operation("start", "server", True, False) server_operation("start", "portal", True, False) elif inp == 5: # reload the server server_operation("reload", "server", None, None) elif inp == 6: # reload the portal server_operation("reload", "portal", None, None) elif inp == 7: # stop server and portal server_operation("stop", "all", None, None) elif inp == 8: # stop server server_operation("stop", "server", None, None) elif inp == 9: # stop portal server_operation("stop", "portal", None, None) else: print("Not a valid option.") continue return def server_operation(mode, service, interactive, profiler, logserver=False): """ Handle argument options given on the command line. Args: mode (str): Start/stop/restart and so on. service (str): "server", "portal" or "all". interactive (bool). Use interactive mode or daemon. profiler (bool): Run the service under the profiler. logserver (bool, optional): Log Server data to logfile specified by settings.SERVER_LOG_FILE. """ cmdstr = [sys.executable, EVENNIA_RUNNER] errmsg = "The %s does not seem to be running." if mode == 'start': # launch the error checker. Best to catch the errors already here. 
error_check_python_modules() # starting one or many services if service == 'server': if profiler: cmdstr.append('--pserver') if interactive: cmdstr.append('--iserver') if logserver: cmdstr.append('--logserver') cmdstr.append('--noportal') elif service == 'portal': if profiler: cmdstr.append('--pportal') if interactive: cmdstr.append('--iportal') cmdstr.append('--noserver') django.core.management.call_command( 'collectstatic', verbosity=1, interactive=False) else: # all # for convenience we don't start logging of # portal, only of server with this command. if profiler: # this is the common case cmdstr.append('--pserver') if interactive: cmdstr.append('--iserver') if logserver: cmdstr.append('--logserver') django.core.management.call_command( 'collectstatic', verbosity=1, interactive=False) cmdstr.extend([ GAMEDIR, TWISTED_BINARY, SERVER_LOGFILE, PORTAL_LOGFILE, HTTP_LOGFILE]) # start the server process = Popen(cmdstr, env=getenv()) if interactive: try: process.wait() except KeyboardInterrupt: server_operation("stop", "portal", False, False) return finally: print(NOTE_KEYBOARDINTERRUPT) elif mode == 'reload': # restarting services if os.name == 'nt': print( "Restarting from command line is not supported under Windows. " "Log into the game to restart.") return if service == 'server': kill(SERVER_PIDFILE, SIG, "Server reloaded.", errmsg % 'Server', SERVER_RESTART, restart=True) elif service == 'portal': print( "Note: Portal usually doesnt't need to be reloaded unless you " "are debugging in interactive mode. If Portal was running in " "default Daemon mode, it cannot be restarted. 
In that case " "you have to restart it manually with 'evennia.py " "start portal'") kill(PORTAL_PIDFILE, SIG, "Portal reloaded (or stopped, if it was in daemon mode).", errmsg % 'Portal', PORTAL_RESTART, restart=True) else: # all # default mode, only restart server kill(SERVER_PIDFILE, SIG, "Server reload.", errmsg % 'Server', SERVER_RESTART, restart=True) elif mode == 'stop': # stop processes, avoiding reload if service == 'server': kill(SERVER_PIDFILE, SIG, "Server stopped.", errmsg % 'Server', SERVER_RESTART) elif service == 'portal': kill(PORTAL_PIDFILE, SIG, "Portal stopped.", errmsg % 'Portal', PORTAL_RESTART) else: kill(PORTAL_PIDFILE, SIG, "Portal stopped.", errmsg % 'Portal', PORTAL_RESTART) kill(SERVER_PIDFILE, SIG, "Server stopped.", errmsg % 'Server', SERVER_RESTART) def main(): """ Run the evennia launcher main program. """ # set up argument parser parser = ArgumentParser(description=CMDLINE_HELP) parser.add_argument( '-v', '--version', action='store_true', dest='show_version', default=False, help="Show version info.") parser.add_argument( '-i', '--interactive', action='store_true', dest='interactive', default=False, help="Start given processes in interactive mode.") parser.add_argument( '-l', '--log', action='store_true', dest="logserver", default=False, help="Log Server data to log file.") parser.add_argument( '--init', action='store', dest="init", metavar="name", help="Creates a new game directory 'name' at the current location.") parser.add_argument( '--list', nargs='+', action='store', dest='listsetting', metavar="key", help=("List values for server settings. 
Use 'all' to list all " "available keys.")) parser.add_argument( '--profiler', action='store_true', dest='profiler', default=False, help="Start given server component under the Python profiler.") parser.add_argument( '--dummyrunner', nargs=1, action='store', dest='dummyrunner', metavar="N", help="Tests a running server by connecting N dummy players to it.") parser.add_argument( '--settings', nargs=1, action='store', dest='altsettings', default=None, metavar="filename.py", help=("Start evennia with alternative settings file in " "gamedir/server/conf/.")) parser.add_argument( '--initsettings', action='store_true', dest="initsettings", default=False, help="Creates a new, empty settings file as gamedir/server/conf/settings.py.") parser.add_argument( "option", nargs='?', default="noop", help="Operational mode: 'start', 'stop', 'restart' or 'menu'.") parser.add_argument( "service", metavar="component", nargs='?', default="all", help=("Server component to operate on: " "'server', 'portal' or 'all' (default).")) parser.epilog = ( "Example django-admin commands: " "'migrate', 'flush', 'shell' and 'dbshell'. " "See the django documentation for more django-admin commands.") args, unknown_args = parser.parse_known_args() # handle arguments option, service = args.option, args.service # make sure we have everything check_main_evennia_dependencies() if not args: # show help pane print(CMDLINE_HELP) sys.exit() elif args.init: # initialization of game directory create_game_directory(args.init) print(CREATED_NEW_GAMEDIR.format( gamedir=args.init, settings_path=os.path.join(args.init, SETTINGS_PATH))) sys.exit() if args.show_version: # show the version info print(show_version_info(option == "help")) sys.exit() if args.altsettings: # use alternative settings file sfile = args.altsettings[0] global SETTINGSFILE, SETTINGS_DOTPATH SETTINGSFILE = sfile SETTINGS_DOTPATH = "server.conf.%s" % sfile.rstrip(".py") print("Using settings file '%s' (%s)." 
% ( SETTINGSFILE, SETTINGS_DOTPATH)) if args.initsettings: # create new settings file global GAMEDIR GAMEDIR = os.getcwd() try: create_settings_file(init=False) print(RECREATED_SETTINGS) except IOError: print(ERROR_INITSETTINGS) sys.exit() if args.dummyrunner: # launch the dummy runner init_game_directory(CURRENT_DIR, check_db=True) run_dummyrunner(args.dummyrunner[0]) elif args.listsetting: # display all current server settings init_game_directory(CURRENT_DIR, check_db=False) list_settings(args.listsetting) elif option == 'menu': # launch menu for operation init_game_directory(CURRENT_DIR, check_db=True) run_menu() elif option in ('start', 'reload', 'stop'): # operate the server directly init_game_directory(CURRENT_DIR, check_db=True) server_operation(option, service, args.interactive, args.profiler, args.logserver) elif option != "noop": # pass-through to django manager check_db = False if option in ('runserver', 'testserver'): print(WARNING_RUNSERVER) if option == "shell": # to use the shell we need to initialize it first, # and this only works if the database is set up check_db = True init_game_directory(CURRENT_DIR, check_db=check_db) args = [option] kwargs = {} if service not in ("all", "server", "portal"): args.append(service) if unknown_args: for arg in unknown_args: if arg.startswith("--"): print("arg:", arg) if "=" in arg: arg, value = [p.strip() for p in arg.split("=", 1)] else: value = True kwargs[arg.lstrip("--")] = [value] else: args.append(arg) try: django.core.management.call_command(*args, **kwargs) except django.core.management.base.CommandError as exc: args = ", ".join(args) kwargs = ", ".join(["--%s" % kw for kw in kwargs]) print(ERROR_INPUT.format(traceback=exc, args=args, kwargs=kwargs)) else: # no input; print evennia info print(ABOUT_INFO) if __name__ == '__main__': # start Evennia from the command line main()<|fim▁end|>
#------------------------------------------------------------
<|file_name|>KotlinTopLevelFunctionFqnNameIndex.java<|end_file_name|><|fim▁begin|>// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package org.jetbrains.kotlin.idea.stubindex; import com.intellij.openapi.project.Project; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.stubs.StubIndex; import com.intellij.psi.stubs.StubIndexKey; import org.jetbrains.annotations.NotNull; import org.jetbrains.kotlin.psi.KtNamedFunction; import java.util.Collection; /** * Stores package top level function (both extension and non-extension) full qualified names. */ public class KotlinTopLevelFunctionFqnNameIndex extends AbstractStringStubIndexExtension<KtNamedFunction> { private static final StubIndexKey<String, KtNamedFunction> KEY = KotlinIndexUtil.createIndexKey(KotlinTopLevelFunctionFqnNameIndex.class); private static final KotlinTopLevelFunctionFqnNameIndex INSTANCE = new KotlinTopLevelFunctionFqnNameIndex(); @NotNull public static KotlinTopLevelFunctionFqnNameIndex getInstance() { return INSTANCE; } private KotlinTopLevelFunctionFqnNameIndex() { super(KtNamedFunction.class); } <|fim▁hole|> @Override public StubIndexKey<String, KtNamedFunction> getKey() { return KEY; } @NotNull @Override public Collection<KtNamedFunction> get(@NotNull String s, @NotNull Project project, @NotNull GlobalSearchScope scope) { return StubIndex.getElements(KEY, s, project, scope, KtNamedFunction.class); } // temporary hack, see comments in findCandidateDeclarationsInIndex (findDecompiledDeclaration.kt) @NotNull public Collection<KtNamedFunction> getNoScopeWrap(@NotNull String s, @NotNull Project project, @NotNull GlobalSearchScope scope) { return StubIndex.getElements(KEY, s, project, scope, KtNamedFunction.class); } }<|fim▁end|>
@NotNull
<|file_name|>integration_credential.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals from rest_framework import viewsets, serializers from dbaas_credentials.models import Credential<|fim▁hole|>from .environment import EnvironmentSerializer from .integration_type import CredentialTypeSerializer class IntegrationCredentialSerializer(serializers.HyperlinkedModelSerializer): environments = EnvironmentSerializer(many=True, read_only=True) integration_type = CredentialTypeSerializer(many=False, read_only=True) class Meta: model = Credential fields = ('user', 'password', 'integration_type', 'token', 'secret', 'endpoint', 'environments', "project", "team") class IntegrationCredentialAPI(viewsets.ModelViewSet): """ Integration Credential Api """ serializer_class = IntegrationCredentialSerializer queryset = Credential.objects.all()<|fim▁end|>
<|file_name|>mul.rs<|end_file_name|><|fim▁begin|>#![feature(core, core_simd)] extern crate core; #[cfg(test)] mod tests { use core::simd::f32x4; // #[simd] // #[derive(Copy, Clone, Debug)] // #[repr(C)] // pub struct f32x4(pub f32, pub f32, pub f32, pub f32); #[test] fn mul_test1() { let x: f32x4 = f32x4( 1.23, 4.56, 7.89, 0.12 );<|fim▁hole|> 2.0, 2.0, 2.0, 2.0 ); let z: f32x4 = x * y; let result: String = format!("{:?}", z); assert_eq!(result, "f32x4(\ 2.46, 9.12, 15.78, 0.24\ )".to_string()); } }<|fim▁end|>
let y: f32x4 = f32x4(
<|file_name|>systemglobal_authenticationpolicy_binding.py<|end_file_name|><|fim▁begin|># # Copyright (c) 2008-2015 Citrix Systems, Inc. # # Licensed under the Apache License, Version 2.0 (the "License") # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response from nssrc.com.citrix.netscaler.nitro.service.options import options from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util class systemglobal_authenticationpolicy_binding(base_resource) : """ Binding class showing the authenticationpolicy that can be bound to systemglobal. """ def __init__(self) : self._policyname = "" self._priority = 0 self._builtin = [] self.___count = 0 @property def priority(self) : ur"""The priority of the command policy. """ try : return self._priority except Exception as e: raise e @priority.setter def priority(self, priority) : ur"""The priority of the command policy. """ try : self._priority = priority except Exception as e: raise e @property def builtin(self) : ur"""Indicates that a variable is a built-in (SYSTEM INTERNAL) type.<br/>Possible values = MODIFIABLE, DELETABLE, IMMUTABLE. """ try : return self._builtin except Exception as e: raise e @builtin.setter<|fim▁hole|> self._builtin = builtin except Exception as e: raise e @property def policyname(self) : ur"""The name of the command policy. 
""" try : return self._policyname except Exception as e: raise e @policyname.setter def policyname(self, policyname) : ur"""The name of the command policy. """ try : self._policyname = policyname except Exception as e: raise e def _get_nitro_response(self, service, response) : ur""" converts nitro response into object and returns the object array in case of get request. """ try : result = service.payload_formatter.string_to_resource(systemglobal_authenticationpolicy_binding_response, response, self.__class__.__name__) if(result.errorcode != 0) : if (result.errorcode == 444) : service.clear_session(self) if result.severity : if (result.severity == "ERROR") : raise nitro_exception(result.errorcode, str(result.message), str(result.severity)) else : raise nitro_exception(result.errorcode, str(result.message), str(result.severity)) return result.systemglobal_authenticationpolicy_binding except Exception as e : raise e def _get_object_name(self) : ur""" Returns the value of object identifier argument """ try : return 0 except Exception as e : raise e @classmethod def add(cls, client, resource) : try : if resource and type(resource) is not list : updateresource = systemglobal_authenticationpolicy_binding() updateresource.policyname = resource.policyname updateresource.priority = resource.priority return updateresource.update_resource(client) else : if resource and len(resource) > 0 : updateresources = [systemglobal_authenticationpolicy_binding() for _ in range(len(resource))] for i in range(len(resource)) : updateresources[i].policyname = resource[i].policyname updateresources[i].priority = resource[i].priority return cls.update_bulk_request(client, updateresources) except Exception as e : raise e @classmethod def delete(cls, client, resource) : try : if resource and type(resource) is not list : deleteresource = systemglobal_authenticationpolicy_binding() deleteresource.policyname = resource.policyname return deleteresource.delete_resource(client) else : if resource and 
len(resource) > 0 : deleteresources = [systemglobal_authenticationpolicy_binding() for _ in range(len(resource))] for i in range(len(resource)) : deleteresources[i].policyname = resource[i].policyname return cls.delete_bulk_request(client, deleteresources) except Exception as e : raise e @classmethod def get(cls, service) : ur""" Use this API to fetch a systemglobal_authenticationpolicy_binding resources. """ try : obj = systemglobal_authenticationpolicy_binding() response = obj.get_resources(service) return response except Exception as e: raise e @classmethod def get_filtered(cls, service, filter_) : ur""" Use this API to fetch filtered set of systemglobal_authenticationpolicy_binding resources. Filter string should be in JSON format.eg: "port:80,servicetype:HTTP". """ try : obj = systemglobal_authenticationpolicy_binding() option_ = options() option_.filter = filter_ response = obj.getfiltered(service, option_) return response except Exception as e: raise e @classmethod def count(cls, service) : ur""" Use this API to count systemglobal_authenticationpolicy_binding resources configued on NetScaler. """ try : obj = systemglobal_authenticationpolicy_binding() option_ = options() option_.count = True response = obj.get_resources(service, option_) if response : return response[0].__dict__['___count'] return 0 except Exception as e: raise e @classmethod def count_filtered(cls, service, filter_) : ur""" Use this API to count the filtered set of systemglobal_authenticationpolicy_binding resources. Filter string should be in JSON format.eg: "port:80,servicetype:HTTP". 
""" try : obj = systemglobal_authenticationpolicy_binding() option_ = options() option_.count = True option_.filter = filter_ response = obj.getfiltered(service, option_) if response : return response[0].__dict__['___count'] return 0 except Exception as e: raise e class Builtin: MODIFIABLE = "MODIFIABLE" DELETABLE = "DELETABLE" IMMUTABLE = "IMMUTABLE" class systemglobal_authenticationpolicy_binding_response(base_response) : def __init__(self, length=1) : self.systemglobal_authenticationpolicy_binding = [] self.errorcode = 0 self.message = "" self.severity = "" self.sessionid = "" self.systemglobal_authenticationpolicy_binding = [systemglobal_authenticationpolicy_binding() for _ in range(length)]<|fim▁end|>
def builtin(self, builtin) : ur"""Indicates that a variable is a built-in (SYSTEM INTERNAL) type.<br/>Possible values = MODIFIABLE, DELETABLE, IMMUTABLE """ try :
<|file_name|>require.js<|end_file_name|><|fim▁begin|>/** vim: et:ts=4:sw=4:sts=4 * @license RequireJS 2.1.15 Copyright (c) 2010-2014, The Dojo Foundation All Rights Reserved. * Available via the MIT or new BSD license. * see: http://github.com/jrburke/requirejs for details */ //Not using strict: uneven strict support in browsers, #392, and causes //problems with requirejs.exec()/transpiler plugins that may not be strict. /*jslint regexp: true, nomen: true, sloppy: true */ /*global window, navigator, document, importScripts, setTimeout, opera */ var requirejs, require, define; (function (global) { var req, s, head, baseElement, dataMain, src, interactiveScript, currentlyAddingScript, mainScript, subPath, version = '2.1.15', commentRegExp = /(\/\*([\s\S]*?)\*\/|([^:]|^)\/\/(.*)$)/mg, cjsRequireRegExp = /[^.]\s*require\s*\(\s*["']([^'"\s]+)["']\s*\)/g, jsSuffixRegExp = /\.js$/, currDirRegExp = /^\.\//, op = Object.prototype, ostring = op.toString, hasOwn = op.hasOwnProperty, ap = Array.prototype, apsp = ap.splice, isBrowser = !!(typeof window !== 'undefined' && typeof navigator !== 'undefined' && window.document), isWebWorker = !isBrowser && typeof importScripts !== 'undefined', //PS3 indicates loaded and complete, but need to wait for complete //specifically. Sequence is 'loading', 'loaded', execution, // then 'complete'. The UA check is unfortunate, but not sure how //to feature test w/o causing perf issues. readyRegExp = isBrowser && navigator.platform === 'PLAYSTATION 3' ? /^complete$/ : /^(complete|loaded)$/, defContextName = '_', //Oh the tragedy, detecting opera. See the usage of isOpera for reason. isOpera = typeof opera !== 'undefined' && opera.toString() === '[object Opera]', contexts = {}, cfg = {}, globalDefQueue = [], useInteractive = false; function isFunction(it) { return ostring.call(it) === '[object Function]'; } function isArray(it) { return ostring.call(it) === '[object Array]'; } /** * Helper function for iterating over an array. 
If the func returns * a true value, it will break out of the loop. */ function each(ary, func) { if (ary) { var i; for (i = 0; i < ary.length; i += 1) { if (ary[i] && func(ary[i], i, ary)) { break; } } } } /** * Helper function for iterating over an array backwards. If the func * returns a true value, it will break out of the loop. */ function eachReverse(ary, func) { if (ary) { var i; for (i = ary.length - 1; i > -1; i -= 1) { if (ary[i] && func(ary[i], i, ary)) { break; } } } } function hasProp(obj, prop) { return hasOwn.call(obj, prop); } function getOwn(obj, prop) { return hasProp(obj, prop) && obj[prop]; } /** * Cycles over properties in an object and calls a function for each * property value. If the function returns a truthy value, then the * iteration is stopped. */ function eachProp(obj, func) { var prop; for (prop in obj) { if (hasProp(obj, prop)) { if (func(obj[prop], prop)) { break; } } } } /** * Simple function to mix in properties from source into target, * but only if target does not already have a property of the same name. */ function mixin(target, source, force, deepStringMixin) { if (source) { eachProp(source, function (value, prop) { if (force || !hasProp(target, prop)) { if (deepStringMixin && typeof value === 'object' && value && !isArray(value) && !isFunction(value) && !(value instanceof RegExp)) { if (!target[prop]) { target[prop] = {}; } mixin(target[prop], value, force, deepStringMixin); } else { target[prop] = value; } } }); } return target; } //Similar to Function.prototype.bind, but the 'this' object is specified //first, since it is easier to read/figure out what 'this' will be. function bind(obj, fn) { return function () { return fn.apply(obj, arguments); }; } function scripts() { return document.getElementsByTagName('script'); } function defaultOnError(err) { throw err; } //Allow getting a global that is expressed in //dot notation, like 'a.b.c'. 
function getGlobal(value) { if (!value) { return value; } var g = global; each(value.split('.'), function (part) { g = g[part]; }); return g; } /** * Constructs an error with a pointer to an URL with more information. * @param {String} id the error ID that maps to an ID on a web page. * @param {String} message human readable error. * @param {Error} [err] the original error, if there is one. * * @returns {Error} */ function makeError(id, msg, err, requireModules) { var e = new Error(msg + '\nhttp://requirejs.org/docs/errors.html#' + id); e.requireType = id; e.requireModules = requireModules; if (err) { e.originalError = err; } return e; } if (typeof define !== 'undefined') { //If a define is already in play via another AMD loader, //do not overwrite. return; } if (typeof requirejs !== 'undefined') { if (isFunction(requirejs)) { //Do not overwrite an existing requirejs instance. return; } cfg = requirejs; requirejs = undefined; } //Allow for a require config object if (typeof require !== 'undefined' && !isFunction(require)) { //assume it is a config object. cfg = require; require = undefined; } function newContext(contextName) { var inCheckLoaded, Module, context, handlers, checkLoadedTimeoutId, config = { //Defaults. Do not set a default for map //config to speed up normalize(), which //will run faster if there is no default. waitSeconds: 7, baseUrl: './', paths: {}, bundles: {}, pkgs: {}, shim: {}, config: {} }, registry = {}, //registry of just enabled modules, to speed //cycle breaking code when lots of modules //are registered, but not activated. enabledRegistry = {}, undefEvents = {}, defQueue = [], defined = {}, urlFetched = {}, bundlesMap = {}, requireCounter = 1, unnormalizedCounter = 1; /** * Trims the . and .. from an array of path segments. * It will keep a leading path segment if a .. will become * the first path segment, to help with module name lookups, * which act like paths, but can be remapped. 
But the end result, * all paths that use this function should look normalized. * NOTE: this method MODIFIES the input array. * @param {Array} ary the array of path segments. */ function trimDots(ary) { var i, part; for (i = 0; i < ary.length; i++) { part = ary[i]; if (part === '.') { ary.splice(i, 1); i -= 1; } else if (part === '..') { // If at the start, or previous value is still .., // keep them so that when converted to a path it may // still work when converted to a path, even though // as an ID it is less than ideal. In larger point // releases, may be better to just kick out an error. if (i === 0 || (i == 1 && ary[2] === '..') || ary[i - 1] === '..') { continue; } else if (i > 0) { ary.splice(i - 1, 2); i -= 2; } } } } /** * Given a relative module name, like ./something, normalize it to * a real name that can be mapped to a path. * @param {String} name the relative name * @param {String} baseName a real name that the name arg is relative * to. * @param {Boolean} applyMap apply the map config to the value. Should * only be done if this normalization is for a dependency ID. * @returns {String} normalized name */ function normalize(name, baseName, applyMap) { var pkgMain, mapValue, nameParts, i, j, nameSegment, lastIndex, foundMap, foundI, foundStarMap, starI, normalizedBaseParts, baseParts = (baseName && baseName.split('/')), map = config.map, starMap = map && map['*']; //Adjust any relative paths. if (name) { name = name.split('/'); lastIndex = name.length - 1; // If wanting node ID compatibility, strip .js from end // of IDs. Have to do this here, and not in nameToUrl // because node allows either .js or non .js to map // to same file. if (config.nodeIdCompat && jsSuffixRegExp.test(name[lastIndex])) { name[lastIndex] = name[lastIndex].replace(jsSuffixRegExp, ''); } // Starts with a '.' so need the baseName if (name[0].charAt(0) === '.' && baseParts) { //Convert baseName to array, and lop off the last part, //so that . 
matches that 'directory' and not name of the baseName's //module. For instance, baseName of 'one/two/three', maps to //'one/two/three.js', but we want the directory, 'one/two' for //this normalization. normalizedBaseParts = baseParts.slice(0, baseParts.length - 1); name = normalizedBaseParts.concat(name); } trimDots(name); name = name.join('/'); } //Apply map config if available. if (applyMap && map && (baseParts || starMap)) { nameParts = name.split('/'); outerLoop: for (i = nameParts.length; i > 0; i -= 1) { nameSegment = nameParts.slice(0, i).join('/'); if (baseParts) { //Find the longest baseName segment match in the config. //So, do joins on the biggest to smallest lengths of baseParts. for (j = baseParts.length; j > 0; j -= 1) { mapValue = getOwn(map, baseParts.slice(0, j).join('/')); //baseName segment has config, find if it has one for //this name. if (mapValue) { mapValue = getOwn(mapValue, nameSegment); if (mapValue) { //Match, update name to the new value. foundMap = mapValue; foundI = i; break outerLoop; } } } } //Check for a star map match, but just hold on to it, //if there is a shorter segment match later in a matching //config, then favor over this star map. if (!foundStarMap && starMap && getOwn(starMap, nameSegment)) { foundStarMap = getOwn(starMap, nameSegment); starI = i; } } if (!foundMap && foundStarMap) { foundMap = foundStarMap; foundI = starI; } if (foundMap) { nameParts.splice(0, foundI, foundMap); name = nameParts.join('/'); } } // If the name points to a package's name, use // the package main instead. pkgMain = getOwn(config.pkgs, name); return pkgMain ? 
pkgMain : name;
        }

        //Removes the <script> element that was injected for the named
        //module in THIS context, so a retry (e.g. paths fallback) can
        //inject a fresh one. No-op outside the browser.
        function removeScript(name) {
            if (isBrowser) {
                each(scripts(), function (scriptNode) {
                    if (scriptNode.getAttribute('data-requiremodule') === name &&
                            scriptNode.getAttribute('data-requirecontext') === context.contextName) {
                        scriptNode.parentNode.removeChild(scriptNode);
                        return true;
                    }
                });
            }
        }

        //If config.paths[id] is an array with more than one entry, drop
        //the entry that just failed, undefine the module and re-request
        //it against the next path. Returns true when a fallback was
        //attempted (undefined otherwise).
        function hasPathFallback(id) {
            var pathConfig = getOwn(config.paths, id);
            if (pathConfig && isArray(pathConfig) && pathConfig.length > 1) {
                //Pop off the first array value, since it failed, and
                //retry
                pathConfig.shift();
                context.require.undef(id);

                //Custom require that does not do map translation, since
                //ID is "absolute", already mapped/resolved.
                context.makeRequire(null, {
                    skipMap: true
                })([id]);

                return true;
            }
        }

        //Turns a plugin!resource to [plugin, resource]
        //with the plugin being undefined if the name
        //did not have a plugin prefix. Only the FIRST '!' splits;
        //the resource part may itself contain '!'.
        function splitPrefix(name) {
            var prefix,
                index = name ? name.indexOf('!') : -1;
            if (index > -1) {
                prefix = name.substring(0, index);
                name = name.substring(index + 1, name.length);
            }
            return [prefix, name];
        }

        /**
         * Creates a module mapping that includes plugin prefix, module
         * name, and path. If parentModuleMap is provided it will
         * also normalize the name via require.normalize()
         *
         * @param {String} name the module name
         * @param {String} [parentModuleMap] parent module map
         * for the module name, used to resolve relative names.
         * @param {Boolean} isNormalized: is the ID already normalized.
         * This is true if this call is done for a define() module ID.
         * @param {Boolean} applyMap: apply the map config to the ID.
         * Should only be true if this map is for a dependency.
         *
         * @returns {Object}
         */
        function makeModuleMap(name, parentModuleMap, isNormalized, applyMap) {
            var url, pluginModule, suffix, nameParts,
                prefix = null,
                parentName = parentModuleMap ? parentModuleMap.name : null,
                originalName = name,
                isDefine = true,
                normalizedName = '';

            //If no name, then it means it is a require call, generate an
            //internal name.
            
if (!name) { isDefine = false; name = '_@r' + (requireCounter += 1); } nameParts = splitPrefix(name); prefix = nameParts[0]; name = nameParts[1]; if (prefix) { prefix = normalize(prefix, parentName, applyMap); pluginModule = getOwn(defined, prefix); } //Account for relative paths if there is a base name. if (name) { if (prefix) { if (pluginModule && pluginModule.normalize) { //Plugin is loaded, use its normalize method. normalizedName = pluginModule.normalize(name, function (name) { return normalize(name, parentName, applyMap); }); } else { // If nested plugin references, then do not try to // normalize, as it will not normalize correctly. This // places a restriction on resourceIds, and the longer // term solution is not to normalize until plugins are // loaded and all normalizations to allow for async // loading of a loader plugin. But for now, fixes the // common uses. Details in #1131 normalizedName = name.indexOf('!') === -1 ? normalize(name, parentName, applyMap) : name; } } else { //A regular module. normalizedName = normalize(name, parentName, applyMap); //Normalized name may be a plugin ID due to map config //application in normalize. The map config values must //already be normalized, so do not need to redo that part. nameParts = splitPrefix(normalizedName); prefix = nameParts[0]; normalizedName = nameParts[1]; isNormalized = true; url = context.nameToUrl(normalizedName); } } //If the id is a plugin id that cannot be determined if it needs //normalization, stamp it with a unique ID so two matching relative //ids that may conflict can be separate. suffix = prefix && !pluginModule && !isNormalized ? '_unnormalized' + (unnormalizedCounter += 1) : ''; return { prefix: prefix, name: normalizedName, parentMap: parentModuleMap, unnormalized: !!suffix, url: url, originalName: originalName, isDefine: isDefine, id: (prefix ? prefix + '!' 
+ normalizedName : normalizedName) + suffix }; } function getModule(depMap) { var id = depMap.id, mod = getOwn(registry, id); if (!mod) { mod = registry[id] = new context.Module(depMap); } return mod; } function on(depMap, name, fn) { var id = depMap.id, mod = getOwn(registry, id); if (hasProp(defined, id) && (!mod || mod.defineEmitComplete)) { if (name === 'defined') { fn(defined[id]); } } else { mod = getModule(depMap); if (mod.error && name === 'error') { fn(mod.error); } else { mod.on(name, fn); } } } function onError(err, errback) { var ids = err.requireModules, notified = false; if (errback) { errback(err); } else { each(ids, function (id) { var mod = getOwn(registry, id); if (mod) { //Set error on module, so it skips timeout checks. mod.error = err; if (mod.events.error) { notified = true; mod.emit('error', err); } } }); if (!notified) { req.onError(err); } } } /** * Internal method to transfer globalQueue items to this context's * defQueue. */ function takeGlobalQueue() { //Push all the globalDefQueue items into the context's defQueue if (globalDefQueue.length) { //Array splice in the values since the context code has a //local var ref to defQueue, so cannot just reassign the one //on context. apsp.apply(defQueue, [defQueue.length, 0].concat(globalDefQueue)); globalDefQueue = []; } } handlers = { 'require': function (mod) { if (mod.require) { return mod.require; } else { return (mod.require = context.makeRequire(mod.map)); } }, 'exports': function (mod) { mod.usingExports = true; if (mod.map.isDefine) { if (mod.exports) { return (defined[mod.map.id] = mod.exports); } else { return (mod.exports = defined[mod.map.id] = {}); } } }, 'module': function (mod) { if (mod.module) { return mod.module; } else { return (mod.module = { id: mod.map.id, uri: mod.map.url, config: function () { return getOwn(config.config, mod.map.id) || {}; }, exports: mod.exports || (mod.exports = {}) }); } } }; function cleanRegistry(id) { //Clean up machinery used for waiting modules. 
delete registry[id]; delete enabledRegistry[id]; } function breakCycle(mod, traced, processed) { var id = mod.map.id; if (mod.error) { mod.emit('error', mod.error); } else { traced[id] = true; each(mod.depMaps, function (depMap, i) { var depId = depMap.id, dep = getOwn(registry, depId); //Only force things that have not completed //being defined, so still in the registry, //and only if it has not been matched up //in the module already. if (dep && !mod.depMatched[i] && !processed[depId]) { if (getOwn(traced, depId)) { mod.defineDep(i, defined[depId]); mod.check(); //pass false? } else { breakCycle(dep, traced, processed); } } }); processed[id] = true; } } function checkLoaded() { var err, usingPathFallback, waitInterval = config.waitSeconds * 1000, //It is possible to disable the wait interval by using waitSeconds of 0. expired = waitInterval && (context.startTime + waitInterval) < new Date().getTime(), noLoads = [], reqCalls = [], stillLoading = false, needCycleCheck = true; //Do not bother if this call was a result of a cycle break. if (inCheckLoaded) { return; } inCheckLoaded = true; //Figure out the state of all the modules. eachProp(enabledRegistry, function (mod) { var map = mod.map, modId = map.id; //Skip things that are not enabled or in error state. if (!mod.enabled) { return; } if (!map.isDefine) { reqCalls.push(mod); } if (!mod.error) { //If the module should be executed, and it has not //been inited and time is up, remember it. if (!mod.inited && expired) { if (hasPathFallback(modId)) { usingPathFallback = true; stillLoading = true; } else { noLoads.push(modId); removeScript(modId); } } else if (!mod.inited && mod.fetched && map.isDefine) { stillLoading = true; if (!map.prefix) { //No reason to keep looking for unfinished //loading. If the only stillLoading is a //plugin resource though, keep going, //because it may be that a plugin resource //is waiting on a non-plugin cycle. 
return (needCycleCheck = false); } } } }); if (expired && noLoads.length) { //If wait time expired, throw error of unloaded modules. err = makeError('timeout', 'Load timeout for modules: ' + noLoads, null, noLoads); err.contextName = context.contextName; return onError(err); } //Not expired, check for a cycle. if (needCycleCheck) { each(reqCalls, function (mod) { breakCycle(mod, {}, {}); }); } //If still waiting on loads, and the waiting load is something //other than a plugin resource, or there are still outstanding //scripts, then just try back later. if ((!expired || usingPathFallback) && stillLoading) { //Something is still waiting to load. Wait for it, but only //if a timeout is not already in effect. if ((isBrowser || isWebWorker) && !checkLoadedTimeoutId) { checkLoadedTimeoutId = setTimeout(function () { checkLoadedTimeoutId = 0; checkLoaded(); }, 50); } } inCheckLoaded = false; } Module = function (map) { this.events = getOwn(undefEvents, map.id) || {}; this.map = map; this.shim = getOwn(config.shim, map.id); this.depExports = []; this.depMaps = []; this.depMatched = []; this.pluginMaps = {}; this.depCount = 0; /* this.exports this.factory this.depMaps = [], this.enabled, this.fetched */ }; Module.prototype = { init: function (depMaps, factory, errback, options) { options = options || {}; //Do not do more inits if already done. Can happen if there //are multiple define calls for the same module. That is not //a normal, common case, but it is also not unexpected. if (this.inited) { return; } this.factory = factory; if (errback) { //Register for errors on this module. this.on('error', errback); } else if (this.events.error) { //If no errback already, but there are error listeners //on this module, set up an errback to pass to the deps. errback = bind(this, function (err) { this.emit('error', err); }); } //Do a copy of the dependency array, so that //source inputs are not modified. 
For example //"shim" deps are passed in here directly, and //doing a direct modification of the depMaps array //would affect that config. this.depMaps = depMaps && depMaps.slice(0); this.errback = errback; //Indicate this module has be initialized this.inited = true; this.ignore = options.ignore; //Could have option to init this module in enabled mode, //or could have been previously marked as enabled. However, //the dependencies are not known until init is called. So //if enabled previously, now trigger dependencies as enabled. if (options.enabled || this.enabled) { //Enable this module and dependencies. //Will call this.check() this.enable(); } else { this.check(); } }, defineDep: function (i, depExports) { //Because of cycles, defined callback for a given //export can be called more than once. if (!this.depMatched[i]) { this.depMatched[i] = true; this.depCount -= 1; this.depExports[i] = depExports; } }, fetch: function () { if (this.fetched) { return; } this.fetched = true; context.startTime = (new Date()).getTime(); var map = this.map; //If the manager is for a plugin managed resource, //ask the plugin to load it now. if (this.shim) { context.makeRequire(this.map, { enableBuildCallback: true })(this.shim.deps || [], bind(this, function () { return map.prefix ? this.callPlugin() : this.load(); })); } else { //Regular dependency. return map.prefix ? this.callPlugin() : this.load(); } }, load: function () { var url = this.map.url; //Regular dependency. if (!urlFetched[url]) { urlFetched[url] = true; context.load(this.map.id, url); } }, /** * Checks if the module is ready to define itself, and if so, * define it. 
*/ check: function () { if (!this.enabled || this.enabling) { return; } var err, cjsModule, id = this.map.id, depExports = this.depExports, exports = this.exports, factory = this.factory; if (!this.inited) { this.fetch(); } else if (this.error) { this.emit('error', this.error); } else if (!this.defining) { //The factory could trigger another require call //that would result in checking this module to //define itself again. If already in the process //of doing that, skip this work. this.defining = true; if (this.depCount < 1 && !this.defined) { if (isFunction(factory)) { //If there is an error listener, favor passing //to that instead of throwing an error. However, //only do it for define()'d modules. require //errbacks should not be called for failures in //their callbacks (#699). However if a global //onError is set, use that. if ((this.events.error && this.map.isDefine) || req.onError !== defaultOnError) { try { exports = context.execCb(id, factory, depExports, exports); } catch (e) { err = e; } } else { exports = context.execCb(id, factory, depExports, exports); } // Favor return value over exports. If node/cjs in play, // then will not have a return value anyway. Favor // module.exports assignment over exports object. if (this.map.isDefine && exports === undefined) { cjsModule = this.module; if (cjsModule) { exports = cjsModule.exports; } else if (this.usingExports) { //exports already set the defined value. exports = this.exports; } } if (err) { err.requireMap = this.map; err.requireModules = this.map.isDefine ? [this.map.id] : null; err.requireType = this.map.isDefine ? 'define' : 'require'; return onError((this.error = err)); } } else { //Just a literal value exports = factory; } this.exports = exports; if (this.map.isDefine && !this.ignore) { defined[id] = exports; if (req.onResourceLoad) { req.onResourceLoad(context, this.map, this.depMaps); } } //Clean up cleanRegistry(id); this.defined = true; } //Finished the define stage. 
Allow calling check again //to allow define notifications below in the case of a //cycle. this.defining = false; if (this.defined && !this.defineEmitted) { this.defineEmitted = true; this.emit('defined', this.exports); this.defineEmitComplete = true; } } }, callPlugin: function () { var map = this.map, id = map.id, //Map already normalized the prefix. pluginMap = makeModuleMap(map.prefix); //Mark this as a dependency for this plugin, so it //can be traced for cycles. this.depMaps.push(pluginMap); on(pluginMap, 'defined', bind(this, function (plugin) { var load, normalizedMap, normalizedMod, bundleId = getOwn(bundlesMap, this.map.id), name = this.map.name, parentName = this.map.parentMap ? this.map.parentMap.name : null, localRequire = context.makeRequire(map.parentMap, { enableBuildCallback: true }); //If current map is not normalized, wait for that //normalized name to load instead of continuing. if (this.map.unnormalized) { //Normalize the ID if the plugin allows it. if (plugin.normalize) { name = plugin.normalize(name, function (name) { return normalize(name, parentName, true); }) || ''; } //prefix and name should already be normalized, no need //for applying map config again either. normalizedMap = makeModuleMap(map.prefix + '!' + name, this.map.parentMap); on(normalizedMap, 'defined', bind(this, function (value) { this.init([], function () { return value; }, null, { enabled: true, ignore: true }); })); normalizedMod = getOwn(registry, normalizedMap.id); if (normalizedMod) { //Mark this as a dependency for this plugin, so it //can be traced for cycles. this.depMaps.push(normalizedMap); if (this.events.error) { normalizedMod.on('error', bind(this, function (err) { this.emit('error', err); })); } normalizedMod.enable(); } return; } //If a paths config, then just load that file instead to //resolve the plugin, as it is built into that paths layer. 
if (bundleId) { this.map.url = context.nameToUrl(bundleId); this.load(); return; } load = bind(this, function (value) { this.init([], function () { return value; }, null, { enabled: true }); }); load.error = bind(this, function (err) { this.inited = true; this.error = err; err.requireModules = [id]; //Remove temp unnormalized modules for this module, //since they will never be resolved otherwise now. eachProp(registry, function (mod) { if (mod.map.id.indexOf(id + '_unnormalized') === 0) { cleanRegistry(mod.map.id); } }); onError(err); }); //Allow plugins to load other code without having to know the //context or how to 'complete' the load. load.fromText = bind(this, function (text, textAlt) { /*jslint evil: true */ var moduleName = map.name, moduleMap = makeModuleMap(moduleName), hasInteractive = useInteractive; //As of 2.1.0, support just passing the text, to reinforce //fromText only being called once per resource. Still //support old style of passing moduleName but discard //that moduleName in favor of the internal ref. if (textAlt) { text = textAlt; } //Turn off interactive script matching for IE for any define //calls in the text, then turn it back on at the end. if (hasInteractive) { useInteractive = false; } //Prime the system by creating a module instance for //it. getModule(moduleMap); //Transfer any config to this other module. if (hasProp(config.config, id)) { config.config[moduleName] = config.config[id]; } try { req.exec(text); } catch (e) { return onError(makeError('fromtexteval', 'fromText eval for ' + id + ' failed: ' + e, e, [id])); } if (hasInteractive) { useInteractive = true; } //Mark this as a dependency for the plugin //resource this.depMaps.push(moduleMap); //Support anonymous modules. context.completeLoad(moduleName); //Bind the value of that module to the value for this //resource ID. 
localRequire([moduleName], load); }); //Use parentName here since the plugin's name is not reliable, //could be some weird string with no path that actually wants to //reference the parentName's path. plugin.load(map.name, localRequire, load, config); })); context.enable(pluginMap, this); this.pluginMaps[pluginMap.id] = pluginMap; }, enable: function () { enabledRegistry[this.map.id] = this; this.enabled = true; //Set flag mentioning that the module is enabling, //so that immediate calls to the defined callbacks //for dependencies do not trigger inadvertent load //with the depCount still being zero. this.enabling = true; //Enable each dependency each(this.depMaps, bind(this, function (depMap, i) { var id, mod, handler; if (typeof depMap === 'string') { //Dependency needs to be converted to a depMap //and wired up to this module. depMap = makeModuleMap(depMap, (this.map.isDefine ? this.map : this.map.parentMap), false, !this.skipMap); this.depMaps[i] = depMap; handler = getOwn(handlers, depMap.id); if (handler) { this.depExports[i] = handler(this); return; } this.depCount += 1; on(depMap, 'defined', bind(this, function (depExports) { this.defineDep(i, depExports); this.check(); })); if (this.errback) { on(depMap, 'error', bind(this, this.errback)); } } id = depMap.id; mod = registry[id]; //Skip special modules like 'require', 'exports', 'module' //Also, don't call enable if it is already enabled, //important in circular dependency cases. 
if (!hasProp(handlers, id) && mod && !mod.enabled) { context.enable(depMap, this); } })); //Enable each plugin that is used in //a dependency eachProp(this.pluginMaps, bind(this, function (pluginMap) { var mod = getOwn(registry, pluginMap.id); if (mod && !mod.enabled) { context.enable(pluginMap, this); } })); this.enabling = false; this.check(); }, on: function (name, cb) { var cbs = this.events[name]; if (!cbs) { cbs = this.events[name] = []; } cbs.push(cb); }, emit: function (name, evt) { each(this.events[name], function (cb) { cb(evt); }); if (name === 'error') { //Now that the error handler was triggered, remove //the listeners, since this broken Module instance //can stay around for a while in the registry. delete this.events[name]; } } }; function callGetModule(args) { //Skip modules already defined. if (!hasProp(defined, args[0])) { getModule(makeModuleMap(args[0], null, true)).init(args[1], args[2]); } } function removeListener(node, func, name, ieName) { //Favor detachEvent because of IE9 //issue, see attachEvent/addEventListener comment elsewhere //in this file. if (node.detachEvent && !isOpera) { //Probably IE. If not it will throw an error, which will be //useful to know. if (ieName) { node.detachEvent(ieName, func); } } else { node.removeEventListener(name, func, false); } } /** * Given an event from a script node, get the requirejs info from it, * and then removes the event listeners on the node. * @param {Event} evt * @returns {Object} */ function getScriptData(evt) { //Using currentTarget instead of target for Firefox 2.0's sake. Not //all old browsers will be supported, but this one was easy enough //to support and still makes sense. var node = evt.currentTarget || evt.srcElement; //Remove the listeners once here. 
removeListener(node, context.onScriptLoad, 'load', 'onreadystatechange'); removeListener(node, context.onScriptError, 'error'); return { node: node, id: node && node.getAttribute('data-requiremodule') }; } function intakeDefines() { var args; //Any defined modules in the global queue, intake them now. takeGlobalQueue(); //Make sure any remaining defQueue items get properly processed. while (defQueue.length) { args = defQueue.shift(); if (args[0] === null) { return onError(makeError('mismatch', 'Mismatched anonymous define() module: ' + args[args.length - 1])); } else { //args are id, deps, factory. Should be normalized by the //define() function. callGetModule(args); } } } context = { config: config, contextName: contextName, registry: registry, defined: defined, urlFetched: urlFetched, defQueue: defQueue, Module: Module, makeModuleMap: makeModuleMap, nextTick: req.nextTick, onError: onError, /** * Set a configuration for the context. * @param {Object} cfg config object to integrate. */ configure: function (cfg) { //Make sure the baseUrl ends in a slash. if (cfg.baseUrl) { if (cfg.baseUrl.charAt(cfg.baseUrl.length - 1) !== '/') { cfg.baseUrl += '/'; } } //Save off the paths since they require special processing, //they are additive. 
var shim = config.shim, objs = { paths: true, bundles: true, config: true, map: true }; eachProp(cfg, function (value, prop) { if (objs[prop]) { if (!config[prop]) { config[prop] = {}; } mixin(config[prop], value, true, true); } else { config[prop] = value; } }); //Reverse map the bundles if (cfg.bundles) { eachProp(cfg.bundles, function (value, prop) { each(value, function (v) { if (v !== prop) { bundlesMap[v] = prop; } }); }); } //Merge shim if (cfg.shim) { eachProp(cfg.shim, function (value, id) { //Normalize the structure if (isArray(value)) { value = { deps: value }; } if ((value.exports || value.init) && !value.exportsFn) { value.exportsFn = context.makeShimExports(value); } shim[id] = value; }); config.shim = shim; } //Adjust packages if necessary. if (cfg.packages) { each(cfg.packages, function (pkgObj) { var location, name; pkgObj = typeof pkgObj === 'string' ? { name: pkgObj } : pkgObj; name = pkgObj.name; location = pkgObj.location; if (location) { config.paths[name] = pkgObj.location; } //Save pointer to main module ID for pkg name. //Remove leading dot in main, so main paths are normalized, //and remove any trailing .js, since different package //envs have different conventions: some use a module name, //some use a file name. config.pkgs[name] = pkgObj.name + '/' + (pkgObj.main || 'main') .replace(currDirRegExp, '') .replace(jsSuffixRegExp, ''); }); } //If there are any "waiting to execute" modules in the registry, //update the maps for them, since their info, like URLs to load, //may have changed. eachProp(registry, function (mod, id) { //If module already has init called, since it is too //late to modify them, and ignore unnormalized ones //since they are transient. if (!mod.inited && !mod.map.unnormalized) { mod.map = makeModuleMap(id); } }); //If a deps array or a config callback is specified, then call //require with those args. This is useful when require is defined as a //config object before require.js is loaded. 
if (cfg.deps || cfg.callback) { context.require(cfg.deps || [], cfg.callback); } }, makeShimExports: function (value) { function fn() { var ret; if (value.init) { ret = value.init.apply(global, arguments); } return ret || (value.exports && getGlobal(value.exports)); } return fn; }, makeRequire: function (relMap, options) { options = options || {}; function localRequire(deps, callback, errback) { var id, map, requireMod; if (options.enableBuildCallback && callback && isFunction(callback)) { callback.__requireJsBuild = true; } if (typeof deps === 'string') { if (isFunction(callback)) { //Invalid call return onError(makeError('requireargs', 'Invalid require call'), errback); } //If require|exports|module are requested, get the //value for them from the special handlers. Caveat: //this only works while module is being defined. if (relMap && hasProp(handlers, deps)) { return handlers[deps](registry[relMap.id]); } //Synchronous access to one module. If require.get is //available (as in the Node adapter), prefer that. if (req.get) { return req.get(context, deps, relMap, localRequire); } //Normalize module name, if it contains . or .. map = makeModuleMap(deps, relMap, false, true); id = map.id; if (!hasProp(defined, id)) { return onError(makeError('notloaded', 'Module name "' + id + '" has not been loaded yet for context: ' + contextName + (relMap ? '' : '. Use require([])'))); } return defined[id]; } //Grab defines waiting in the global queue. intakeDefines(); //Mark all the dependencies as needing to be loaded. context.nextTick(function () { //Some defines could have been added since the //require call, collect them.<|fim▁hole|> intakeDefines(); requireMod = getModule(makeModuleMap(null, relMap)); //Store if map config should be applied to this require //call for dependencies. 
requireMod.skipMap = options.skipMap; requireMod.init(deps, callback, errback, { enabled: true }); checkLoaded(); }); return localRequire; } mixin(localRequire, { isBrowser: isBrowser, /** * Converts a module name + .extension into an URL path. * *Requires* the use of a module name. It does not support using * plain URLs like nameToUrl. */ toUrl: function (moduleNamePlusExt) { var ext, index = moduleNamePlusExt.lastIndexOf('.'), segment = moduleNamePlusExt.split('/')[0], isRelative = segment === '.' || segment === '..'; //Have a file extension alias, and it is not the //dots from a relative path. if (index !== -1 && (!isRelative || index > 1)) { ext = moduleNamePlusExt.substring(index, moduleNamePlusExt.length); moduleNamePlusExt = moduleNamePlusExt.substring(0, index); } return context.nameToUrl(normalize(moduleNamePlusExt, relMap && relMap.id, true), ext, true); }, defined: function (id) { return hasProp(defined, makeModuleMap(id, relMap, false, true).id); }, specified: function (id) { id = makeModuleMap(id, relMap, false, true).id; return hasProp(defined, id) || hasProp(registry, id); } }); //Only allow undef on top level require calls if (!relMap) { localRequire.undef = function (id) { //Bind any waiting define() calls to this context, //fix for #408 takeGlobalQueue(); var map = makeModuleMap(id, relMap, true), mod = getOwn(registry, id); removeScript(id); delete defined[id]; delete urlFetched[map.url]; delete undefEvents[id]; //Clean queued defines too. Go backwards //in array so that the splices do not //mess up the iteration. eachReverse(defQueue, function (args, i) { if (args[0] === id) { defQueue.splice(i, 1); } }); if (mod) { //Hold on to listeners in case the //module will be attempted to be reloaded //using a different config. if (mod.events.defined) { undefEvents[id] = mod.events; } cleanRegistry(id); } }; } return localRequire; }, /** * Called to enable a module if it is still in the registry * awaiting enablement. 
A second arg, parent, the parent module, * is passed in for context, when this method is overridden by * the optimizer. Not shown here to keep code compact. */ enable: function (depMap) { var mod = getOwn(registry, depMap.id); if (mod) { getModule(depMap).enable(); } }, /** * Internal method used by environment adapters to complete a load event. * A load event could be a script load or just a load pass from a synchronous * load call. * @param {String} moduleName the name of the module to potentially complete. */ completeLoad: function (moduleName) { var found, args, mod, shim = getOwn(config.shim, moduleName) || {}, shExports = shim.exports; takeGlobalQueue(); while (defQueue.length) { args = defQueue.shift(); if (args[0] === null) { args[0] = moduleName; //If already found an anonymous module and bound it //to this name, then this is some other anon module //waiting for its completeLoad to fire. if (found) { break; } found = true; } else if (args[0] === moduleName) { //Found matching define call for this script! found = true; } callGetModule(args); } //Do this after the cycle of callGetModule in case the result //of those calls/init calls changes the registry. mod = getOwn(registry, moduleName); if (!found && !hasProp(defined, moduleName) && mod && !mod.inited) { if (config.enforceDefine && (!shExports || !getGlobal(shExports))) { if (hasPathFallback(moduleName)) { return; } else { return onError(makeError('nodefine', 'No define call for ' + moduleName, null, [moduleName])); } } else { //A script that does not call define(), so just simulate //the call for it. callGetModule([moduleName, (shim.deps || []), shim.exportsFn]); } } checkLoaded(); }, /** * Converts a module name to a file path. Supports cases where * moduleName may actually be just an URL. * Note that it **does not** call normalize on the moduleName, * it is assumed to have already been normalized. This is an * internal API, not a public one. Use toUrl for the public API. 
*/ nameToUrl: function (moduleName, ext, skipExt) { var paths, syms, i, parentModule, url, parentPath, bundleId, pkgMain = getOwn(config.pkgs, moduleName); if (pkgMain) { moduleName = pkgMain; } bundleId = getOwn(bundlesMap, moduleName); if (bundleId) { return context.nameToUrl(bundleId, ext, skipExt); } //If a colon is in the URL, it indicates a protocol is used and it is just //an URL to a file, or if it starts with a slash, contains a query arg (i.e. ?) //or ends with .js, then assume the user meant to use an url and not a module id. //The slash is important for protocol-less URLs as well as full paths. if (req.jsExtRegExp.test(moduleName)) { //Just a plain path, not module name lookup, so just return it. //Add extension if it is included. This is a bit wonky, only non-.js things pass //an extension, this method probably needs to be reworked. url = moduleName + (ext || ''); } else { //A module that needs to be converted to a path. paths = config.paths; syms = moduleName.split('/'); //For each module name segment, see if there is a path //registered for it. Start with most specific name //and work up from it. for (i = syms.length; i > 0; i -= 1) { parentModule = syms.slice(0, i).join('/'); parentPath = getOwn(paths, parentModule); if (parentPath) { //If an array, it means there are a few choices, //Choose the one that is desired if (isArray(parentPath)) { parentPath = parentPath[0]; } syms.splice(0, i, parentPath); break; } } //Join the path parts together, then figure out if baseUrl is needed. url = syms.join('/'); url += (ext || (/^data\:|\?/.test(url) || skipExt ? '' : '.js')); url = (url.charAt(0) === '/' || url.match(/^[\w\+\.\-]+:/) ? '' : config.baseUrl) + url; } return config.urlArgs ? url + ((url.indexOf('?') === -1 ? '?' : '&') + config.urlArgs) : url; }, //Delegates to req.load. Broken out as a separate function to //allow overriding in the optimizer. load: function (id, url) { req.load(context, id, url); }, /** * Executes a module callback function. 
Broken out as a separate function * solely to allow the build system to sequence the files in the built * layer in the right sequence. * * @private */ execCb: function (name, callback, args, exports) { return callback.apply(exports, args); }, /** * callback for script loads, used to check status of loading. * * @param {Event} evt the event from the browser for the script * that was loaded. */ onScriptLoad: function (evt) { //Using currentTarget instead of target for Firefox 2.0's sake. Not //all old browsers will be supported, but this one was easy enough //to support and still makes sense. if (evt.type === 'load' || (readyRegExp.test((evt.currentTarget || evt.srcElement).readyState))) { //Reset interactive script so a script node is not held onto for //to long. interactiveScript = null; //Pull out the name of the module and the context. var data = getScriptData(evt); context.completeLoad(data.id); } }, /** * Callback for script errors. */ onScriptError: function (evt) { var data = getScriptData(evt); if (!hasPathFallback(data.id)) { return onError(makeError('scripterror', 'Script error for: ' + data.id, evt, [data.id])); } } }; context.require = context.makeRequire(); return context; } /** * Main entry point. * * If the only argument to require is a string, then the module that * is represented by that string is fetched for the appropriate context. * * If the first argument is an array, then it will be treated as an array * of dependency string names to fetch. An optional function callback can * be specified to execute when all of those dependencies are available. * * Make a local req variable to help Caja compliance (it assumes things * on a require that are not standardized), and to give a short * name for minification/local scope use. */ req = requirejs = function (deps, callback, errback, optional) { //Find the right context, use default var context, config, contextName = defContextName; // Determine if have config object in the call. 
if (!isArray(deps) && typeof deps !== 'string') { // deps is a config object config = deps; if (isArray(callback)) { // Adjust args if there are dependencies deps = callback; callback = errback; errback = optional; } else { deps = []; } } if (config && config.context) { contextName = config.context; } context = getOwn(contexts, contextName); if (!context) { context = contexts[contextName] = req.s.newContext(contextName); } if (config) { context.configure(config); } return context.require(deps, callback, errback); }; /** * Support require.config() to make it easier to cooperate with other * AMD loaders on globally agreed names. */ req.config = function (config) { return req(config); }; /** * Execute something after the current tick * of the event loop. Override for other envs * that have a better solution than setTimeout. * @param {Function} fn function to execute later. */ req.nextTick = typeof setTimeout !== 'undefined' ? function (fn) { setTimeout(fn, 4); } : function (fn) { fn(); }; /** * Export require as a global, but only if it does not already exist. */ if (!require) { require = req; } req.version = version; //Used to filter out dependencies that are already paths. req.jsExtRegExp = /^\/|:|\?|\.js$/; req.isBrowser = isBrowser; s = req.s = { contexts: contexts, newContext: newContext }; //Create default context. req({}); //Exports some context-sensitive methods on global require. each([ 'toUrl', 'undef', 'defined', 'specified' ], function (prop) { //Reference from contexts instead of early binding to default context, //so that during builds, the latest instance of the default context //with its config gets used. req[prop] = function () { var ctx = contexts[defContextName]; return ctx.require[prop].apply(ctx, arguments); }; }); if (isBrowser) { head = s.head = document.getElementsByTagName('head')[0]; //If BASE tag is in play, using appendChild is a problem for IE6. //When that browser dies, this can be removed. 
Details in this jQuery bug: //http://dev.jquery.com/ticket/2709 baseElement = document.getElementsByTagName('base')[0]; if (baseElement) { head = s.head = baseElement.parentNode; } } /** * Any errors that require explicitly generates will be passed to this * function. Intercept/override it if you want custom error handling. * @param {Error} err the error object. */ req.onError = defaultOnError; /** * Creates the node for the load command. Only used in browser envs. */ req.createNode = function (config, moduleName, url) { var node = config.xhtml ? document.createElementNS('http://www.w3.org/1999/xhtml', 'html:script') : document.createElement('script'); node.type = config.scriptType || 'text/javascript'; node.charset = 'utf-8'; node.async = true; return node; }; /** * Does the request to load a module for the browser case. * Make this a separate function to allow other environments * to override it. * * @param {Object} context the require context to find state. * @param {String} moduleName the name of the module. * @param {Object} url the URL to the module. */ req.load = function (context, moduleName, url) { var config = (context && context.config) || {}, node; if (isBrowser) { //In the browser so use a script tag node = req.createNode(config, moduleName, url); node.setAttribute('data-requirecontext', context.contextName); node.setAttribute('data-requiremodule', moduleName); //Set up load listener. Test attachEvent first because IE9 has //a subtle issue in its addEventListener and script onload firings //that do not match the behavior of all other browsers with //addEventListener support, which fire the onload event for a //script right after the script execution. See: //https://connect.microsoft.com/IE/feedback/details/648057/script-onload-event-is-not-fired-immediately-after-script-execution //UNFORTUNATELY Opera implements attachEvent but does not follow the script //script execution mode. 
if (node.attachEvent && //Check if node.attachEvent is artificially added by custom script or //natively supported by browser //read https://github.com/jrburke/requirejs/issues/187 //if we can NOT find [native code] then it must NOT natively supported. //in IE8, node.attachEvent does not have toString() //Note the test for "[native code" with no closing brace, see: //https://github.com/jrburke/requirejs/issues/273 !(node.attachEvent.toString && node.attachEvent.toString().indexOf('[native code') < 0) && !isOpera) { //Probably IE. IE (at least 6-8) do not fire //script onload right after executing the script, so //we cannot tie the anonymous define call to a name. //However, IE reports the script as being in 'interactive' //readyState at the time of the define call. useInteractive = true; node.attachEvent('onreadystatechange', context.onScriptLoad); //It would be great to add an error handler here to catch //404s in IE9+. However, onreadystatechange will fire before //the error handler, so that does not help. If addEventListener //is used, then IE will fire error before load, but we cannot //use that pathway given the connect.microsoft.com issue //mentioned above about not doing the 'script execute, //then fire the script load event listener before execute //next script' that other browsers do. //Best hope: IE10 fixes the issues, //and then destroys all installs of IE 6-9. //node.attachEvent('onerror', context.onScriptError); } else { node.addEventListener('load', context.onScriptLoad, false); node.addEventListener('error', context.onScriptError, false); } node.src = url; //For some cache cases in IE 6-8, the script executes before the end //of the appendChild execution, so to tie an anonymous define //call to the module name (which is stored on the node), hold on //to a reference to this node, but clear after the DOM insertion. 
currentlyAddingScript = node; if (baseElement) { head.insertBefore(node, baseElement); } else { head.appendChild(node); } currentlyAddingScript = null; return node; } else if (isWebWorker) { try { //In a web worker, use importScripts. This is not a very //efficient use of importScripts, importScripts will block until //its script is downloaded and evaluated. However, if web workers //are in play, the expectation that a build has been done so that //only one script needs to be loaded anyway. This may need to be //reevaluated if other use cases become common. importScripts(url); //Account for anonymous modules context.completeLoad(moduleName); } catch (e) { context.onError(makeError('importscripts', 'importScripts failed for ' + moduleName + ' at ' + url, e, [moduleName])); } } }; function getInteractiveScript() { if (interactiveScript && interactiveScript.readyState === 'interactive') { return interactiveScript; } eachReverse(scripts(), function (script) { if (script.readyState === 'interactive') { return (interactiveScript = script); } }); return interactiveScript; } //Look for a data-main script attribute, which could also adjust the baseUrl. if (isBrowser && !cfg.skipDataMain) { //Figure out baseUrl. Get it from the script tag with require.js in it. eachReverse(scripts(), function (script) { //Set the 'head' where we can append children by //using the script's parent. if (!head) { head = script.parentNode; } //Look for a data-main attribute to set main script for the page //to load. If it is there, the path to data main becomes the //baseUrl, if it is not already set. dataMain = script.getAttribute('data-main'); if (dataMain) { //Preserve dataMain in case it is a path (i.e. contains '?') mainScript = dataMain; //Set final baseUrl if there is not already an explicit one. if (!cfg.baseUrl) { //Pull off the directory of data-main for use as the //baseUrl. src = mainScript.split('/'); mainScript = src.pop(); subPath = src.length ? 
src.join('/') + '/' : './'; cfg.baseUrl = subPath; } //Strip off any trailing .js since mainScript is now //like a module name. mainScript = mainScript.replace(jsSuffixRegExp, ''); //If mainScript is still a path, fall back to dataMain if (req.jsExtRegExp.test(mainScript)) { mainScript = dataMain; } //Put the data-main script in the files to load. cfg.deps = cfg.deps ? cfg.deps.concat(mainScript) : [mainScript]; return true; } }); } /** * The function that handles definitions of modules. Differs from * require() in that a string for the module should be the first argument, * and the function to execute after dependencies are loaded should * return a value to define the module corresponding to the first argument's * name. */ define = function (name, deps, callback) { var node, context; //Allow for anonymous modules if (typeof name !== 'string') { //Adjust args appropriately callback = deps; deps = name; name = null; } //This module may not have dependencies if (!isArray(deps)) { callback = deps; deps = null; } //If no name, and callback is a function, then figure out if it a //CommonJS thing with dependencies. if (!deps && isFunction(callback)) { deps = []; //Remove comments from the callback string, //look for require calls, and pull them into the dependencies, //but only if there are function args. if (callback.length) { callback .toString() .replace(commentRegExp, '') .replace(cjsRequireRegExp, function (match, dep) { deps.push(dep); }); //May be a CommonJS thing even without require calls, but still //could use exports, and module. Avoid doing exports and module //work though if it just needs require. //REQUIRES the function to expect the CommonJS variables in the //order listed below. deps = (callback.length === 1 ? ['require'] : ['require', 'exports', 'module']).concat(deps); } } //If in IE 6-8 and hit an anonymous define() call, do the interactive //work. 
if (useInteractive) { node = currentlyAddingScript || getInteractiveScript(); if (node) { if (!name) { name = node.getAttribute('data-requiremodule'); } context = contexts[node.getAttribute('data-requirecontext')]; } } //Always save off evaluating the def call until the script onload handler. //This allows multiple modules to be in a file without prematurely //tracing dependencies, and allows for anonymous module support, //where the module name is not known until the script onload event //occurs. If no context, use the global queue, and get it processed //in the onscript load callback. (context ? context.defQueue : globalDefQueue).push([name, deps, callback]); }; define.amd = { jQuery: true }; /** * Executes the text. Normally just uses eval, but can be modified * to use a better, environment-specific call. Only used for transpiling * loader plugins, not for plain JS modules. * @param {String} text the text to execute/evaluate. */ req.exec = function (text) { /*jslint evil: true */ return eval(text); }; //Set up with config info. req(cfg); }(this));<|fim▁end|>
<|file_name|>skkserv_test.go<|end_file_name|><|fim▁begin|>package skkserv import ( "bytes" "net" "testing" "time" ) func createSkkserv(t *testing.T, readable string) (*Skkserv, MockConn) { var config Config config.Engines = append([]Engine{}, TestDb) var mock MockConn mock.t = t e, _ := StringToEuc(readable) mock.readable = bytes.NewBuffer(e) mock.written = bytes.NewBuffer(nil) mock.closed = new(bool) *mock.closed = false return New(config), mock } type MockAddr struct { } func (a MockAddr) Network() string { return "127.0.0.1:0" } func (a MockAddr) String() string { return "127.0.0.1:0" } type MockConn struct { t *testing.T readable *bytes.Buffer written *bytes.Buffer closed *bool } func (c MockConn) Read(b []byte) (n int, err error) { return c.readable.Read(b) } func (c MockConn) Write(b []byte) (n int, err error) { return c.written.Write(b) } func (c MockConn) Close() error { *c.closed = true return nil } func (c MockConn) LocalAddr() net.Addr { var m MockAddr return m } func (c MockConn) RemoteAddr() net.Addr { var m MockAddr return m } func (c MockConn) SetDeadline(t time.Time) error { c.t.Fatalf("SetDeadLine is not supported") return nil } func (c MockConn) SetReadDeadline(t time.Time) error { c.t.Fatalf("SetReadDeadLine is not supported") return nil } func (c MockConn) SetWriteDeadline(t time.Time) error { c.t.Fatalf("SetWriteDeadLine is not supported") return nil } func TestHandleRequestEnd(t *testing.T) { serv, conn := createSkkserv(t, "0\n") serv.HandleRequest(conn) if !*conn.closed { t.Error("0 does not close connection") } if conn.written.Len() != 0 { t.Errorf("connection get unexpected data %v", conn.written.Bytes()) } } func TestHandleRequestVersionThenEnd(t *testing.T) { serv, conn := createSkkserv(t, "2\n0\n") serv.HandleRequest(conn) if !*conn.closed { t.Error("0 does not close connection") } written := EucToString(conn.written.Bytes()) expected := "mskkserv " + VERSION + " \n" if written != expected { t.Errorf("Version returned unexpected string 
%v (expected: %v)", written, expected) } } func TestHandleRequestVersionNoLF(t *testing.T) { serv, conn := createSkkserv(t, "2") serv.HandleRequest(conn) written := EucToString(conn.written.Bytes()) expected := "mskkserv " + VERSION + " \n" if written != expected { t.Errorf("Version returned unexpected string %v (expected: %v)", written, expected) } } func TestHandleRequestVersionTwiceThenEnd(t *testing.T) {<|fim▁hole|> serv, conn := createSkkserv(t, "2\n2\n0\n") serv.HandleRequest(conn) if !*conn.closed { t.Error("0 does not close connection") } written := EucToString(conn.written.Bytes()) expected := "mskkserv " + VERSION + " \n" if written != expected+expected { t.Errorf("Version returned unexpected string %v", written) } } func TestHandleRequestRequestFailEnd(t *testing.T) { serv, conn := createSkkserv(t, "1みちご \n0\n") serv.HandleRequest(conn) if !*conn.closed { t.Error("0 does not close connection") } written := EucToString(conn.written.Bytes()) expected := "4みちご \n" if written != expected { t.Errorf("Version returned unexpected string %v (expected: %v)", written, expected) } } func TestHandleRequestRequestSuccessEnd(t *testing.T) { serv, conn := createSkkserv(t, "1わりもどs \n0\n") serv.HandleRequest(conn) if !*conn.closed { t.Error("0 does not close connection") } written := EucToString(conn.written.Bytes()) expected := "1/割り戻/割戻/\n" if written != expected { t.Errorf("Version returned unexpected string %v (expected: %v)", written, expected) } } func TestHandleRequestRequestNoLFSuccessEnd(t *testing.T) { serv, conn := createSkkserv(t, "1わりもどs ") serv.HandleRequest(conn) if !*conn.closed { t.Error("0 does not close connection") } written := EucToString(conn.written.Bytes()) expected := "1/割り戻/割戻/\n" if written != expected { t.Errorf("Version returned unexpected string %v (expected: %v)", written, expected) } }<|fim▁end|>
<|file_name|>server.go<|end_file_name|><|fim▁begin|>package command import ( "encoding/hex" "fmt" "log" "net" "net/http" "net/url" "os" "os/signal" "runtime" "sort" "strconv" "strings" "syscall" "time" "github.com/armon/go-metrics" "github.com/hashicorp/errwrap" "github.com/hashicorp/go-multierror" "github.com/hashicorp/logutils" "github.com/hashicorp/vault/audit" "github.com/hashicorp/vault/command/server" "github.com/hashicorp/vault/helper/flag-slice" "github.com/hashicorp/vault/helper/gated-writer" "github.com/hashicorp/vault/helper/mlock" vaulthttp "github.com/hashicorp/vault/http" "github.com/hashicorp/vault/logical" "github.com/hashicorp/vault/meta" "github.com/hashicorp/vault/physical" "github.com/hashicorp/vault/vault" "github.com/hashicorp/vault/version" ) // ServerCommand is a Command that starts the Vault server. type ServerCommand struct { AuditBackends map[string]audit.Factory CredentialBackends map[string]logical.Factory LogicalBackends map[string]logical.Factory ShutdownCh chan struct{} SighupCh chan struct{} meta.Meta ReloadFuncs map[string][]server.ReloadFunc } func (c *ServerCommand) Run(args []string) int { var dev, verifyOnly bool var configPath []string var logLevel, devRootTokenID, devListenAddress string flags := c.Meta.FlagSet("server", meta.FlagSetDefault) flags.BoolVar(&dev, "dev", false, "") flags.StringVar(&devRootTokenID, "dev-root-token-id", "", "") flags.StringVar(&devListenAddress, "dev-listen-address", "", "") flags.StringVar(&logLevel, "log-level", "info", "") flags.BoolVar(&verifyOnly, "verify-only", false, "") flags.Usage = func() { c.Ui.Error(c.Help()) } flags.Var((*sliceflag.StringFlag)(&configPath), "config", "config") if err := flags.Parse(args); err != nil { return 1 } if os.Getenv("VAULT_DEV_ROOT_TOKEN_ID") != "" { devRootTokenID = os.Getenv("VAULT_DEV_ROOT_TOKEN_ID") } if os.Getenv("VAULT_DEV_LISTEN_ADDRESS") != "" { devListenAddress = os.Getenv("VAULT_DEV_LISTEN_ADDRESS") } // Validation if !dev { switch { case 
len(configPath) == 0: c.Ui.Error("At least one config path must be specified with -config") flags.Usage() return 1 case devRootTokenID != "": c.Ui.Error("Root token ID can only be specified with -dev") flags.Usage() return 1 case devListenAddress != "": c.Ui.Error("Development address can only be specified with -dev") flags.Usage() return 1 } } // Load the configuration var config *server.Config if dev { config = server.DevConfig() if devListenAddress != "" { config.Listeners[0].Config["address"] = devListenAddress } } for _, path := range configPath { current, err := server.LoadConfig(path) if err != nil { c.Ui.Error(fmt.Sprintf( "Error loading configuration from %s: %s", path, err)) return 1 } if config == nil { config = current } else { config = config.Merge(current) } } // Ensure at least one config was found. if config == nil { c.Ui.Error("No configuration files found.") return 1 } // Ensure that a backend is provided if config.Backend == nil { c.Ui.Error("A physical backend must be specified") return 1 } // If mlockall(2) isn't supported, show a warning. We disable this // in dev because it is quite scary to see when first using Vault. if !dev && !mlock.Supported() { c.Ui.Output("==> WARNING: mlock not supported on this system!\n") c.Ui.Output(" An `mlockall(2)`-like syscall to prevent memory from being") c.Ui.Output(" swapped to disk is not supported on this system. Running") c.Ui.Output(" Vault on an mlockall(2) enabled system is much more secure.\n") } // Create a logger. We wrap it in a gated writer so that it doesn't // start logging too early. 
logGate := &gatedwriter.Writer{Writer: os.Stderr} logger := log.New(&logutils.LevelFilter{ Levels: []logutils.LogLevel{ "TRACE", "DEBUG", "INFO", "WARN", "ERR"}, MinLevel: logutils.LogLevel(strings.ToUpper(logLevel)), Writer: logGate, }, "", log.LstdFlags) if err := c.setupTelemetry(config); err != nil { c.Ui.Error(fmt.Sprintf("Error initializing telemetry: %s", err)) return 1 } // Initialize the backend backend, err := physical.NewBackend( config.Backend.Type, logger, config.Backend.Config) if err != nil { c.Ui.Error(fmt.Sprintf( "Error initializing backend of type %s: %s", config.Backend.Type, err)) return 1 } infoKeys := make([]string, 0, 10) info := make(map[string]string) var seal vault.Seal = &vault.DefaultSeal{} // Ensure that the seal finalizer is called, even if using verify-only defer func() { err = seal.Finalize() if err != nil { c.Ui.Error(fmt.Sprintf("Error finalizing seals: %v", err)) } }() coreConfig := &vault.CoreConfig{ Physical: backend, AdvertiseAddr: config.Backend.AdvertiseAddr, HAPhysical: nil, Seal: seal, AuditBackends: c.AuditBackends, CredentialBackends: c.CredentialBackends, LogicalBackends: c.LogicalBackends, Logger: logger, DisableCache: config.DisableCache, DisableMlock: config.DisableMlock, MaxLeaseTTL: config.MaxLeaseTTL, DefaultLeaseTTL: config.DefaultLeaseTTL, } // Initialize the separate HA physical backend, if it exists var ok bool if config.HABackend != nil { habackend, err := physical.NewBackend( config.HABackend.Type, logger, config.HABackend.Config) if err != nil { c.Ui.Error(fmt.Sprintf( "Error initializing backend of type %s: %s", config.HABackend.Type, err)) return 1 } if coreConfig.HAPhysical, ok = habackend.(physical.HABackend); !ok { c.Ui.Error("Specified HA backend does not support HA") return 1 } coreConfig.AdvertiseAddr = config.HABackend.AdvertiseAddr } else { if coreConfig.HAPhysical, ok = backend.(physical.HABackend); ok { coreConfig.AdvertiseAddr = config.Backend.AdvertiseAddr } } if envAA := 
os.Getenv("VAULT_ADVERTISE_ADDR"); envAA != "" { coreConfig.AdvertiseAddr = envAA } // Attempt to detect the advertise address, if possible var detect physical.AdvertiseDetect if coreConfig.HAPhysical != nil { detect, ok = coreConfig.HAPhysical.(physical.AdvertiseDetect) } else { detect, ok = coreConfig.Physical.(physical.AdvertiseDetect) } if ok && coreConfig.AdvertiseAddr == "" { advertise, err := c.detectAdvertise(detect, config) if err != nil { c.Ui.Error(fmt.Sprintf("Error detecting advertise address: %s", err)) } else if advertise == "" { c.Ui.Error("Failed to detect advertise address.") } else { coreConfig.AdvertiseAddr = advertise } } // Initialize the core core, newCoreError := vault.NewCore(coreConfig) if newCoreError != nil { if !errwrap.ContainsType(newCoreError, new(vault.NonFatalError)) { c.Ui.Error(fmt.Sprintf("Error initializing core: %s", newCoreError)) return 1 } } // If we're in dev mode, then initialize the core if dev { init, err := c.enableDev(core, devRootTokenID) if err != nil { c.Ui.Error(fmt.Sprintf( "Error initializing dev mode: %s", err)) return 1 } export := "export" quote := "'" if runtime.GOOS == "windows" { export = "set" quote = "" } c.Ui.Output(fmt.Sprintf( "==> WARNING: Dev mode is enabled!\n\n"+ "In this mode, Vault is completely in-memory and unsealed.\n"+ "Vault is configured to only have a single unseal key. 
The root\n"+ "token has already been authenticated with the CLI, so you can\n"+ "immediately begin using the Vault CLI.\n\n"+ "The only step you need to take is to set the following\n"+ "environment variables:\n\n"+ " "+export+" VAULT_ADDR="+quote+"http://"+config.Listeners[0].Config["address"]+quote+"\n\n"+ "The unseal key and root token are reproduced below in case you\n"+ "want to seal/unseal the Vault or play with authentication.\n\n"+ "Unseal Key: %s\nRoot Token: %s\n", hex.EncodeToString(init.SecretShares[0]), init.RootToken, )) } // Compile server information for output later info["backend"] = config.Backend.Type info["log level"] = logLevel info["mlock"] = fmt.Sprintf( "supported: %v, enabled: %v", mlock.Supported(), !config.DisableMlock) infoKeys = append(infoKeys, "log level", "mlock", "backend") if config.HABackend != nil { info["HA backend"] = config.HABackend.Type info["advertise address"] = coreConfig.AdvertiseAddr infoKeys = append(infoKeys, "HA backend", "advertise address") } else { // If the backend supports HA, then note it if coreConfig.HAPhysical != nil { info["backend"] += " (HA available)" info["advertise address"] = coreConfig.AdvertiseAddr infoKeys = append(infoKeys, "advertise address") } } // If the backend supports service discovery, run service discovery if coreConfig.HAPhysical != nil { sd, ok := coreConfig.HAPhysical.(physical.ServiceDiscovery) if ok { if err := sd.RunServiceDiscovery(c.ShutdownCh, coreConfig.AdvertiseAddr); err != nil { c.Ui.Error(fmt.Sprintf("Error initializing service discovery: %v", err)) return 1 } } } // Initialize the listeners lns := make([]net.Listener, 0, len(config.Listeners)) for i, lnConfig := range config.Listeners { ln, props, reloadFunc, err := server.NewListener(lnConfig.Type, lnConfig.Config) if err != nil { c.Ui.Error(fmt.Sprintf( "Error initializing listener of type %s: %s", lnConfig.Type, err)) return 1 } // Store the listener props for output later key := fmt.Sprintf("listener %d", i+1) propsList 
:= make([]string, 0, len(props)) for k, v := range props { propsList = append(propsList, fmt.Sprintf( "%s: %q", k, v)) } sort.Strings(propsList) infoKeys = append(infoKeys, key) info[key] = fmt.Sprintf( "%s (%s)", lnConfig.Type, strings.Join(propsList, ", ")) lns = append(lns, ln) if reloadFunc != nil { relSlice := c.ReloadFuncs["listener|"+lnConfig.Type] relSlice = append(relSlice, reloadFunc) c.ReloadFuncs["listener|"+lnConfig.Type] = relSlice } } infoKeys = append(infoKeys, "version") info["version"] = version.GetVersion().String() // Server configuration output padding := 24 sort.Strings(infoKeys) c.Ui.Output("==> Vault server configuration:\n") for _, k := range infoKeys { c.Ui.Output(fmt.Sprintf( "%s%s: %s", strings.Repeat(" ", padding-len(k)), strings.Title(k), info[k])) } c.Ui.Output("") if verifyOnly { for _, listener := range lns { listener.Close() } return 0 } // Initialize the HTTP server server := &http.Server{} server.Handler = vaulthttp.Handler(core) for _, ln := range lns { go server.Serve(ln) } if newCoreError != nil { c.Ui.Output("==> Warning:\n\nNon-fatal error during initialization; check the logs for more information.") c.Ui.Output("") } // Output the header that the server has started c.Ui.Output("==> Vault server started! Log data will stream in below:\n") // Release the log gate. 
logGate.Flush() // Wait for shutdown shutdownTriggered := false for !shutdownTriggered { select { case <-c.ShutdownCh: c.Ui.Output("==> Vault shutdown triggered") if err := core.Shutdown(); err != nil { c.Ui.Error(fmt.Sprintf("Error with core shutdown: %s", err)) } shutdownTriggered = true case <-c.SighupCh: c.Ui.Output("==> Vault reload triggered") if err := c.Reload(configPath); err != nil { c.Ui.Error(fmt.Sprintf("Error(s) were encountered during reload: %s", err)) } } } for _, listener := range lns { listener.Close() } return 0 } func (c *ServerCommand) enableDev(core *vault.Core, rootTokenID string) (*vault.InitResult, error) { // Initialize it with a basic single key init, err := core.Initialize(&vault.SealConfig{ SecretShares: 1, SecretThreshold: 1, }, nil) if err != nil { return nil, err } // Copy the key so that it can be zeroed key := make([]byte, len(init.SecretShares[0])) copy(key, init.SecretShares[0]) // Unseal the core unsealed, err := core.Unseal(key) if err != nil { return nil, err } if !unsealed { return nil, fmt.Errorf("failed to unseal Vault for dev mode") } if rootTokenID != "" { req := &logical.Request{ Operation: logical.UpdateOperation, ClientToken: init.RootToken, Path: "auth/token/create", Data: map[string]interface{}{ "id": rootTokenID, "policies": []string{"root"}, "no_parent": true, "no_default_policy": true, }, } resp, err := core.HandleRequest(req) if err != nil { return nil, fmt.Errorf("failed to create root token with ID %s: %s", rootTokenID, err) } if resp == nil { return nil, fmt.Errorf("nil response when creating root token with ID %s", rootTokenID) } if resp.Auth == nil { return nil, fmt.Errorf("nil auth when creating root token with ID %s", rootTokenID) } init.RootToken = resp.Auth.ClientToken req.Path = "auth/token/revoke-self" req.Data = nil resp, err = core.HandleRequest(req) if err != nil { return nil, fmt.Errorf("failed to revoke initial root token: %s", err) } } // Set the token tokenHelper, err := c.TokenHelper() if err 
!= nil { return nil, err } if err := tokenHelper.Store(init.RootToken); err != nil { return nil, err } return init, nil } // detectAdvertise is used to attempt advertise address detection func (c *ServerCommand) detectAdvertise(detect physical.AdvertiseDetect, config *server.Config) (string, error) { // Get the hostname host, err := detect.DetectHostAddr() if err != nil { return "", err } // set [] for ipv6 addresses if strings.Contains(host, ":") && !strings.Contains(host, "]") { host = "[" + host + "]" } // Default the port and scheme scheme := "https" port := 8200 // Attempt to detect overrides for _, list := range config.Listeners { // Only attempt TCP if list.Type != "tcp" { continue } // Check if TLS is disabled if val, ok := list.Config["tls_disable"]; ok { disable, err := strconv.ParseBool(val) if err != nil { return "", fmt.Errorf("tls_disable: %s", err) }<|fim▁hole|> } } // Check for address override addr, ok := list.Config["address"] if !ok { addr = "127.0.0.1:8200" } // Check for localhost hostStr, portStr, err := net.SplitHostPort(addr) if err != nil { continue } if hostStr == "127.0.0.1" { host = hostStr } // Check for custom port listPort, err := strconv.Atoi(portStr) if err != nil { continue } port = listPort } // Build a URL url := &url.URL{ Scheme: scheme, Host: fmt.Sprintf("%s:%d", host, port), } // Return the URL string return url.String(), nil } // setupTelemetry is used to setup the telemetry sub-systems func (c *ServerCommand) setupTelemetry(config *server.Config) error { /* Setup telemetry Aggregate on 10 second intervals for 1 minute. Expose the metrics over stderr when there is a SIGUSR1 received. 
*/ inm := metrics.NewInmemSink(10*time.Second, time.Minute) metrics.DefaultInmemSignal(inm) var telConfig *server.Telemetry if config.Telemetry == nil { telConfig = &server.Telemetry{} } else { telConfig = config.Telemetry } metricsConf := metrics.DefaultConfig("vault") metricsConf.EnableHostname = !telConfig.DisableHostname // Configure the statsite sink var fanout metrics.FanoutSink if telConfig.StatsiteAddr != "" { sink, err := metrics.NewStatsiteSink(telConfig.StatsiteAddr) if err != nil { return err } fanout = append(fanout, sink) } // Configure the statsd sink if telConfig.StatsdAddr != "" { sink, err := metrics.NewStatsdSink(telConfig.StatsdAddr) if err != nil { return err } fanout = append(fanout, sink) } // Initialize the global sink if len(fanout) > 0 { fanout = append(fanout, inm) metrics.NewGlobal(metricsConf, fanout) } else { metricsConf.EnableHostname = false metrics.NewGlobal(metricsConf, inm) } return nil } func (c *ServerCommand) Reload(configPath []string) error { // Read the new config var config *server.Config for _, path := range configPath { current, err := server.LoadConfig(path) if err != nil { retErr := fmt.Errorf("Error loading configuration from %s: %s", path, err) c.Ui.Error(retErr.Error()) return retErr } if config == nil { config = current } else { config = config.Merge(current) } } // Ensure at least one config was found. if config == nil { retErr := fmt.Errorf("No configuration files found") c.Ui.Error(retErr.Error()) return retErr } var reloadErrors *multierror.Error // Call reload on the listeners. This will call each listener with each // config block, but they verify the address. 
for _, lnConfig := range config.Listeners { for _, relFunc := range c.ReloadFuncs["listener|"+lnConfig.Type] { if err := relFunc(lnConfig.Config); err != nil { retErr := fmt.Errorf("Error encountered reloading configuration: %s", err) reloadErrors = multierror.Append(retErr) } } } return reloadErrors.ErrorOrNil() } func (c *ServerCommand) Synopsis() string { return "Start a Vault server" } func (c *ServerCommand) Help() string { helpText := ` Usage: vault server [options] Start a Vault server. This command starts a Vault server that responds to API requests. Vault will start in a "sealed" state. The Vault must be unsealed with "vault unseal" or the API before this server can respond to requests. This must be done for every server. If the server is being started against a storage backend that has brand new (no existing Vault data in it), it must be initialized with "vault init" or the API first. General Options: -config=<path> Path to the configuration file or directory. This can be specified multiple times. If it is a directory, all files with a ".hcl" or ".json" suffix will be loaded. -dev Enables Dev mode. In this mode, Vault is completely in-memory and unsealed. Do not run the Dev server in production! -dev-root-token-id="" If set, the root token returned in Dev mode will have the given ID. This *only* has an effect when running in Dev mode. Can also be specified with the VAULT_DEV_ROOT_TOKEN_ID environment variable. -dev-listen-address="" If set, this overrides the normal Dev mode listen address of "127.0.0.1:8200". Can also be specified with the VAULT_DEV_LISTEN_ADDRESS environment variable. -log-level=info Log verbosity. Defaults to "info", will be output to stderr. Supported values: "trace", "debug", "info", "warn", "err" ` return strings.TrimSpace(helpText) } // MakeShutdownCh returns a channel that can be used for shutdown // notifications for commands. This channel will send a message for every // SIGINT or SIGTERM received. 
func MakeShutdownCh() chan struct{} { resultCh := make(chan struct{}) shutdownCh := make(chan os.Signal, 4) signal.Notify(shutdownCh, os.Interrupt, syscall.SIGTERM) go func() { for { <-shutdownCh resultCh <- struct{}{} } }() return resultCh } // MakeSighupCh returns a channel that can be used for SIGHUP // reloading. This channel will send a message for every // SIGHUP received. func MakeSighupCh() chan struct{} { resultCh := make(chan struct{}) signalCh := make(chan os.Signal, 4) signal.Notify(signalCh, syscall.SIGHUP) go func() { for { <-signalCh resultCh <- struct{}{} } }() return resultCh }<|fim▁end|>
if disable { scheme = "http"
// File: run_type.rs
//! Ordered SQL deployment phases: each phase maps to a folder of scripts
//! and (optionally) the DROP statement prefix used to tear its objects down.

/// The kind of SQL artifact a run step deals with, listed in deployment order.
#[derive(Clone)]
pub enum SqlType {
    PreDeployment,
    TableSpace,
    Schema,
    Table,
    Sproc,
    PostDeployment,
}

/// One deployment phase.
pub struct SqlRunType {
    /// Which phase of the deployment this entry represents.
    pub sql_type: SqlType,
    /// Folder (relative to the script root) holding this phase's scripts.
    pub folder_name: String,
    /// SQL `DROP ...` prefix used to remove objects of this type; empty for
    /// phases (pre/post deployment) that have nothing to drop.
    pub drop_procedure: String,
}

/// Returns the run phases in the order they must be executed:
/// pre-deployment, table spaces, schemas, tables, sprocs, post-deployment.
pub fn get_types() -> Vec<SqlRunType> {
    // NOTE: the trailing `return` was removed; the vec expression is the
    // function's value, per idiomatic Rust.
    vec![
        SqlRunType {
            sql_type: SqlType::PreDeployment,
            folder_name: "pre-deployment".to_string(),
            drop_procedure: "".to_string(),
        },
        SqlRunType {
            sql_type: SqlType::TableSpace,
            folder_name: "table-space".to_string(),
            drop_procedure: "DROP TABLESPACE ".to_string(),
        },
        SqlRunType {
            sql_type: SqlType::Schema,
            folder_name: "schema".to_string(),
            drop_procedure: "DROP SCHEMA ".to_string(),
        },
        SqlRunType {
            sql_type: SqlType::Table,
            folder_name: "table".to_string(),
            drop_procedure: "DROP TABLE ".to_string(),
        },
        SqlRunType {
            sql_type: SqlType::Sproc,
            folder_name: "sproc".to_string(),
            drop_procedure: "DROP FUNCTION ".to_string(),
        },
        SqlRunType {
            sql_type: SqlType::PostDeployment,
            folder_name: "post-deployment".to_string(),
            drop_procedure: "".to_string(),
        },
    ]
}
<|file_name|>soundcloud.module.ts<|end_file_name|><|fim▁begin|>import { NgModule, Provider } from '@angular/core'; import { NgxsModule } from '@ngxs/store'; import { AppSoundcloudService } from './soundcloud.service'; import { AppSoundcloudState } from './soundcloud.store'; import { AppSoundcloudApiService } from './soundcloud-api.service'; export const soundcloudStoreModuleProviders: Provider[] = [ AppSoundcloudService, AppSoundcloudApiService,<|fim▁hole|>]; @NgModule({ imports: [NgxsModule.forFeature([AppSoundcloudState])], }) export class AppSoundcloudStoreModule {}<|fim▁end|>
<|file_name|>webglrenderingcontext.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use canvas_traits::{CanvasCommonMsg, CanvasMsg, byte_swap}; use core::nonzero::NonZero; use dom::bindings::codegen::Bindings::WebGLRenderingContextBinding::{self, WebGLContextAttributes}; use dom::bindings::codegen::Bindings::WebGLRenderingContextBinding::WebGLRenderingContextConstants as constants; use dom::bindings::codegen::Bindings::WebGLRenderingContextBinding::WebGLRenderingContextMethods; use dom::bindings::codegen::UnionTypes::ImageDataOrHTMLImageElementOrHTMLCanvasElementOrHTMLVideoElement; use dom::bindings::conversions::{ArrayBufferViewContents, ConversionResult, FromJSValConvertible, ToJSValConvertible}; use dom::bindings::conversions::{array_buffer_to_vec, array_buffer_view_data, array_buffer_view_data_checked}; use dom::bindings::conversions::{array_buffer_view_to_vec, array_buffer_view_to_vec_checked}; use dom::bindings::error::{Error, Fallible}; use dom::bindings::inheritance::Castable; use dom::bindings::js::{JS, LayoutJS, MutNullableHeap, Root}; use dom::bindings::reflector::{Reflectable, Reflector, reflect_dom_object}; use dom::bindings::str::DOMString; use dom::event::{Event, EventBubbles, EventCancelable}; use dom::globalscope::GlobalScope; use dom::htmlcanvaselement::HTMLCanvasElement; use dom::htmlcanvaselement::utils as canvas_utils; use dom::node::{Node, NodeDamage, window_from_node}; use dom::webgl_validations::WebGLValidator; use dom::webgl_validations::tex_image_2d::{CommonTexImage2DValidator, CommonTexImage2DValidatorResult}; use dom::webgl_validations::tex_image_2d::{TexImage2DValidator, TexImage2DValidatorResult}; use dom::webgl_validations::types::{TexDataType, TexFormat, TexImageTarget}; use dom::webglactiveinfo::WebGLActiveInfo; use dom::webglbuffer::WebGLBuffer; use 
dom::webglcontextevent::WebGLContextEvent; use dom::webglframebuffer::WebGLFramebuffer; use dom::webglprogram::WebGLProgram; use dom::webglrenderbuffer::WebGLRenderbuffer; use dom::webglshader::WebGLShader; use dom::webgltexture::{TexParameterValue, WebGLTexture}; use dom::webgluniformlocation::WebGLUniformLocation; use euclid::size::Size2D; use ipc_channel::ipc::{self, IpcSender}; use js::conversions::ConversionBehavior; use js::jsapi::{JSContext, JSObject, JS_GetArrayBufferViewType, Type}; use js::jsval::{BooleanValue, DoubleValue, Int32Value, JSVal, NullValue, UndefinedValue}; use net_traits::image::base::PixelFormat; use net_traits::image_cache_thread::ImageResponse; use offscreen_gl_context::{GLContextAttributes, GLLimits}; use script_traits::ScriptMsg as ConstellationMsg; use std::cell::Cell; use webrender_traits::{WebGLCommand, WebGLError, WebGLFramebufferBindingRequest, WebGLParameter}; use webrender_traits::WebGLError::*; type ImagePixelResult = Result<(Vec<u8>, Size2D<i32>), ()>; pub const MAX_UNIFORM_AND_ATTRIBUTE_LEN: usize = 256; macro_rules! handle_potential_webgl_error { ($context:ident, $call:expr, $return_on_error:expr) => { match $call { Ok(ret) => ret, Err(error) => { $context.webgl_error(error); $return_on_error } } }; ($context:ident, $call:expr) => { handle_potential_webgl_error!($context, $call, ()); }; } // From the GLES 2.0.25 spec, page 85: // // "If a texture that is currently bound to one of the targets // TEXTURE_2D, or TEXTURE_CUBE_MAP is deleted, it is as though // BindTexture had been executed with the same target and texture // zero." // // and similar text occurs for other object types. macro_rules! handle_object_deletion { ($self_:expr, $binding:expr, $object:ident, $unbind_command:expr) => { if let Some(bound_object) = $binding.get() { if bound_object.id() == $object.id() { $binding.set(None); } if let Some(command) = $unbind_command { $self_.ipc_renderer .send(CanvasMsg::WebGL(command)) .unwrap(); } } }; } macro_rules! 
object_binding_to_js_or_null { ($cx: expr, $binding:expr) => { { rooted!(in($cx) let mut rval = NullValue()); if let Some(bound_object) = $binding.get() { unsafe { bound_object.to_jsval($cx, rval.handle_mut()); } } rval.get() } }; } /// Set of bitflags for texture unpacking (texImage2d, etc...) bitflags! { #[derive(HeapSizeOf, JSTraceable)] flags TextureUnpacking: u8 { const FLIP_Y_AXIS = 0x01, const PREMULTIPLY_ALPHA = 0x02, const CONVERT_COLORSPACE = 0x04, } } #[dom_struct] pub struct WebGLRenderingContext { reflector_: Reflector, #[ignore_heap_size_of = "Defined in ipc-channel"] ipc_renderer: IpcSender<CanvasMsg>, #[ignore_heap_size_of = "Defined in offscreen_gl_context"] limits: GLLimits, canvas: JS<HTMLCanvasElement>, #[ignore_heap_size_of = "Defined in webrender_traits"] last_error: Cell<Option<WebGLError>>, texture_unpacking_settings: Cell<TextureUnpacking>, bound_framebuffer: MutNullableHeap<JS<WebGLFramebuffer>>, bound_renderbuffer: MutNullableHeap<JS<WebGLRenderbuffer>>, bound_texture_2d: MutNullableHeap<JS<WebGLTexture>>, bound_texture_cube_map: MutNullableHeap<JS<WebGLTexture>>, bound_buffer_array: MutNullableHeap<JS<WebGLBuffer>>, bound_buffer_element_array: MutNullableHeap<JS<WebGLBuffer>>, current_program: MutNullableHeap<JS<WebGLProgram>>, #[ignore_heap_size_of = "Because it's small"]<|fim▁hole|> current_vertex_attrib_0: Cell<(f32, f32, f32, f32)>, } impl WebGLRenderingContext { fn new_inherited(global: &GlobalScope, canvas: &HTMLCanvasElement, size: Size2D<i32>, attrs: GLContextAttributes) -> Result<WebGLRenderingContext, String> { let (sender, receiver) = ipc::channel().unwrap(); let constellation_chan = global.constellation_chan(); constellation_chan.send(ConstellationMsg::CreateWebGLPaintThread(size, attrs, sender)) .unwrap(); let result = receiver.recv().unwrap(); result.map(|(ipc_renderer, context_limits)| { WebGLRenderingContext { reflector_: Reflector::new(), ipc_renderer: ipc_renderer, limits: context_limits, canvas: JS::from_ref(canvas), 
last_error: Cell::new(None), texture_unpacking_settings: Cell::new(CONVERT_COLORSPACE), bound_framebuffer: MutNullableHeap::new(None), bound_texture_2d: MutNullableHeap::new(None), bound_texture_cube_map: MutNullableHeap::new(None), bound_buffer_array: MutNullableHeap::new(None), bound_buffer_element_array: MutNullableHeap::new(None), bound_renderbuffer: MutNullableHeap::new(None), current_program: MutNullableHeap::new(None), current_vertex_attrib_0: Cell::new((0f32, 0f32, 0f32, 1f32)), } }) } #[allow(unrooted_must_root)] pub fn new(global: &GlobalScope, canvas: &HTMLCanvasElement, size: Size2D<i32>, attrs: GLContextAttributes) -> Option<Root<WebGLRenderingContext>> { match WebGLRenderingContext::new_inherited(global, canvas, size, attrs) { Ok(ctx) => Some(reflect_dom_object(box ctx, global, WebGLRenderingContextBinding::Wrap)), Err(msg) => { error!("Couldn't create WebGLRenderingContext: {}", msg); let event = WebGLContextEvent::new(global, atom!("webglcontextcreationerror"), EventBubbles::DoesNotBubble, EventCancelable::Cancelable, DOMString::from(msg)); event.upcast::<Event>().fire(canvas.upcast()); None } } } pub fn limits(&self) -> &GLLimits { &self.limits } pub fn bound_texture_for_target(&self, target: &TexImageTarget) -> Option<Root<WebGLTexture>> { match *target { TexImageTarget::Texture2D => self.bound_texture_2d.get(), TexImageTarget::CubeMapPositiveX | TexImageTarget::CubeMapNegativeX | TexImageTarget::CubeMapPositiveY | TexImageTarget::CubeMapNegativeY | TexImageTarget::CubeMapPositiveZ | TexImageTarget::CubeMapNegativeZ => self.bound_texture_cube_map.get(), } } pub fn recreate(&self, size: Size2D<i32>) { self.ipc_renderer.send(CanvasMsg::Common(CanvasCommonMsg::Recreate(size))).unwrap(); } pub fn ipc_renderer(&self) -> IpcSender<CanvasMsg> { self.ipc_renderer.clone() } pub fn webgl_error(&self, err: WebGLError) { // TODO(emilio): Add useful debug messages to this warn!("WebGL error: {:?}, previous error was {:?}", err, self.last_error.get()); // If an 
error has been detected no further errors must be // recorded until `getError` has been called if self.last_error.get().is_none() { self.last_error.set(Some(err)); } } // Helper function for validating framebuffer completeness in // calls touching the framebuffer. From the GLES 2.0.25 spec, // page 119: // // "Effects of Framebuffer Completeness on Framebuffer // Operations // // If the currently bound framebuffer is not framebuffer // complete, then it is an error to attempt to use the // framebuffer for writing or reading. This means that // rendering commands such as DrawArrays and DrawElements, as // well as commands that read the framebuffer such as // ReadPixels and CopyTexSubImage, will generate the error // INVALID_FRAMEBUFFER_OPERATION if called while the // framebuffer is not framebuffer complete." // // The WebGL spec mentions a couple more operations that trigger // this: clear() and getParameter(IMPLEMENTATION_COLOR_READ_*). fn validate_framebuffer_complete(&self) -> bool { match self.bound_framebuffer.get() { Some(fb) => match fb.check_status() { constants::FRAMEBUFFER_COMPLETE => return true, _ => { self.webgl_error(InvalidFramebufferOperation); return false; } }, // The default framebuffer is always complete. 
None => return true, } } fn tex_parameter(&self, target: u32, name: u32, value: TexParameterValue) { let texture = match target { constants::TEXTURE_2D => self.bound_texture_2d.get(), constants::TEXTURE_CUBE_MAP => self.bound_texture_cube_map.get(), _ => return self.webgl_error(InvalidEnum), }; if let Some(texture) = texture { handle_potential_webgl_error!(self, texture.tex_parameter(target, name, value)); } else { self.webgl_error(InvalidOperation) } } fn mark_as_dirty(&self) { self.canvas.upcast::<Node>().dirty(NodeDamage::OtherNodeDamage); } fn vertex_attrib(&self, indx: u32, x: f32, y: f32, z: f32, w: f32) { if indx > self.limits.max_vertex_attribs { return self.webgl_error(InvalidValue); } if indx == 0 { self.current_vertex_attrib_0.set((x, y, z, w)) } self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::VertexAttrib(indx, x, y, z, w))) .unwrap(); } fn validate_stencil_actions(&self, action: u32) -> bool { match action { 0 | constants::KEEP | constants::REPLACE | constants::INCR | constants::DECR | constants::INVERT | constants::INCR_WRAP | constants::DECR_WRAP => true, _ => false, } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10 // https://www.khronos.org/opengles/sdk/docs/man/xhtml/glUniform.xml // https://www.khronos.org/registry/gles/specs/2.0/es_full_spec_2.0.25.pdf#nameddest=section-2.10.4 fn validate_uniform_parameters<T>(&self, uniform: Option<&WebGLUniformLocation>, uniform_type: UniformSetterType, data: &[T]) -> bool { let uniform = match uniform { Some(uniform) => uniform, None => return false, }; let program = self.current_program.get(); match program { Some(ref program) if program.id() == uniform.program_id() => {}, _ => { self.webgl_error(InvalidOperation); return false; }, }; // TODO(emilio): Get more complex uniform info from ANGLE, and use it to // properly validate that the uniform setter type is compatible with the // uniform type, and that the uniform size matches. 
if data.len() % uniform_type.element_count() != 0 { self.webgl_error(InvalidOperation); return false; } true } fn get_image_pixels(&self, source: Option<ImageDataOrHTMLImageElementOrHTMLCanvasElementOrHTMLVideoElement>) -> ImagePixelResult { let source = match source { Some(s) => s, None => return Err(()), }; // NOTE: Getting the pixels probably can be short-circuited if some // parameter is invalid. // // Nontheless, since it's the error case, I'm not totally sure the // complexity is worth it. let (pixels, size) = match source { ImageDataOrHTMLImageElementOrHTMLCanvasElementOrHTMLVideoElement::ImageData(image_data) => { (image_data.get_data_array(), image_data.get_size()) }, ImageDataOrHTMLImageElementOrHTMLCanvasElementOrHTMLVideoElement::HTMLImageElement(image) => { let img_url = match image.get_url() { Some(url) => url, None => return Err(()), }; let window = window_from_node(&*self.canvas); let img = match canvas_utils::request_image_from_cache(&window, img_url) { ImageResponse::Loaded(img) => img, ImageResponse::PlaceholderLoaded(_) | ImageResponse::None | ImageResponse::MetadataLoaded(_) => return Err(()), }; let size = Size2D::new(img.width as i32, img.height as i32); // TODO(emilio): Validate that the format argument // is coherent with the image. // // RGB8 should be easy to support too let mut data = match img.format { PixelFormat::RGBA8 => img.bytes.to_vec(), _ => unimplemented!(), }; byte_swap(&mut data); (data, size) }, // TODO(emilio): Getting canvas data is implemented in CanvasRenderingContext2D, // but we need to refactor it moving it to `HTMLCanvasElement` and support // WebGLContext (probably via GetPixels()). 
ImageDataOrHTMLImageElementOrHTMLCanvasElementOrHTMLVideoElement::HTMLCanvasElement(canvas) => { if let Some((mut data, size)) = canvas.fetch_all_data() { byte_swap(&mut data); (data, size) } else { return Err(()); } }, ImageDataOrHTMLImageElementOrHTMLCanvasElementOrHTMLVideoElement::HTMLVideoElement(_rooted_video) => unimplemented!(), }; return Ok((pixels, size)); } // TODO(emilio): Move this logic to a validator. #[allow(unsafe_code)] unsafe fn validate_tex_image_2d_data(&self, width: u32, height: u32, format: TexFormat, data_type: TexDataType, data: *mut JSObject) -> Result<u32, ()> { let element_size = data_type.element_size(); let components_per_element = data_type.components_per_element(); let components = format.components(); // If data is non-null, the type of pixels must match the type of the // data to be read. // If it is UNSIGNED_BYTE, a Uint8Array must be supplied; // if it is UNSIGNED_SHORT_5_6_5, UNSIGNED_SHORT_4_4_4_4, // or UNSIGNED_SHORT_5_5_5_1, a Uint16Array must be supplied. // If the types do not match, an INVALID_OPERATION error is generated. 
let received_size = if data.is_null() { element_size } else { if array_buffer_view_data_checked::<u16>(data).is_some() { 2 } else if array_buffer_view_data_checked::<u8>(data).is_some() { 1 } else { self.webgl_error(InvalidOperation); return Err(()); } }; if received_size != element_size { self.webgl_error(InvalidOperation); return Err(()); } // NOTE: width and height are positive or zero due to validate() let expected_byte_length = width * height * element_size * components / components_per_element; return Ok(expected_byte_length); } fn tex_image_2d(&self, texture: Root<WebGLTexture>, target: TexImageTarget, data_type: TexDataType, internal_format: TexFormat, level: u32, width: u32, height: u32, _border: u32, pixels: Vec<u8>) { // NB: pixels should NOT be premultipied if internal_format == TexFormat::RGBA && data_type == TexDataType::UnsignedByte && self.texture_unpacking_settings.get().contains(PREMULTIPLY_ALPHA) { // TODO(emilio): premultiply here. } // TODO(emilio): Flip Y axis if necessary here // TexImage2D depth is always equal to 1 handle_potential_webgl_error!(self, texture.initialize(target, width, height, 1, internal_format, level, Some(data_type))); // TODO(emilio): Invert axis, convert colorspace, premultiply alpha if requested let msg = WebGLCommand::TexImage2D(target.as_gl_constant(), level as i32, internal_format.as_gl_constant() as i32, width as i32, height as i32, internal_format.as_gl_constant(), data_type.as_gl_constant(), pixels); self.ipc_renderer .send(CanvasMsg::WebGL(msg)) .unwrap() } fn tex_sub_image_2d(&self, texture: Root<WebGLTexture>, target: TexImageTarget, level: u32, xoffset: i32, yoffset: i32, width: u32, height: u32, format: TexFormat, data_type: TexDataType, pixels: Vec<u8>) { // NB: pixels should NOT be premultipied // We have already validated level let image_info = texture.image_info_for_target(&target, level); // GL_INVALID_VALUE is generated if: // - xoffset or yoffset is less than 0 // - x offset plus the width is greater 
than the texture width // - y offset plus the height is greater than the texture height if xoffset < 0 || (xoffset as u32 + width) > image_info.width() || yoffset < 0 || (yoffset as u32 + height) > image_info.height() { return self.webgl_error(InvalidValue); } // NB: format and internal_format must match. if format != image_info.internal_format().unwrap() || data_type != image_info.data_type().unwrap() { return self.webgl_error(InvalidOperation); } // TODO(emilio): Flip Y axis if necessary here // TODO(emilio): Invert axis, convert colorspace, premultiply alpha if requested let msg = WebGLCommand::TexSubImage2D(target.as_gl_constant(), level as i32, xoffset, yoffset, width as i32, height as i32, format.as_gl_constant(), data_type.as_gl_constant(), pixels); self.ipc_renderer .send(CanvasMsg::WebGL(msg)) .unwrap() } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14 fn validate_feature_enum(&self, cap: u32) -> bool { match cap { constants::BLEND | constants::CULL_FACE | constants::DEPTH_TEST | constants::DITHER | constants::POLYGON_OFFSET_FILL | constants::SAMPLE_ALPHA_TO_COVERAGE | constants::SAMPLE_COVERAGE | constants::SAMPLE_COVERAGE_INVERT | constants::SCISSOR_TEST => true, _ => { self.webgl_error(InvalidEnum); false }, } } } impl Drop for WebGLRenderingContext { fn drop(&mut self) { self.ipc_renderer.send(CanvasMsg::Common(CanvasCommonMsg::Close)).unwrap(); } } // FIXME: After [1] lands and the relevant Servo and codegen PR too, we should // convert all our raw JSObject pointers to proper types. 
// // [1]: https://github.com/servo/rust-mozjs/pull/304 #[allow(unsafe_code)] unsafe fn typed_array_or_sequence_to_vec<T>(cx: *mut JSContext, sequence_or_abv: *mut JSObject, config: <T as FromJSValConvertible>::Config) -> Result<Vec<T>, Error> where T: ArrayBufferViewContents + FromJSValConvertible, <T as FromJSValConvertible>::Config: Clone, { assert!(!sequence_or_abv.is_null()); if let Some(v) = array_buffer_view_to_vec_checked::<T>(sequence_or_abv) { return Ok(v); } rooted!(in(cx) let mut val = UndefinedValue()); sequence_or_abv.to_jsval(cx, val.handle_mut()); match Vec::<T>::from_jsval(cx, val.handle(), config) { Ok(ConversionResult::Success(v)) => Ok(v), Ok(ConversionResult::Failure(error)) => Err(Error::Type(error.into_owned())), // FIXME: What to do here? Generated code only aborts the execution of // the script. Err(err) => panic!("unexpected conversion error: {:?}", err), } } #[allow(unsafe_code)] unsafe fn fallible_array_buffer_view_to_vec<T>(abv: *mut JSObject) -> Result<Vec<T>, Error> where T: ArrayBufferViewContents { assert!(!abv.is_null()); match array_buffer_view_to_vec::<T>(abv) { Some(v) => Ok(v), None => Err(Error::Type("Not an ArrayBufferView".to_owned())), } } impl WebGLRenderingContextMethods for WebGLRenderingContext { // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.1 fn Canvas(&self) -> Root<HTMLCanvasElement> { Root::from_ref(&*self.canvas) } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.11 fn Flush(&self) { self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::Flush)) .unwrap(); } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.11 fn Finish(&self) { let (sender, receiver) = ipc::channel().unwrap(); self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::Finish(sender))) .unwrap(); receiver.recv().unwrap() } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.1 fn DrawingBufferWidth(&self) -> i32 { let (sender, receiver) = ipc::channel().unwrap(); self.ipc_renderer 
.send(CanvasMsg::WebGL(WebGLCommand::DrawingBufferWidth(sender))) .unwrap(); receiver.recv().unwrap() } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.1 fn DrawingBufferHeight(&self) -> i32 { let (sender, receiver) = ipc::channel().unwrap(); self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::DrawingBufferHeight(sender))) .unwrap(); receiver.recv().unwrap() } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.5 fn GetBufferParameter(&self, _cx: *mut JSContext, target: u32, parameter: u32) -> JSVal { let (sender, receiver) = ipc::channel().unwrap(); self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::GetBufferParameter(target, parameter, sender))) .unwrap(); match handle_potential_webgl_error!(self, receiver.recv().unwrap(), WebGLParameter::Invalid) { WebGLParameter::Int(val) => Int32Value(val), WebGLParameter::Bool(_) => panic!("Buffer parameter should not be bool"), WebGLParameter::Float(_) => panic!("Buffer parameter should not be float"), WebGLParameter::FloatArray(_) => panic!("Buffer parameter should not be float array"), WebGLParameter::String(_) => panic!("Buffer parameter should not be string"), WebGLParameter::Invalid => NullValue(), } } #[allow(unsafe_code)] // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.3 fn GetParameter(&self, cx: *mut JSContext, parameter: u32) -> JSVal { // Handle the GL_*_BINDING without going all the way // to the GL, since we would just need to map back from GL's // returned ID to the WebGL* object we're tracking. 
match parameter { constants::ARRAY_BUFFER_BINDING => return object_binding_to_js_or_null!(cx, &self.bound_buffer_array), constants::ELEMENT_ARRAY_BUFFER_BINDING => return object_binding_to_js_or_null!(cx, &self.bound_buffer_element_array), constants::FRAMEBUFFER_BINDING => return object_binding_to_js_or_null!(cx, &self.bound_framebuffer), constants::RENDERBUFFER_BINDING => return object_binding_to_js_or_null!(cx, &self.bound_renderbuffer), constants::TEXTURE_BINDING_2D => return object_binding_to_js_or_null!(cx, &self.bound_texture_2d), constants::TEXTURE_BINDING_CUBE_MAP => return object_binding_to_js_or_null!(cx, &self.bound_texture_cube_map), _ => {} } let (sender, receiver) = ipc::channel().unwrap(); self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::GetParameter(parameter, sender))) .unwrap(); match handle_potential_webgl_error!(self, receiver.recv().unwrap(), WebGLParameter::Invalid) { WebGLParameter::Int(val) => Int32Value(val), WebGLParameter::Bool(val) => BooleanValue(val), WebGLParameter::Float(val) => DoubleValue(val as f64), WebGLParameter::FloatArray(_) => panic!("Parameter should not be float array"), WebGLParameter::String(val) => { rooted!(in(cx) let mut rval = UndefinedValue()); unsafe { val.to_jsval(cx, rval.handle_mut()); } rval.get() } WebGLParameter::Invalid => NullValue(), } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.3 fn GetError(&self) -> u32 { let error_code = if let Some(error) = self.last_error.get() { match error { WebGLError::InvalidEnum => constants::INVALID_ENUM, WebGLError::InvalidFramebufferOperation => constants::INVALID_FRAMEBUFFER_OPERATION, WebGLError::InvalidValue => constants::INVALID_VALUE, WebGLError::InvalidOperation => constants::INVALID_OPERATION, WebGLError::OutOfMemory => constants::OUT_OF_MEMORY, WebGLError::ContextLost => constants::CONTEXT_LOST_WEBGL, } } else { constants::NO_ERROR }; self.last_error.set(None); error_code } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.2 
fn GetContextAttributes(&self) -> Option<WebGLContextAttributes> { let (sender, receiver) = ipc::channel().unwrap(); // If the send does not succeed, assume context lost if let Err(_) = self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::GetContextAttributes(sender))) { return None; } let attrs = receiver.recv().unwrap(); Some(WebGLContextAttributes { alpha: attrs.alpha, antialias: attrs.antialias, depth: attrs.depth, failIfMajorPerformanceCaveat: false, preferLowPowerToHighPerformance: false, premultipliedAlpha: attrs.premultiplied_alpha, preserveDrawingBuffer: attrs.preserve_drawing_buffer, stencil: attrs.stencil }) } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.14 fn GetSupportedExtensions(&self) -> Option<Vec<DOMString>> { Some(vec![]) } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.14 fn GetExtension(&self, _cx: *mut JSContext, _name: DOMString) -> Option<NonZero<*mut JSObject>> { None } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.3 fn ActiveTexture(&self, texture: u32) { self.ipc_renderer.send(CanvasMsg::WebGL(WebGLCommand::ActiveTexture(texture))).unwrap(); } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.3 fn BlendColor(&self, r: f32, g: f32, b: f32, a: f32) { self.ipc_renderer.send(CanvasMsg::WebGL(WebGLCommand::BlendColor(r, g, b, a))).unwrap(); } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.3 fn BlendEquation(&self, mode: u32) { self.ipc_renderer.send(CanvasMsg::WebGL(WebGLCommand::BlendEquation(mode))).unwrap(); } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.3 fn BlendEquationSeparate(&self, mode_rgb: u32, mode_alpha: u32) { self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::BlendEquationSeparate(mode_rgb, mode_alpha))) .unwrap(); } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.3 fn BlendFunc(&self, src_factor: u32, dest_factor: u32) { self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::BlendFunc(src_factor, 
dest_factor))) .unwrap(); } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.3 fn BlendFuncSeparate(&self, src_rgb: u32, dest_rgb: u32, src_alpha: u32, dest_alpha: u32) { self.ipc_renderer.send( CanvasMsg::WebGL(WebGLCommand::BlendFuncSeparate(src_rgb, dest_rgb, src_alpha, dest_alpha))).unwrap(); } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.9 fn AttachShader(&self, program: Option<&WebGLProgram>, shader: Option<&WebGLShader>) { if let Some(program) = program { if let Some(shader) = shader { handle_potential_webgl_error!(self, program.attach_shader(shader)); } } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.9 fn DetachShader(&self, program: Option<&WebGLProgram>, shader: Option<&WebGLShader>) { if let Some(program) = program { if let Some(shader) = shader { handle_potential_webgl_error!(self, program.detach_shader(shader)); } } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.9 fn BindAttribLocation(&self, program: Option<&WebGLProgram>, index: u32, name: DOMString) { if let Some(program) = program { handle_potential_webgl_error!(self, program.bind_attrib_location(index, name)); } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.5 fn BindBuffer(&self, target: u32, buffer: Option<&WebGLBuffer>) { let slot = match target { constants::ARRAY_BUFFER => &self.bound_buffer_array, constants::ELEMENT_ARRAY_BUFFER => &self.bound_buffer_element_array, _ => return self.webgl_error(InvalidEnum), }; if let Some(buffer) = buffer { match buffer.bind(target) { Ok(_) => slot.set(Some(buffer)), Err(e) => return self.webgl_error(e), } } else { slot.set(None); // Unbind the current buffer self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::BindBuffer(target, None))) .unwrap() } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.6 fn BindFramebuffer(&self, target: u32, framebuffer: Option<&WebGLFramebuffer>) { if target != constants::FRAMEBUFFER { return 
self.webgl_error(InvalidEnum); // per GLES 2.0.25 §4.4.1: bad target is INVALID_ENUM
        }

        if let Some(framebuffer) = framebuffer {
            if framebuffer.is_deleted() {
                // From the WebGL spec:
                //
                //   "An attempt to bind a deleted framebuffer will
                //    generate an INVALID_OPERATION error, and the
                //    current binding will remain untouched."
                return self.webgl_error(InvalidOperation);
            } else {
                framebuffer.bind(target);
                self.bound_framebuffer.set(Some(framebuffer));
            }
        } else {
            // Binding null rebinds the canvas' default framebuffer.
            let cmd = WebGLCommand::BindFramebuffer(target,
                                                    WebGLFramebufferBindingRequest::Default);
            self.ipc_renderer.send(CanvasMsg::WebGL(cmd)).unwrap();
            self.bound_framebuffer.set(framebuffer);
        }
    }

    // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.7
    fn BindRenderbuffer(&self, target: u32, renderbuffer: Option<&WebGLRenderbuffer>) {
        // The only valid target is RENDERBUFFER.
        if target != constants::RENDERBUFFER {
            return self.webgl_error(InvalidEnum);
        }

        match renderbuffer {
            // Implementations differ on what to do in the deleted
            // case: Chromium currently unbinds, and Gecko silently
            // returns.  The conformance tests don't cover this case.
Some(renderbuffer) if !renderbuffer.is_deleted() => { self.bound_renderbuffer.set(Some(renderbuffer)); renderbuffer.bind(target); } _ => { self.bound_renderbuffer.set(None); // Unbind the currently bound renderbuffer self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::BindRenderbuffer(target, None))) .unwrap() } } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.8 fn BindTexture(&self, target: u32, texture: Option<&WebGLTexture>) { let slot = match target { constants::TEXTURE_2D => &self.bound_texture_2d, constants::TEXTURE_CUBE_MAP => &self.bound_texture_cube_map, _ => return self.webgl_error(InvalidEnum), }; if let Some(texture) = texture { match texture.bind(target) { Ok(_) => slot.set(Some(texture)), Err(err) => return self.webgl_error(err), } } else { slot.set(None); // Unbind the currently bound texture self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::BindTexture(target, None))) .unwrap() } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.8 fn GenerateMipmap(&self, target: u32) { let slot = match target { constants::TEXTURE_2D => &self.bound_texture_2d, constants::TEXTURE_CUBE_MAP => &self.bound_texture_cube_map, _ => return self.webgl_error(InvalidEnum), }; match slot.get() { Some(texture) => handle_potential_webgl_error!(self, texture.generate_mipmap()), None => self.webgl_error(InvalidOperation) } } #[allow(unsafe_code)] // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.5 fn BufferData(&self, _cx: *mut JSContext, target: u32, data: *mut JSObject, usage: u32) -> Fallible<()> { if data.is_null() { return Ok(self.webgl_error(InvalidValue)); } let data_vec = unsafe { match array_buffer_to_vec::<u8>(data) { Some(data) => data, // Not an ArrayBuffer object, maybe an ArrayBufferView? 
None => try!(fallible_array_buffer_view_to_vec::<u8>(data)), } }; let bound_buffer = match target { constants::ARRAY_BUFFER => self.bound_buffer_array.get(), constants::ELEMENT_ARRAY_BUFFER => self.bound_buffer_element_array.get(), _ => return Ok(self.webgl_error(InvalidEnum)), }; let bound_buffer = match bound_buffer { Some(bound_buffer) => bound_buffer, None => return Ok(self.webgl_error(InvalidValue)), }; match usage { constants::STREAM_DRAW | constants::STATIC_DRAW | constants::DYNAMIC_DRAW => (), _ => return Ok(self.webgl_error(InvalidEnum)), } handle_potential_webgl_error!(self, bound_buffer.buffer_data(target, &data_vec, usage)); Ok(()) } #[allow(unsafe_code)] // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.5 fn BufferSubData(&self, _cx: *mut JSContext, target: u32, offset: i64, data: *mut JSObject) -> Fallible<()> { if data.is_null() { return Ok(self.webgl_error(InvalidValue)); } let data_vec = unsafe { match array_buffer_to_vec::<u8>(data) { Some(data) => data, // Not an ArrayBuffer object, maybe an ArrayBufferView? 
None => try!(fallible_array_buffer_view_to_vec::<u8>(data)), } }; let bound_buffer = match target { constants::ARRAY_BUFFER => self.bound_buffer_array.get(), constants::ELEMENT_ARRAY_BUFFER => self.bound_buffer_element_array.get(), _ => return Ok(self.webgl_error(InvalidEnum)), }; let bound_buffer = match bound_buffer { Some(bound_buffer) => bound_buffer, None => return Ok(self.webgl_error(InvalidOperation)), }; if offset < 0 { return Ok(self.webgl_error(InvalidValue)); } if (offset as usize) + data_vec.len() > bound_buffer.capacity() { return Ok(self.webgl_error(InvalidValue)); } self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::BufferSubData(target, offset as isize, data_vec))) .unwrap(); Ok(()) } #[allow(unsafe_code)] // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.8 fn CompressedTexImage2D(&self, _cx: *mut JSContext, _target: u32, _level: i32, _internal_format: u32, _width: i32, _height: i32, _border: i32, pixels: *mut JSObject) -> Fallible<()> { let _data = try!(unsafe { fallible_array_buffer_view_to_vec::<u8>(pixels) }); // FIXME: No compressed texture format is currently supported, so error out as per // https://www.khronos.org/registry/webgl/specs/latest/1.0/#COMPRESSED_TEXTURE_SUPPORT self.webgl_error(InvalidEnum); Ok(()) } #[allow(unsafe_code)] // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.8 fn CompressedTexSubImage2D(&self, _cx: *mut JSContext, _target: u32, _level: i32, _xoffset: i32, _yoffset: i32, _width: i32, _height: i32, _format: u32, pixels: *mut JSObject) -> Fallible<()> { let _data = try!(unsafe { fallible_array_buffer_view_to_vec::<u8>(pixels) }); // FIXME: No compressed texture format is currently supported, so error out as per // https://www.khronos.org/registry/webgl/specs/latest/1.0/#COMPRESSED_TEXTURE_SUPPORT self.webgl_error(InvalidEnum); Ok(()) } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.8 fn CopyTexImage2D(&self, target: u32, level: i32, internal_format: u32, x: i32, y: 
i32, width: i32, height: i32, border: i32) { if !self.validate_framebuffer_complete() { return; } let validator = CommonTexImage2DValidator::new(self, target, level, internal_format, width, height, border); let CommonTexImage2DValidatorResult { texture, target, level, internal_format, width, height, border, } = match validator.validate() { Ok(result) => result, Err(_) => return, }; let image_info = texture.image_info_for_target(&target, level); // The color buffer components can be dropped during the conversion to // the internal_format, but new components cannot be added. // // Note that this only applies if we're copying to an already // initialized texture. // // GL_INVALID_OPERATION is generated if the color buffer cannot be // converted to the internal_format. if let Some(old_internal_format) = image_info.internal_format() { if old_internal_format.components() > internal_format.components() { return self.webgl_error(InvalidOperation); } } // NB: TexImage2D depth is always equal to 1 handle_potential_webgl_error!(self, texture.initialize(target, width as u32, height as u32, 1, internal_format, level as u32, None)); let msg = WebGLCommand::CopyTexImage2D(target.as_gl_constant(), level as i32, internal_format.as_gl_constant(), x, y, width as i32, height as i32, border as i32); self.ipc_renderer.send(CanvasMsg::WebGL(msg)).unwrap() } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.8 fn CopyTexSubImage2D(&self, target: u32, level: i32, xoffset: i32, yoffset: i32, x: i32, y: i32, width: i32, height: i32) { if !self.validate_framebuffer_complete() { return; } // NB: We use a dummy (valid) format and border in order to reuse the // common validations, but this should have its own validator. let validator = CommonTexImage2DValidator::new(self, target, level, TexFormat::RGBA.as_gl_constant(), width, height, 0); let CommonTexImage2DValidatorResult { texture, target, level, width, height, .. 
} = match validator.validate() { Ok(result) => result, Err(_) => return, }; let image_info = texture.image_info_for_target(&target, level); // GL_INVALID_VALUE is generated if: // - xoffset or yoffset is less than 0 // - x offset plus the width is greater than the texture width // - y offset plus the height is greater than the texture height if xoffset < 0 || (xoffset as u32 + width) > image_info.width() || yoffset < 0 || (yoffset as u32 + height) > image_info.height() { self.webgl_error(InvalidValue); return; } let msg = WebGLCommand::CopyTexSubImage2D(target.as_gl_constant(), level as i32, xoffset, yoffset, x, y, width as i32, height as i32); self.ipc_renderer.send(CanvasMsg::WebGL(msg)).unwrap(); } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.11 fn Clear(&self, mask: u32) { if !self.validate_framebuffer_complete() { return; } self.ipc_renderer.send(CanvasMsg::WebGL(WebGLCommand::Clear(mask))).unwrap(); self.mark_as_dirty(); } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.3 fn ClearColor(&self, red: f32, green: f32, blue: f32, alpha: f32) { self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::ClearColor(red, green, blue, alpha))) .unwrap() } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.3 fn ClearDepth(&self, depth: f32) { self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::ClearDepth(depth as f64))) .unwrap() } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.3 fn ClearStencil(&self, stencil: i32) { self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::ClearStencil(stencil))) .unwrap() } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.3 fn ColorMask(&self, r: bool, g: bool, b: bool, a: bool) { self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::ColorMask(r, g, b, a))) .unwrap() } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.3 fn CullFace(&self, mode: u32) { match mode { constants::FRONT | constants::BACK | constants::FRONT_AND_BACK => 
self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::CullFace(mode))) .unwrap(), _ => self.webgl_error(InvalidEnum), } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.3 fn FrontFace(&self, mode: u32) { match mode { constants::CW | constants::CCW => self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::FrontFace(mode))) .unwrap(), _ => self.webgl_error(InvalidEnum), } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.3 fn DepthFunc(&self, func: u32) { match func { constants::NEVER | constants::LESS | constants::EQUAL | constants::LEQUAL | constants::GREATER | constants::NOTEQUAL | constants::GEQUAL | constants::ALWAYS => self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::DepthFunc(func))) .unwrap(), _ => self.webgl_error(InvalidEnum), } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.3 fn DepthMask(&self, flag: bool) { self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::DepthMask(flag))) .unwrap() } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.3 fn DepthRange(&self, near: f32, far: f32) { // From the WebGL 1.0 spec, 6.12: Viewport Depth Range: // // "A call to depthRange will generate an // INVALID_OPERATION error if zNear is greater than // zFar." 
if near > far { return self.webgl_error(InvalidOperation); } self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::DepthRange(near as f64, far as f64))) .unwrap() } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.3 fn Enable(&self, cap: u32) { if self.validate_feature_enum(cap) { self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::Enable(cap))) .unwrap(); } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.3 fn Disable(&self, cap: u32) { if self.validate_feature_enum(cap) { self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::Disable(cap))) .unwrap() } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.9 fn CompileShader(&self, shader: Option<&WebGLShader>) { if let Some(shader) = shader { shader.compile() } } // TODO(emilio): Probably in the future we should keep track of the // generated objects, either here or in the webgl thread // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.5 fn CreateBuffer(&self) -> Option<Root<WebGLBuffer>> { WebGLBuffer::maybe_new(&self.global(), self.ipc_renderer.clone()) } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.6 fn CreateFramebuffer(&self) -> Option<Root<WebGLFramebuffer>> { WebGLFramebuffer::maybe_new(&self.global(), self.ipc_renderer.clone()) } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.7 fn CreateRenderbuffer(&self) -> Option<Root<WebGLRenderbuffer>> { WebGLRenderbuffer::maybe_new(&self.global(), self.ipc_renderer.clone()) } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.8 fn CreateTexture(&self) -> Option<Root<WebGLTexture>> { WebGLTexture::maybe_new(&self.global(), self.ipc_renderer.clone()) } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.9 fn CreateProgram(&self) -> Option<Root<WebGLProgram>> { WebGLProgram::maybe_new(&self.global(), self.ipc_renderer.clone()) } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.9 fn CreateShader(&self, shader_type: u32) -> 
Option<Root<WebGLShader>> { match shader_type { constants::VERTEX_SHADER | constants::FRAGMENT_SHADER => {}, _ => { self.webgl_error(InvalidEnum); return None; } } WebGLShader::maybe_new(&self.global(), self.ipc_renderer.clone(), shader_type) } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.5 fn DeleteBuffer(&self, buffer: Option<&WebGLBuffer>) { if let Some(buffer) = buffer { handle_object_deletion!(self, self.bound_buffer_array, buffer, Some(WebGLCommand::BindBuffer(constants::ARRAY_BUFFER, None))); handle_object_deletion!(self, self.bound_buffer_element_array, buffer, Some(WebGLCommand::BindBuffer(constants::ELEMENT_ARRAY_BUFFER, None))); buffer.delete() } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.6 fn DeleteFramebuffer(&self, framebuffer: Option<&WebGLFramebuffer>) { if let Some(framebuffer) = framebuffer { handle_object_deletion!(self, self.bound_framebuffer, framebuffer, Some(WebGLCommand::BindFramebuffer(constants::FRAMEBUFFER, WebGLFramebufferBindingRequest::Default))); framebuffer.delete() } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.7 fn DeleteRenderbuffer(&self, renderbuffer: Option<&WebGLRenderbuffer>) { if let Some(renderbuffer) = renderbuffer { handle_object_deletion!(self, self.bound_renderbuffer, renderbuffer, Some(WebGLCommand::BindRenderbuffer(constants::RENDERBUFFER, None))); // From the GLES 2.0.25 spec, page 113: // // "If a renderbuffer object is deleted while its // image is attached to the currently bound // framebuffer, then it is as if // FramebufferRenderbuffer had been called, with a // renderbuffer of 0, for each attachment point to // which this image was attached in the currently // bound framebuffer." 
// if let Some(fb) = self.bound_framebuffer.get() { fb.detach_renderbuffer(renderbuffer); } renderbuffer.delete() } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.8 fn DeleteTexture(&self, texture: Option<&WebGLTexture>) { if let Some(texture) = texture { handle_object_deletion!(self, self.bound_texture_2d, texture, Some(WebGLCommand::BindTexture(constants::TEXTURE_2D, None))); handle_object_deletion!(self, self.bound_texture_cube_map, texture, Some(WebGLCommand::BindTexture(constants::TEXTURE_CUBE_MAP, None))); // From the GLES 2.0.25 spec, page 113: // // "If a texture object is deleted while its image is // attached to the currently bound framebuffer, then // it is as if FramebufferTexture2D had been called, // with a texture of 0, for each attachment point to // which this image was attached in the currently // bound framebuffer." if let Some(fb) = self.bound_framebuffer.get() { fb.detach_texture(texture); } texture.delete() } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.9 fn DeleteProgram(&self, program: Option<&WebGLProgram>) { if let Some(program) = program { // FIXME: We should call glUseProgram(0), but // WebGLCommand::UseProgram() doesn't take an Option // currently. 
This is also a problem for useProgram(null) handle_object_deletion!(self, self.current_program, program, None); program.delete() } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.9 fn DeleteShader(&self, shader: Option<&WebGLShader>) { if let Some(shader) = shader { shader.delete() } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.11 fn DrawArrays(&self, mode: u32, first: i32, count: i32) { match mode { constants::POINTS | constants::LINE_STRIP | constants::LINE_LOOP | constants::LINES | constants::TRIANGLE_STRIP | constants::TRIANGLE_FAN | constants::TRIANGLES => { if self.current_program.get().is_none() { return self.webgl_error(InvalidOperation); } if first < 0 || count < 0 { return self.webgl_error(InvalidValue); } if !self.validate_framebuffer_complete() { return; } self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::DrawArrays(mode, first, count))) .unwrap(); self.mark_as_dirty(); }, _ => self.webgl_error(InvalidEnum), } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.11 fn DrawElements(&self, mode: u32, count: i32, type_: u32, offset: i64) { // From the GLES 2.0.25 spec, page 21: // // "type must be one of UNSIGNED_BYTE or UNSIGNED_SHORT" let type_size = match type_ { constants::UNSIGNED_BYTE => 1, constants::UNSIGNED_SHORT => 2, _ => return self.webgl_error(InvalidEnum), }; if offset % type_size != 0 { return self.webgl_error(InvalidOperation); } if count < 0 { return self.webgl_error(InvalidValue); } if offset < 0 { return self.webgl_error(InvalidValue); } if self.current_program.get().is_none() { // From the WebGL spec // // If the CURRENT_PROGRAM is null, an INVALID_OPERATION error will be generated. // WebGL performs additional error checking beyond that specified // in OpenGL ES 2.0 during calls to drawArrays and drawElements. 
// return self.webgl_error(InvalidOperation); } if let Some(array_buffer) = self.bound_buffer_element_array.get() { // WebGL Spec: check buffer overflows, must be a valid multiple of the size. let val = offset as u64 + (count as u64 * type_size as u64); if val > array_buffer.capacity() as u64 { return self.webgl_error(InvalidOperation); } } else { // From the WebGL spec // // a non-null WebGLBuffer must be bound to the ELEMENT_ARRAY_BUFFER binding point // or an INVALID_OPERATION error will be generated. // return self.webgl_error(InvalidOperation); } if !self.validate_framebuffer_complete() { return; } match mode { constants::POINTS | constants::LINE_STRIP | constants::LINE_LOOP | constants::LINES | constants::TRIANGLE_STRIP | constants::TRIANGLE_FAN | constants::TRIANGLES => { self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::DrawElements(mode, count, type_, offset))) .unwrap(); self.mark_as_dirty(); }, _ => self.webgl_error(InvalidEnum), } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10 fn EnableVertexAttribArray(&self, attrib_id: u32) { if attrib_id > self.limits.max_vertex_attribs { return self.webgl_error(InvalidValue); } self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::EnableVertexAttribArray(attrib_id))) .unwrap() } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10 fn DisableVertexAttribArray(&self, attrib_id: u32) { if attrib_id > self.limits.max_vertex_attribs { return self.webgl_error(InvalidValue); } self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::DisableVertexAttribArray(attrib_id))) .unwrap() } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10 fn GetActiveUniform(&self, program: Option<&WebGLProgram>, index: u32) -> Option<Root<WebGLActiveInfo>> { let program = match program { Some(program) => program, None => { // Reasons to generate InvalidValue error // From the GLES 2.0 spec // // "INVALID_VALUE is generated if index is greater than or equal // to the number of active 
uniform variables in program" // // A null program has no uniforms so any index is always greater than the active uniforms // WebGl conformance expects error with null programs. Check tests in get-active-test.html self.webgl_error(InvalidValue); return None; } }; match program.get_active_uniform(index) { Ok(ret) => Some(ret), Err(e) => { self.webgl_error(e); return None; } } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10 fn GetActiveAttrib(&self, program: Option<&WebGLProgram>, index: u32) -> Option<Root<WebGLActiveInfo>> { let program = match program { Some(program) => program, None => { // Reasons to generate InvalidValue error // From the GLES 2.0 spec // // "INVALID_VALUE is generated if index is greater than or equal // to the number of active attribute variables in program" // // A null program has no attributes so any index is always greater than the active uniforms // WebGl conformance expects error with null programs. Check tests in get-active-test.html self.webgl_error(InvalidValue); return None; } }; match program.get_active_attrib(index) { Ok(ret) => Some(ret), Err(e) => { self.webgl_error(e); return None; } } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10 fn GetAttribLocation(&self, program: Option<&WebGLProgram>, name: DOMString) -> i32 { if let Some(program) = program { handle_potential_webgl_error!(self, program.get_attrib_location(name), None).unwrap_or(-1) } else { -1 } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.9 fn GetProgramInfoLog(&self, program: Option<&WebGLProgram>) -> Option<DOMString> { if let Some(program) = program { match program.get_info_log() { Ok(value) => Some(DOMString::from(value)), Err(e) => { self.webgl_error(e); None } } } else { self.webgl_error(WebGLError::InvalidValue); None } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.9 fn GetProgramParameter(&self, _: *mut JSContext, program: Option<&WebGLProgram>, param_id: u32) -> JSVal { if 
let Some(program) = program { match handle_potential_webgl_error!(self, program.parameter(param_id), WebGLParameter::Invalid) { WebGLParameter::Int(val) => Int32Value(val), WebGLParameter::Bool(val) => BooleanValue(val), WebGLParameter::String(_) => panic!("Program parameter should not be string"), WebGLParameter::Float(_) => panic!("Program parameter should not be float"), WebGLParameter::FloatArray(_) => { panic!("Program paramenter should not be float array") } WebGLParameter::Invalid => NullValue(), } } else { NullValue() } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.9 fn GetShaderInfoLog(&self, shader: Option<&WebGLShader>) -> Option<DOMString> { shader.and_then(|s| s.info_log()).map(DOMString::from) } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.9 fn GetShaderParameter(&self, _: *mut JSContext, shader: Option<&WebGLShader>, param_id: u32) -> JSVal { if let Some(shader) = shader { match handle_potential_webgl_error!(self, shader.parameter(param_id), WebGLParameter::Invalid) { WebGLParameter::Int(val) => Int32Value(val), WebGLParameter::Bool(val) => BooleanValue(val), WebGLParameter::String(_) => panic!("Shader parameter should not be string"), WebGLParameter::Float(_) => panic!("Shader parameter should not be float"), WebGLParameter::FloatArray(_) => { panic!("Shader paramenter should not be float array") } WebGLParameter::Invalid => NullValue(), } } else { NullValue() } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10 fn GetUniformLocation(&self, program: Option<&WebGLProgram>, name: DOMString) -> Option<Root<WebGLUniformLocation>> { program.and_then(|p| { handle_potential_webgl_error!(self, p.get_uniform_location(name), None) .map(|location| WebGLUniformLocation::new(&self.global(), location, p.id())) }) } #[allow(unsafe_code)] // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.9 fn GetVertexAttrib(&self, cx: *mut JSContext, index: u32, pname: u32) -> JSVal { if index == 0 && pname 
== constants::CURRENT_VERTEX_ATTRIB { rooted!(in(cx) let mut result = UndefinedValue()); let (x, y, z, w) = self.current_vertex_attrib_0.get(); let attrib = vec![x, y, z, w]; unsafe { attrib.to_jsval(cx, result.handle_mut()); } return result.get() } let (sender, receiver) = ipc::channel().unwrap(); self.ipc_renderer.send(CanvasMsg::WebGL(WebGLCommand::GetVertexAttrib(index, pname, sender))).unwrap(); match handle_potential_webgl_error!(self, receiver.recv().unwrap(), WebGLParameter::Invalid) { WebGLParameter::Int(val) => Int32Value(val), WebGLParameter::Bool(val) => BooleanValue(val), WebGLParameter::String(_) => panic!("Vertex attrib should not be string"), WebGLParameter::Float(_) => panic!("Vertex attrib should not be float"), WebGLParameter::FloatArray(val) => { rooted!(in(cx) let mut result = UndefinedValue()); unsafe { val.to_jsval(cx, result.handle_mut()); } result.get() } WebGLParameter::Invalid => NullValue(), } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.3 fn Hint(&self, target: u32, mode: u32) { if target != constants::GENERATE_MIPMAP_HINT { return self.webgl_error(InvalidEnum); } match mode { constants::FASTEST | constants::NICEST | constants::DONT_CARE => (), _ => return self.webgl_error(InvalidEnum), } self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::Hint(target, mode))) .unwrap() } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.5 fn IsBuffer(&self, buffer: Option<&WebGLBuffer>) -> bool { buffer.map_or(false, |buf| buf.target().is_some() && !buf.is_deleted()) } // TODO: We could write this without IPC, recording the calls to `enable` and `disable`. 
// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.3 fn IsEnabled(&self, cap: u32) -> bool { if self.validate_feature_enum(cap) { let (sender, receiver) = ipc::channel().unwrap(); self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::IsEnabled(cap, sender))) .unwrap(); return receiver.recv().unwrap(); } false } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.6 fn IsFramebuffer(&self, frame_buffer: Option<&WebGLFramebuffer>) -> bool { frame_buffer.map_or(false, |buf| buf.target().is_some() && !buf.is_deleted()) } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.9 fn IsProgram(&self, program: Option<&WebGLProgram>) -> bool { program.map_or(false, |p| !p.is_deleted()) } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.7 fn IsRenderbuffer(&self, render_buffer: Option<&WebGLRenderbuffer>) -> bool { render_buffer.map_or(false, |buf| buf.ever_bound() && !buf.is_deleted()) } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.9 fn IsShader(&self, shader: Option<&WebGLShader>) -> bool { shader.map_or(false, |s| !s.is_deleted() || s.is_attached()) } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.8 fn IsTexture(&self, texture: Option<&WebGLTexture>) -> bool { texture.map_or(false, |tex| tex.target().is_some() && !tex.is_deleted()) } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.3 fn LineWidth(&self, width: f32) { if width.is_nan() || width <= 0f32 { return self.webgl_error(InvalidValue); } self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::LineWidth(width))) .unwrap() } // NOTE: Usage of this function could affect rendering while we keep using // readback to render to the page. 
// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.3
//
// Sets pixel-storage state. The WebGL-only settings (flip-Y, premultiply-alpha,
// colorspace conversion) are tracked purely on the script side in
// `texture_unpacking_settings` and never forwarded to the GL server; only the
// real GL alignment parameters are sent over IPC.
fn PixelStorei(&self, param_name: u32, param_value: i32) {
    let mut texture_settings = self.texture_unpacking_settings.get();
    match param_name {
        constants::UNPACK_FLIP_Y_WEBGL => {
            // Any non-zero value enables the flag, matching GL boolean semantics.
            if param_value != 0 {
                texture_settings.insert(FLIP_Y_AXIS)
            } else {
                texture_settings.remove(FLIP_Y_AXIS)
            }
            self.texture_unpacking_settings.set(texture_settings);
            return;
        },
        constants::UNPACK_PREMULTIPLY_ALPHA_WEBGL => {
            if param_value != 0 {
                texture_settings.insert(PREMULTIPLY_ALPHA)
            } else {
                texture_settings.remove(PREMULTIPLY_ALPHA)
            }
            self.texture_unpacking_settings.set(texture_settings);
            return;
        },
        constants::UNPACK_COLORSPACE_CONVERSION_WEBGL => {
            match param_value as u32 {
                constants::BROWSER_DEFAULT_WEBGL => texture_settings.insert(CONVERT_COLORSPACE),
                constants::NONE => texture_settings.remove(CONVERT_COLORSPACE),
                _ => return self.webgl_error(InvalidEnum),
            }
            self.texture_unpacking_settings.set(texture_settings);
            return;
        },
        constants::UNPACK_ALIGNMENT |
        constants::PACK_ALIGNMENT => {
            // GLES only allows power-of-two alignments up to 8.
            match param_value {
                1 | 2 | 4 | 8 => (),
                _ => return self.webgl_error(InvalidValue),
            }
        },
        _ => return self.webgl_error(InvalidEnum),
    }
    // Only reached for UNPACK_ALIGNMENT / PACK_ALIGNMENT.
    self.ipc_renderer
        .send(CanvasMsg::WebGL(WebGLCommand::PixelStorei(param_name, param_value)))
        .unwrap()
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.3
fn PolygonOffset(&self, factor: f32, units: f32) {
    self.ipc_renderer
        .send(CanvasMsg::WebGL(WebGLCommand::PolygonOffset(factor, units)))
        .unwrap()
}

#[allow(unsafe_code)]
// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.12
//
// Reads back pixels from the framebuffer into the caller-supplied typed array.
// Only Uint8 views are accepted here; a synchronous round-trip to the renderer
// fetches the pixel data.
fn ReadPixels(&self, _cx: *mut JSContext, x: i32, y: i32, width: i32, height: i32,
              format: u32, pixel_type: u32, pixels: *mut JSObject) -> Fallible<()> {
    if pixels.is_null() {
        return Ok(self.webgl_error(InvalidValue));
    }

    let mut data = match unsafe { array_buffer_view_data::<u8>(pixels) } {
        Some(data) => data,
        None => return Err(Error::Type("Not an ArrayBufferView".to_owned())),
    };

    if !self.validate_framebuffer_complete() {
        return Ok(());
    }

    match unsafe { JS_GetArrayBufferViewType(pixels) } {
        Type::Uint8 => (),
        _ => return Ok(self.webgl_error(InvalidOperation)),
    }

    let (sender, receiver) = ipc::channel().unwrap();
    self.ipc_renderer
        .send(CanvasMsg::WebGL(WebGLCommand::ReadPixels(x, y, width, height, format, pixel_type, sender)))
        .unwrap();

    let result = receiver.recv().unwrap();

    // The destination view must be large enough to hold the returned pixels.
    if result.len() > data.len() {
        return Ok(self.webgl_error(InvalidOperation));
    }

    for i in 0..result.len() {
        data[i] = result[i]
    }

    Ok(())
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.3
fn SampleCoverage(&self, value: f32, invert: bool) {
    self.ipc_renderer.send(CanvasMsg::WebGL(WebGLCommand::SampleCoverage(value, invert))).unwrap();
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.4
fn Scissor(&self, x: i32, y: i32, width: i32, height: i32) {
    if width < 0 || height < 0 {
        return self.webgl_error(InvalidValue)
    }

    self.ipc_renderer
        .send(CanvasMsg::WebGL(WebGLCommand::Scissor(x, y, width, height)))
        .unwrap()
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.3
//
// Forwards the stencil comparison function after checking `func` is one of
// the eight valid GLES comparison enums.
fn StencilFunc(&self, func: u32, ref_: i32, mask: u32) {
    match func {
        constants::NEVER | constants::LESS | constants::EQUAL |
        constants::LEQUAL | constants::GREATER | constants::NOTEQUAL |
        constants::GEQUAL | constants::ALWAYS =>
            self.ipc_renderer
                .send(CanvasMsg::WebGL(WebGLCommand::StencilFunc(func, ref_, mask)))
                .unwrap(),
        _ => self.webgl_error(InvalidEnum),
    }
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.3
fn StencilFuncSeparate(&self, face: u32, func: u32, ref_: i32, mask: u32) {
    match face {
        constants::FRONT | constants::BACK | constants::FRONT_AND_BACK => (),
        _ => return self.webgl_error(InvalidEnum),
    }

    match func {
        constants::NEVER | constants::LESS | constants::EQUAL |
        constants::LEQUAL | constants::GREATER | constants::NOTEQUAL |
        constants::GEQUAL | constants::ALWAYS =>
            self.ipc_renderer
                .send(CanvasMsg::WebGL(WebGLCommand::StencilFuncSeparate(face, func, ref_, mask)))
                .unwrap(),
        _ => self.webgl_error(InvalidEnum),
    }
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.3
fn StencilMask(&self, mask: u32) {
    self.ipc_renderer
        .send(CanvasMsg::WebGL(WebGLCommand::StencilMask(mask)))
        .unwrap()
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.3
fn StencilMaskSeparate(&self, face: u32, mask: u32) {
    match face {
        constants::FRONT | constants::BACK | constants::FRONT_AND_BACK =>
            self.ipc_renderer
                .send(CanvasMsg::WebGL(WebGLCommand::StencilMaskSeparate(face, mask)))
                .unwrap(),
        _ => return self.webgl_error(InvalidEnum),
    }
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.3
//
// All three stencil actions must be valid enums before anything is sent.
fn StencilOp(&self, fail: u32, zfail: u32, zpass: u32) {
    if self.validate_stencil_actions(fail) && self.validate_stencil_actions(zfail) &&
       self.validate_stencil_actions(zpass) {
        self.ipc_renderer
            .send(CanvasMsg::WebGL(WebGLCommand::StencilOp(fail, zfail, zpass)))
            .unwrap()
    } else {
        self.webgl_error(InvalidEnum)
    }
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.3
fn StencilOpSeparate(&self, face: u32, fail: u32, zfail: u32, zpass: u32) {
    match face {
        constants::FRONT | constants::BACK | constants::FRONT_AND_BACK => (),
        _ => return self.webgl_error(InvalidEnum),
    }

    if self.validate_stencil_actions(fail) && self.validate_stencil_actions(zfail) &&
       self.validate_stencil_actions(zpass) {
        self.ipc_renderer
            .send(CanvasMsg::WebGL(WebGLCommand::StencilOpSeparate(face, fail, zfail, zpass)))
            .unwrap()
    } else {
        self.webgl_error(InvalidEnum)
    }
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.9
//
// Linking errors surface as WebGL errors; a null program is silently ignored.
fn LinkProgram(&self, program: Option<&WebGLProgram>) {
    if let Some(program) = program {
        if let Err(e) = program.link() {
            self.webgl_error(e);
        }
    }
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.9
fn ShaderSource(&self, shader: Option<&WebGLShader>, source: DOMString) {
    if let Some(shader) = shader {
        shader.set_source(source)
    }
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.9
fn GetShaderSource(&self, shader: Option<&WebGLShader>) -> Option<DOMString> {
    shader.and_then(|s| s.source())
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10
//
// Scalar/array uniform setters: validate the location and value arity/type
// against the active program, then forward to the renderer.
fn Uniform1f(&self,
             uniform: Option<&WebGLUniformLocation>,
             val: f32) {
    if self.validate_uniform_parameters(uniform, UniformSetterType::Float, &[val]) {
        self.ipc_renderer
            .send(CanvasMsg::WebGL(WebGLCommand::Uniform1f(uniform.unwrap().id(), val)))
            .unwrap()
    }
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10
fn Uniform1i(&self,
             uniform: Option<&WebGLUniformLocation>,
             val: i32) {
    if self.validate_uniform_parameters(uniform, UniformSetterType::Int, &[val]) {
        self.ipc_renderer
            .send(CanvasMsg::WebGL(WebGLCommand::Uniform1i(uniform.unwrap().id(), val)))
            .unwrap()
    }
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10
#[allow(unsafe_code)]
fn Uniform1iv(&self,
              cx: *mut JSContext,
              uniform: Option<&WebGLUniformLocation>,
              data: *mut JSObject) -> Fallible<()> {
    assert!(!data.is_null());
    let data_vec = try!(unsafe { typed_array_or_sequence_to_vec::<i32>(cx, data, ConversionBehavior::Default) });

    if self.validate_uniform_parameters(uniform, UniformSetterType::Int, &data_vec) {
        self.ipc_renderer
            .send(CanvasMsg::WebGL(WebGLCommand::Uniform1iv(uniform.unwrap().id(), data_vec)))
            .unwrap()
    }

    Ok(())
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10
#[allow(unsafe_code)]
fn Uniform1fv(&self,
              cx: *mut JSContext,
              uniform: Option<&WebGLUniformLocation>,
              data: *mut JSObject) -> Fallible<()> {
    assert!(!data.is_null());
    let data_vec = try!(unsafe { typed_array_or_sequence_to_vec::<f32>(cx, data, ()) });

    if self.validate_uniform_parameters(uniform, UniformSetterType::Float, &data_vec) {
        self.ipc_renderer
            .send(CanvasMsg::WebGL(WebGLCommand::Uniform1fv(uniform.unwrap().id(), data_vec)))
            .unwrap()
    }

    Ok(())
}
https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10 fn Uniform2f(&self, uniform: Option<&WebGLUniformLocation>, x: f32, y: f32) { if self.validate_uniform_parameters(uniform, UniformSetterType::FloatVec2, &[x, y]) { self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::Uniform2f(uniform.unwrap().id(), x, y))) .unwrap() } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10 #[allow(unsafe_code)] fn Uniform2fv(&self, cx: *mut JSContext, uniform: Option<&WebGLUniformLocation>, data: *mut JSObject) -> Fallible<()> { assert!(!data.is_null()); let data_vec = try!(unsafe { typed_array_or_sequence_to_vec::<f32>(cx, data, ()) }); if self.validate_uniform_parameters(uniform, UniformSetterType::FloatVec2, &data_vec) { self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::Uniform2fv(uniform.unwrap().id(), data_vec))) .unwrap() } Ok(()) } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10 fn Uniform2i(&self, uniform: Option<&WebGLUniformLocation>, x: i32, y: i32) { if self.validate_uniform_parameters(uniform, UniformSetterType::IntVec2, &[x, y]) { self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::Uniform2i(uniform.unwrap().id(), x, y))) .unwrap() } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10 #[allow(unsafe_code)] fn Uniform2iv(&self, cx: *mut JSContext, uniform: Option<&WebGLUniformLocation>, data: *mut JSObject) -> Fallible<()> { assert!(!data.is_null()); let data_vec = try!(unsafe { typed_array_or_sequence_to_vec::<i32>(cx, data, ConversionBehavior::Default) }); if self.validate_uniform_parameters(uniform, UniformSetterType::IntVec2, &data_vec) { self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::Uniform2iv(uniform.unwrap().id(), data_vec))) .unwrap() } Ok(()) } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10 fn Uniform3f(&self, uniform: Option<&WebGLUniformLocation>, x: f32, y: f32, z: f32) { if self.validate_uniform_parameters(uniform, UniformSetterType::FloatVec3, &[x, 
y, z]) { self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::Uniform3f(uniform.unwrap().id(), x, y, z))) .unwrap() } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10 #[allow(unsafe_code)] fn Uniform3fv(&self, cx: *mut JSContext, uniform: Option<&WebGLUniformLocation>, data: *mut JSObject) -> Fallible<()> { assert!(!data.is_null()); let data_vec = try!(unsafe { typed_array_or_sequence_to_vec::<f32>(cx, data, ()) }); if self.validate_uniform_parameters(uniform, UniformSetterType::FloatVec3, &data_vec) { self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::Uniform3fv(uniform.unwrap().id(), data_vec))) .unwrap() } Ok(()) } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10 fn Uniform3i(&self, uniform: Option<&WebGLUniformLocation>, x: i32, y: i32, z: i32) { if self.validate_uniform_parameters(uniform, UniformSetterType::IntVec3, &[x, y, z]) { self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::Uniform3i(uniform.unwrap().id(), x, y, z))) .unwrap() } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10 #[allow(unsafe_code)] fn Uniform3iv(&self, cx: *mut JSContext, uniform: Option<&WebGLUniformLocation>, data: *mut JSObject) -> Fallible<()> { assert!(!data.is_null()); let data_vec = try!(unsafe { typed_array_or_sequence_to_vec::<i32>(cx, data, ConversionBehavior::Default) }); if self.validate_uniform_parameters(uniform, UniformSetterType::IntVec3, &data_vec) { self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::Uniform3iv(uniform.unwrap().id(), data_vec))) .unwrap() } Ok(()) } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10 fn Uniform4i(&self, uniform: Option<&WebGLUniformLocation>, x: i32, y: i32, z: i32, w: i32) { if self.validate_uniform_parameters(uniform, UniformSetterType::IntVec4, &[x, y, z, w]) { self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::Uniform4i(uniform.unwrap().id(), x, y, z, w))) .unwrap() } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10 
#[allow(unsafe_code)] fn Uniform4iv(&self, cx: *mut JSContext, uniform: Option<&WebGLUniformLocation>, data: *mut JSObject) -> Fallible<()> { assert!(!data.is_null()); let data_vec = try!(unsafe { typed_array_or_sequence_to_vec::<i32>(cx, data, ConversionBehavior::Default) }); if self.validate_uniform_parameters(uniform, UniformSetterType::IntVec4, &data_vec) { self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::Uniform4iv(uniform.unwrap().id(), data_vec))) .unwrap() } Ok(()) } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10 fn Uniform4f(&self, uniform: Option<&WebGLUniformLocation>, x: f32, y: f32, z: f32, w: f32) { if self.validate_uniform_parameters(uniform, UniformSetterType::FloatVec4, &[x, y, z, w]) { self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::Uniform4f(uniform.unwrap().id(), x, y, z, w))) .unwrap() } } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10 #[allow(unsafe_code)] fn Uniform4fv(&self, cx: *mut JSContext, uniform: Option<&WebGLUniformLocation>, data: *mut JSObject) -> Fallible<()> { assert!(!data.is_null()); let data_vec = try!(unsafe { typed_array_or_sequence_to_vec::<f32>(cx, data, ()) }); if self.validate_uniform_parameters(uniform, UniformSetterType::FloatVec4, &data_vec) { self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::Uniform4fv(uniform.unwrap().id(), data_vec))) .unwrap() } Ok(()) } // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10 #[allow(unsafe_code)] fn UniformMatrix2fv(&self, cx: *mut JSContext, uniform: Option<&WebGLUniformLocation>, transpose: bool, data: *mut JSObject) -> Fallible<()> { assert!(!data.is_null()); let data_vec = try!(unsafe { typed_array_or_sequence_to_vec::<f32>(cx, data, ()) }); if self.validate_uniform_parameters(uniform, UniformSetterType::FloatMat2, &data_vec) { self.ipc_renderer .send(CanvasMsg::WebGL(WebGLCommand::UniformMatrix2fv(uniform.unwrap().id(), transpose, data_vec))) .unwrap() } Ok(()) } // 
// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10
#[allow(unsafe_code)]
// Sets a mat3 uniform from a typed array or sequence (9 floats expected).
fn UniformMatrix3fv(&self,
                    cx: *mut JSContext,
                    uniform: Option<&WebGLUniformLocation>,
                    transpose: bool,
                    data: *mut JSObject) -> Fallible<()> {
    assert!(!data.is_null());
    let data_vec = try!(unsafe { typed_array_or_sequence_to_vec::<f32>(cx, data, ()) });
    if self.validate_uniform_parameters(uniform, UniformSetterType::FloatMat3, &data_vec) {
        self.ipc_renderer
            .send(CanvasMsg::WebGL(WebGLCommand::UniformMatrix3fv(uniform.unwrap().id(), transpose, data_vec)))
            .unwrap()
    }
    Ok(())
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10
#[allow(unsafe_code)]
// Sets a mat4 uniform from a typed array or sequence (16 floats expected).
fn UniformMatrix4fv(&self,
                    cx: *mut JSContext,
                    uniform: Option<&WebGLUniformLocation>,
                    transpose: bool,
                    data: *mut JSObject) -> Fallible<()> {
    assert!(!data.is_null());
    let data_vec = try!(unsafe { typed_array_or_sequence_to_vec::<f32>(cx, data, ()) });
    if self.validate_uniform_parameters(uniform, UniformSetterType::FloatMat4, &data_vec) {
        self.ipc_renderer
            .send(CanvasMsg::WebGL(WebGLCommand::UniformMatrix4fv(uniform.unwrap().id(), transpose, data_vec)))
            .unwrap()
    }
    Ok(())
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.9
//
// On success the program is remembered as the current program so later
// uniform validation can check against it.
fn UseProgram(&self, program: Option<&WebGLProgram>) {
    if let Some(program) = program {
        match program.use_program() {
            Ok(()) => self.current_program.set(Some(program)),
            Err(e) => self.webgl_error(e),
        }
    }
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.9
fn ValidateProgram(&self, program: Option<&WebGLProgram>) {
    if let Some(program) = program {
        if let Err(e) = program.validate() {
            self.webgl_error(e);
        }
    }
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10
//
// The VertexAttrib[1-4]f{,v} family fills unspecified components with the
// GL defaults (0, 0, 0, 1) and delegates to the shared vertex_attrib helper.
fn VertexAttrib1f(&self, indx: u32, x: f32) {
    self.vertex_attrib(indx, x, 0f32, 0f32, 1f32)
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10
#[allow(unsafe_code)]
fn VertexAttrib1fv(&self, cx: *mut JSContext, indx: u32, data: *mut JSObject) -> Fallible<()> {
    assert!(!data.is_null());
    let data_vec = try!(unsafe { typed_array_or_sequence_to_vec::<f32>(cx, data, ()) });
    // The source array must supply at least as many values as the setter consumes.
    if data_vec.len() < 1 {
        return Ok(self.webgl_error(InvalidOperation));
    }
    self.vertex_attrib(indx, data_vec[0], 0f32, 0f32, 1f32);
    Ok(())
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10
fn VertexAttrib2f(&self, indx: u32, x: f32, y: f32) {
    self.vertex_attrib(indx, x, y, 0f32, 1f32)
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10
#[allow(unsafe_code)]
fn VertexAttrib2fv(&self, cx: *mut JSContext, indx: u32, data: *mut JSObject) -> Fallible<()> {
    assert!(!data.is_null());
    let data_vec = try!(unsafe { typed_array_or_sequence_to_vec::<f32>(cx, data, ()) });
    if data_vec.len() < 2 {
        return Ok(self.webgl_error(InvalidOperation));
    }
    self.vertex_attrib(indx, data_vec[0], data_vec[1], 0f32, 1f32);
    Ok(())
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10
fn VertexAttrib3f(&self, indx: u32, x: f32, y: f32, z: f32) {
    self.vertex_attrib(indx, x, y, z, 1f32)
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10
#[allow(unsafe_code)]
fn VertexAttrib3fv(&self, cx: *mut JSContext, indx: u32, data: *mut JSObject) -> Fallible<()> {
    assert!(!data.is_null());
    let data_vec = try!(unsafe { typed_array_or_sequence_to_vec::<f32>(cx, data, ()) });
    if data_vec.len() < 3 {
        return Ok(self.webgl_error(InvalidOperation));
    }
    self.vertex_attrib(indx, data_vec[0], data_vec[1], data_vec[2], 1f32);
    Ok(())
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10
fn VertexAttrib4f(&self, indx: u32, x: f32, y: f32, z: f32, w: f32) {
    self.vertex_attrib(indx, x, y, z, w)
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10
#[allow(unsafe_code)]
fn VertexAttrib4fv(&self, cx: *mut JSContext, indx: u32, data: *mut JSObject) -> Fallible<()> {
    assert!(!data.is_null());
    let data_vec = try!(unsafe { typed_array_or_sequence_to_vec::<f32>(cx, data, ()) });
    if data_vec.len() < 4 {
        return Ok(self.webgl_error(InvalidOperation));
    }
    self.vertex_attrib(indx, data_vec[0], data_vec[1], data_vec[2], data_vec[3]);
    Ok(())
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.10
//
// Configures the layout of the currently bound ARRAY_BUFFER for the given
// attribute index.
fn VertexAttribPointer(&self, attrib_id: u32, size: i32, data_type: u32,
                       normalized: bool, stride: i32, offset: i64) {
    // NOTE(review): GLES generates INVALID_VALUE when index >= MAX_VERTEX_ATTRIBS;
    // this check uses '>' so attrib_id == max slips through — confirm whether
    // limits.max_vertex_attribs is a count or a maximum index.
    if attrib_id > self.limits.max_vertex_attribs {
        return self.webgl_error(InvalidValue);
    }

    // GLES spec: If offset or stride  is negative, an INVALID_VALUE error will be generated
    // WebGL spec: the maximum supported stride is 255
    if stride < 0 || stride > 255 || offset < 0 {
        return self.webgl_error(InvalidValue);
    }
    if size < 1 || size > 4 {
        return self.webgl_error(InvalidValue);
    }
    // An ARRAY_BUFFER must be bound; the pointer refers into it.
    if self.bound_buffer_array.get().is_none() {
        return self.webgl_error(InvalidOperation);
    }

    // stride and offset must be multiple of data_type
    match data_type {
        constants::BYTE | constants::UNSIGNED_BYTE => {},
        constants::SHORT | constants::UNSIGNED_SHORT => {
            if offset % 2 > 0 || stride % 2 > 0 {
                return self.webgl_error(InvalidOperation);
            }
        },
        constants::FLOAT => {
            if offset % 4 > 0 || stride % 4 > 0 {
                return self.webgl_error(InvalidOperation);
            }
        },
        _ => return self.webgl_error(InvalidEnum),
    }

    let msg = CanvasMsg::WebGL(
        WebGLCommand::VertexAttribPointer(attrib_id, size, data_type, normalized, stride, offset as u32));
    self.ipc_renderer.send(msg).unwrap()
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.4
fn Viewport(&self, x: i32, y: i32, width: i32, height: i32) {
    if width < 0 || height < 0 {
        return self.webgl_error(InvalidValue)
    }

    self.ipc_renderer
        .send(CanvasMsg::WebGL(WebGLCommand::Viewport(x, y, width, height)))
        .unwrap()
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.8
//
// texImage2D overload taking raw pixel data from an ArrayBufferView (may be
// null, in which case a zero-filled buffer of the required size is uploaded).
#[allow(unsafe_code)]
fn TexImage2D(&self,
              _cx: *mut JSContext,
              target: u32,
              level: i32,
              internal_format: u32,
              width: i32,
              height: i32,
              border: i32,
              format: u32,
              data_type: u32,
              data_ptr: *mut JSObject) -> Fallible<()> {
    let data = if data_ptr.is_null() {
        None
    } else {
        Some(try!(unsafe { fallible_array_buffer_view_to_vec::<u8>(data_ptr) }))
    };

    let validator = TexImage2DValidator::new(self, target, level,
                                             internal_format, width, height,
                                             border, format, data_type);

    let TexImage2DValidatorResult {
        texture,
        target,
        width,
        height,
        level,
        border,
        format,
        data_type,
    } = match validator.validate() {
        Ok(result) => result,
        Err(_) => return Ok(()), // NB: The validator sets the correct error for us.
    };

    let expected_byte_length = match unsafe { self.validate_tex_image_2d_data(width, height,
                                                                              format, data_type,
                                                                              data_ptr) } {
        Ok(byte_length) => byte_length,
        Err(()) => return Ok(()),
    };

    // If data is null, a buffer of sufficient size
    // initialized to 0 is passed.
    let buff = match data {
        None => vec![0u8; expected_byte_length as usize],
        Some(data) => data,
    };

    // From the WebGL spec:
    //
    //     "If pixels is non-null but its size is less than what
    //      is required by the specified width, height, format,
    //      type, and pixel storage parameters, generates an
    //      INVALID_OPERATION error."
    if buff.len() < expected_byte_length as usize {
        return Ok(self.webgl_error(InvalidOperation));
    }

    self.tex_image_2d(texture, target, data_type, format,
                      level, width, height, border, buff);
    Ok(())
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.8
//
// texImage2D overload taking a DOM image source (ImageData, <img>, <canvas>,
// <video>); dimensions come from the decoded source.
fn TexImage2D_(&self,
               target: u32,
               level: i32,
               internal_format: u32,
               format: u32,
               data_type: u32,
               source: Option<ImageDataOrHTMLImageElementOrHTMLCanvasElementOrHTMLVideoElement>)
               -> Fallible<()> {
    // Get pixels from image source
    let (pixels, size) = match self.get_image_pixels(source) {
        Ok((pixels, size)) => (pixels, size),
        Err(_) => return Ok(()),
    };

    let validator = TexImage2DValidator::new(self,
                                             target, level, internal_format,
                                             size.width, size.height,
                                             0, format, data_type);

    let TexImage2DValidatorResult {
        texture,
        target,
        width,
        height,
        level,
        border,
        format,
        data_type,
    } = match validator.validate() {
        Ok(result) => result,
        Err(_) => return Ok(()), // NB: The validator sets the correct error for us.
    };

    self.tex_image_2d(texture, target, data_type, format,
                      level, width, height, border, pixels);
    Ok(())
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.8
//
// texSubImage2D overload taking raw pixel data.
// NOTE(review): `format` is also passed as the validator's internal_format
// argument (sub-image updates carry no internal format of their own) — confirm
// the validator treats it that way.
#[allow(unsafe_code)]
fn TexSubImage2D(&self,
                 _cx: *mut JSContext,
                 target: u32,
                 level: i32,
                 xoffset: i32,
                 yoffset: i32,
                 width: i32,
                 height: i32,
                 format: u32,
                 data_type: u32,
                 data_ptr: *mut JSObject) -> Fallible<()> {
    let data = if data_ptr.is_null() {
        None
    } else {
        Some(try!(unsafe { fallible_array_buffer_view_to_vec::<u8>(data_ptr) }))
    };

    let validator = TexImage2DValidator::new(self, target, level,
                                             format, width, height,
                                             0, format, data_type);
    let TexImage2DValidatorResult {
        texture,
        target,
        width,
        height,
        level,
        format,
        data_type,
        ..
    } = match validator.validate() {
        Ok(result) => result,
        Err(_) => return Ok(()), // NB: The validator sets the correct error for us.
    };

    let expected_byte_length = match unsafe { self.validate_tex_image_2d_data(width, height,
                                                                              format, data_type,
                                                                              data_ptr) } {
        Ok(byte_length) => byte_length,
        Err(()) => return Ok(()),
    };

    // If data is null, a buffer of sufficient size
    // initialized to 0 is passed.
    let buff = match data {
        None => vec![0u8; expected_byte_length as usize],
        Some(data) => data,
    };

    // From the WebGL spec:
    //
    //     "If pixels is non-null but its size is less than what
    //      is required by the specified width, height, format,
    //      type, and pixel storage parameters, generates an
    //      INVALID_OPERATION error."
    if buff.len() < expected_byte_length as usize {
        return Ok(self.webgl_error(InvalidOperation));
    }

    self.tex_sub_image_2d(texture, target, level, xoffset, yoffset,
                          width, height, format, data_type, buff);
    Ok(())
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.8
//
// texSubImage2D overload taking a DOM image source.
fn TexSubImage2D_(&self,
                  target: u32,
                  level: i32,
                  xoffset: i32,
                  yoffset: i32,
                  format: u32,
                  data_type: u32,
                  source: Option<ImageDataOrHTMLImageElementOrHTMLCanvasElementOrHTMLVideoElement>)
                  -> Fallible<()> {
    let (pixels, size) = match self.get_image_pixels(source) {
        Ok((pixels, size)) => (pixels, size),
        Err(_) => return Ok(()),
    };

    let validator = TexImage2DValidator::new(self, target, level, format,
                                             size.width, size.height,
                                             0, format, data_type);
    let TexImage2DValidatorResult {
        texture,
        target,
        width,
        height,
        level,
        format,
        data_type,
        ..
    } = match validator.validate() {
        Ok(result) => result,
        Err(_) => return Ok(()), // NB: The validator sets the correct error for us.
    };

    self.tex_sub_image_2d(texture, target, level, xoffset, yoffset,
                          width, height, format, data_type, pixels);
    Ok(())
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.8
fn TexParameterf(&self, target: u32, name: u32, value: f32) {
    self.tex_parameter(target, name, TexParameterValue::Float(value))
}

// https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.14.8
fn TexParameteri(&self, target: u32, name: u32, value: i32) {
    self.tex_parameter(target, name, TexParameterValue::Int(value))
}

// Returns the completeness status of the currently bound framebuffer; the
// default (null) framebuffer is always reported complete.
fn CheckFramebufferStatus(&self, target: u32) -> u32 {
    // From the GLES 2.0.25 spec, 4.4 ("Framebuffer Objects"):
    //
    //    "If target is not FRAMEBUFFER, INVALID_ENUM is
    //     generated. If CheckFramebufferStatus generates an
    //     error, 0 is returned."
    if target != constants::FRAMEBUFFER {
        self.webgl_error(InvalidEnum);
        return 0;
    }

    match self.bound_framebuffer.get() {
        Some(fb) => return fb.check_status(),
        None => return constants::FRAMEBUFFER_COMPLETE,
    }
}

// Allocates storage for the currently bound renderbuffer.
fn RenderbufferStorage(&self, target: u32, internal_format: u32, width: i32, height: i32) {
    // From the GLES 2.0.25 spec:
    //
    //    "target must be RENDERBUFFER."
    //
    // NOTE(review): the GLES man pages specify INVALID_ENUM for a bad target
    // here; InvalidOperation looks suspect — confirm against the conformance
    // suite before changing.
    if target != constants::RENDERBUFFER {
        return self.webgl_error(InvalidOperation)
    }

    // From the GLES 2.0.25 spec:
    //
    //    "If either width or height is greater than the value of
    //     MAX_RENDERBUFFER_SIZE , the error INVALID_VALUE is
    //     generated."
    //
    // and we have to throw out negative-size values as well just
    // like for TexImage.
    //
    // FIXME: Handle max_renderbuffer_size, which doesn't seem to
    // be in limits.
    if width < 0 || height < 0 {
        return self.webgl_error(InvalidValue);
    }

    match self.bound_renderbuffer.get() {
        Some(rb) => handle_potential_webgl_error!(self, rb.storage(internal_format, width, height)),
        None => self.webgl_error(InvalidOperation),
    };

    // FIXME: We need to clear the renderbuffer before it can be
    // accessed.  See https://github.com/servo/servo/issues/13710
}

// Attaches a renderbuffer to the currently bound framebuffer.
fn FramebufferRenderbuffer(&self, target: u32, attachment: u32,
                           renderbuffertarget: u32,
                           rb: Option<&WebGLRenderbuffer>) {
    if target != constants::FRAMEBUFFER || renderbuffertarget != constants::RENDERBUFFER {
        return self.webgl_error(InvalidEnum);
    }

    match self.bound_framebuffer.get() {
        Some(fb) => handle_potential_webgl_error!(self, fb.renderbuffer(attachment, rb)),
        None => self.webgl_error(InvalidOperation),
    };
}

// Attaches a texture level to the currently bound framebuffer.
fn FramebufferTexture2D(&self, target: u32, attachment: u32,
                        textarget: u32, texture: Option<&WebGLTexture>,
                        level: i32) {
    if target != constants::FRAMEBUFFER {
        return self.webgl_error(InvalidEnum);
    }

    match self.bound_framebuffer.get() {
        Some(fb) => handle_potential_webgl_error!(self, fb.texture2d(attachment, textarget, texture, level)),
        None => self.webgl_error(InvalidOperation),
    };
}
}

// Gives the layout thread direct (unsafe) access to the renderer channel
// without touching the DOM object from the wrong thread.
pub trait LayoutCanvasWebGLRenderingContextHelpers {
    #[allow(unsafe_code)]
    unsafe fn get_ipc_renderer(&self) -> IpcSender<CanvasMsg>;
}

impl LayoutCanvasWebGLRenderingContextHelpers for LayoutJS<WebGLRenderingContext> {
    #[allow(unsafe_code)]
    unsafe fn get_ipc_renderer(&self) -> IpcSender<CanvasMsg> {
        (*self.unsafe_get()).ipc_renderer.clone()
    }
}

// The GLSL type a uniform setter expects; used to validate uniform[1-4][if]{,v}
// calls against the active program's declared uniform types.
#[derive(Debug, PartialEq)]
pub enum UniformSetterType {
    Int,
    IntVec2,
    IntVec3,
    IntVec4,
    Float,
    FloatVec2,
    FloatVec3,
    FloatVec4,
    FloatMat2,
    FloatMat3,
    FloatMat4,
}

impl UniformSetterType {
    // Number of scalar components a value of this type occupies.
    pub fn element_count(&self) -> usize {
        match *self {
            UniformSetterType::Int => 1,
            UniformSetterType::IntVec2 => 2,
            UniformSetterType::IntVec3 => 3,
            UniformSetterType::IntVec4 => 4,
            UniformSetterType::Float => 1,
            UniformSetterType::FloatVec2 => 2,
            UniformSetterType::FloatVec3 => 3,
            UniformSetterType::FloatVec4 => 4,
            UniformSetterType::FloatMat2 => 4,
            UniformSetterType::FloatMat3 => 9,
            UniformSetterType::FloatMat4 => 16,
        }
    }

    // Whether this setter may be used on a uniform declared with GL type
    // `gl_type`; exact matches plus the sampler-as-int and bool-as-float
    // special cases are accepted.
    pub fn is_compatible_with(&self, gl_type: u32) -> bool {
        gl_type == self.as_gl_constant() || match *self {
            // Sampler uniform variables have an index value (the index of the
            // texture), and as such they have to be set as ints
            UniformSetterType::Int => gl_type == constants::SAMPLER_2D ||
                                      gl_type == constants::SAMPLER_CUBE,
            // Don't ask me why, but it seems we must allow setting bool
            // uniforms with uniform1f.
            //
            // See the WebGL conformance test
            // conformance/uniforms/gl-uniform-bool.html
            UniformSetterType::Float => gl_type == constants::BOOL,
            UniformSetterType::FloatVec2 => gl_type == constants::BOOL_VEC2,
            UniformSetterType::FloatVec3 => gl_type == constants::BOOL_VEC3,
            UniformSetterType::FloatVec4 => gl_type == constants::BOOL_VEC4,
            _ => false,
        }
    }

    // The GL enum corresponding to this setter type.
    fn as_gl_constant(&self) -> u32 {
        match *self {
            UniformSetterType::Int => constants::INT,
            UniformSetterType::IntVec2 => constants::INT_VEC2,
            UniformSetterType::IntVec3 => constants::INT_VEC3,
            UniformSetterType::IntVec4 => constants::INT_VEC4,
            UniformSetterType::Float => constants::FLOAT,
            UniformSetterType::FloatVec2 => constants::FLOAT_VEC2,
            UniformSetterType::FloatVec3 => constants::FLOAT_VEC3,
            UniformSetterType::FloatVec4 => constants::FLOAT_VEC4,
            UniformSetterType::FloatMat2 => constants::FLOAT_MAT2,
            UniformSetterType::FloatMat3 => constants::FLOAT_MAT3,
            UniformSetterType::FloatMat4 => constants::FLOAT_MAT4,
        }
    }
}<|fim▁end|>
<|file_name|>config.go<|end_file_name|><|fim▁begin|>// Copyright 2022 Google LLC. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package alpha import ( "context" "crypto/sha256" "encoding/json" "fmt" "time" "google.golang.org/api/googleapi" "github.com/GoogleCloudPlatform/declarative-resource-client-library/dcl" ) type Config struct { SignIn *ConfigSignIn `json:"signIn"` Notification *ConfigNotification `json:"notification"` Quota *ConfigQuota `json:"quota"` Monitoring *ConfigMonitoring `json:"monitoring"` MultiTenant *ConfigMultiTenant `json:"multiTenant"` AuthorizedDomains []string `json:"authorizedDomains"` Subtype *ConfigSubtypeEnum `json:"subtype"` Client *ConfigClient `json:"client"` Mfa *ConfigMfa `json:"mfa"` BlockingFunctions *ConfigBlockingFunctions `json:"blockingFunctions"` Project *string `json:"project"` } func (r *Config) String() string { return dcl.SprintResource(r) } // The enum ConfigSignInEmailHashConfigAlgorithmEnum. type ConfigSignInEmailHashConfigAlgorithmEnum string // ConfigSignInEmailHashConfigAlgorithmEnumRef returns a *ConfigSignInEmailHashConfigAlgorithmEnum with the value of string s // If the empty string is provided, nil is returned. func ConfigSignInEmailHashConfigAlgorithmEnumRef(s string) *ConfigSignInEmailHashConfigAlgorithmEnum { v := ConfigSignInEmailHashConfigAlgorithmEnum(s) return &v } func (v ConfigSignInEmailHashConfigAlgorithmEnum) Validate() error { if string(v) == "" { // Empty enum is okay. 
return nil } for _, s := range []string{"HASH_ALGORITHM_UNSPECIFIED", "HMAC_SHA256", "HMAC_SHA1", "HMAC_MD5", "SCRYPT", "PBKDF_SHA1", "MD5", "HMAC_SHA512", "SHA1", "BCRYPT", "PBKDF2_SHA256", "SHA256", "SHA512", "STANDARD_SCRYPT"} { if string(v) == s { return nil } } return &dcl.EnumInvalidError{ Enum: "ConfigSignInEmailHashConfigAlgorithmEnum", Value: string(v), Valid: []string{}, } } // The enum ConfigSignInHashConfigAlgorithmEnum. type ConfigSignInHashConfigAlgorithmEnum string // ConfigSignInHashConfigAlgorithmEnumRef returns a *ConfigSignInHashConfigAlgorithmEnum with the value of string s // If the empty string is provided, nil is returned. func ConfigSignInHashConfigAlgorithmEnumRef(s string) *ConfigSignInHashConfigAlgorithmEnum { v := ConfigSignInHashConfigAlgorithmEnum(s) return &v } func (v ConfigSignInHashConfigAlgorithmEnum) Validate() error { if string(v) == "" { // Empty enum is okay. return nil } for _, s := range []string{"HASH_ALGORITHM_UNSPECIFIED", "HMAC_SHA256", "HMAC_SHA1", "HMAC_MD5", "SCRYPT", "PBKDF_SHA1", "MD5", "HMAC_SHA512", "SHA1", "BCRYPT", "PBKDF2_SHA256", "SHA256", "SHA512", "STANDARD_SCRYPT"} { if string(v) == s { return nil } } return &dcl.EnumInvalidError{ Enum: "ConfigSignInHashConfigAlgorithmEnum", Value: string(v), Valid: []string{}, } } // The enum ConfigNotificationSendEmailMethodEnum. type ConfigNotificationSendEmailMethodEnum string // ConfigNotificationSendEmailMethodEnumRef returns a *ConfigNotificationSendEmailMethodEnum with the value of string s // If the empty string is provided, nil is returned. func ConfigNotificationSendEmailMethodEnumRef(s string) *ConfigNotificationSendEmailMethodEnum { v := ConfigNotificationSendEmailMethodEnum(s) return &v } func (v ConfigNotificationSendEmailMethodEnum) Validate() error { if string(v) == "" { // Empty enum is okay. 
return nil } for _, s := range []string{"METHOD_UNSPECIFIED", "DEFAULT", "CUSTOM_SMTP"} { if string(v) == s { return nil } } return &dcl.EnumInvalidError{ Enum: "ConfigNotificationSendEmailMethodEnum", Value: string(v), Valid: []string{}, } } // The enum ConfigNotificationSendEmailSmtpSecurityModeEnum. type ConfigNotificationSendEmailSmtpSecurityModeEnum string // ConfigNotificationSendEmailSmtpSecurityModeEnumRef returns a *ConfigNotificationSendEmailSmtpSecurityModeEnum with the value of string s // If the empty string is provided, nil is returned. func ConfigNotificationSendEmailSmtpSecurityModeEnumRef(s string) *ConfigNotificationSendEmailSmtpSecurityModeEnum { v := ConfigNotificationSendEmailSmtpSecurityModeEnum(s) return &v } func (v ConfigNotificationSendEmailSmtpSecurityModeEnum) Validate() error { if string(v) == "" { // Empty enum is okay. return nil } for _, s := range []string{"SECURITY_MODE_UNSPECIFIED", "SSL", "START_TLS"} { if string(v) == s { return nil } } return &dcl.EnumInvalidError{ Enum: "ConfigNotificationSendEmailSmtpSecurityModeEnum", Value: string(v), Valid: []string{}, } } // The enum ConfigEmailTemplateBodyFormatEnum. type ConfigEmailTemplateBodyFormatEnum string // ConfigEmailTemplateBodyFormatEnumRef returns a *ConfigEmailTemplateBodyFormatEnum with the value of string s // If the empty string is provided, nil is returned. func ConfigEmailTemplateBodyFormatEnumRef(s string) *ConfigEmailTemplateBodyFormatEnum { v := ConfigEmailTemplateBodyFormatEnum(s) return &v } func (v ConfigEmailTemplateBodyFormatEnum) Validate() error { if string(v) == "" { // Empty enum is okay. return nil } for _, s := range []string{"BODY_FORMAT_UNSPECIFIED", "PLAIN_TEXT", "HTML"} { if string(v) == s { return nil } } return &dcl.EnumInvalidError{ Enum: "ConfigEmailTemplateBodyFormatEnum", Value: string(v), Valid: []string{}, } } // The enum ConfigNotificationSendEmailDnsInfoCustomDomainStateEnum. 
type ConfigNotificationSendEmailDnsInfoCustomDomainStateEnum string // ConfigNotificationSendEmailDnsInfoCustomDomainStateEnumRef returns a *ConfigNotificationSendEmailDnsInfoCustomDomainStateEnum with the value of string s // If the empty string is provided, nil is returned. func ConfigNotificationSendEmailDnsInfoCustomDomainStateEnumRef(s string) *ConfigNotificationSendEmailDnsInfoCustomDomainStateEnum { v := ConfigNotificationSendEmailDnsInfoCustomDomainStateEnum(s) return &v } func (v ConfigNotificationSendEmailDnsInfoCustomDomainStateEnum) Validate() error { if string(v) == "" { // Empty enum is okay. return nil } for _, s := range []string{"VERIFICATION_STATE_UNSPECIFIED", "NOT_STARTED", "IN_PROGRESS", "FAILED", "SUCCEEDED"} { if string(v) == s { return nil } } return &dcl.EnumInvalidError{ Enum: "ConfigNotificationSendEmailDnsInfoCustomDomainStateEnum", Value: string(v), Valid: []string{}, } } // The enum ConfigSubtypeEnum. type ConfigSubtypeEnum string // ConfigSubtypeEnumRef returns a *ConfigSubtypeEnum with the value of string s // If the empty string is provided, nil is returned. func ConfigSubtypeEnumRef(s string) *ConfigSubtypeEnum { v := ConfigSubtypeEnum(s) return &v } func (v ConfigSubtypeEnum) Validate() error { if string(v) == "" { // Empty enum is okay. return nil } for _, s := range []string{"SUBTYPE_UNSPECIFIED", "IDENTITY_PLATFORM", "FIREBASE_AUTH"} { if string(v) == s { return nil } } return &dcl.EnumInvalidError{ Enum: "ConfigSubtypeEnum", Value: string(v), Valid: []string{}, } } // The enum ConfigMfaStateEnum. type ConfigMfaStateEnum string // ConfigMfaStateEnumRef returns a *ConfigMfaStateEnum with the value of string s // If the empty string is provided, nil is returned. func ConfigMfaStateEnumRef(s string) *ConfigMfaStateEnum { v := ConfigMfaStateEnum(s) return &v } func (v ConfigMfaStateEnum) Validate() error { if string(v) == "" { // Empty enum is okay. 
return nil } for _, s := range []string{"STATE_UNSPECIFIED", "DISABLED", "ENABLED", "MANDATORY"} { if string(v) == s { return nil } } return &dcl.EnumInvalidError{ Enum: "ConfigMfaStateEnum", Value: string(v), Valid: []string{}, } } type ConfigSignIn struct { empty bool `json:"-"` Email *ConfigSignInEmail `json:"email"` PhoneNumber *ConfigSignInPhoneNumber `json:"phoneNumber"` Anonymous *ConfigSignInAnonymous `json:"anonymous"` AllowDuplicateEmails *bool `json:"allowDuplicateEmails"` HashConfig *ConfigSignInHashConfig `json:"hashConfig"` } type jsonConfigSignIn ConfigSignIn func (r *ConfigSignIn) UnmarshalJSON(data []byte) error { var res jsonConfigSignIn if err := json.Unmarshal(data, &res); err != nil { return err } var m map[string]interface{} json.Unmarshal(data, &m) if len(m) == 0 { *r = *EmptyConfigSignIn } else { r.Email = res.Email r.PhoneNumber = res.PhoneNumber r.Anonymous = res.Anonymous r.AllowDuplicateEmails = res.AllowDuplicateEmails r.HashConfig = res.HashConfig } return nil } // This object is used to assert a desired state where this ConfigSignIn is // empty. Go lacks global const objects, but this object should be treated // as one. Modifying this object will have undesirable results. 
var EmptyConfigSignIn *ConfigSignIn = &ConfigSignIn{empty: true} func (r *ConfigSignIn) Empty() bool { return r.empty } func (r *ConfigSignIn) String() string { return dcl.SprintResource(r) } func (r *ConfigSignIn) HashCode() string { // Placeholder for a more complex hash method that handles ordering, etc // Hash resource body for easy comparison later hash := sha256.New().Sum([]byte(r.String())) return fmt.Sprintf("%x", hash) } type ConfigSignInEmail struct { empty bool `json:"-"` Enabled *bool `json:"enabled"` PasswordRequired *bool `json:"passwordRequired"` HashConfig *ConfigSignInEmailHashConfig `json:"hashConfig"` } type jsonConfigSignInEmail ConfigSignInEmail func (r *ConfigSignInEmail) UnmarshalJSON(data []byte) error { var res jsonConfigSignInEmail if err := json.Unmarshal(data, &res); err != nil { return err } var m map[string]interface{} json.Unmarshal(data, &m) if len(m) == 0 { *r = *EmptyConfigSignInEmail } else { r.Enabled = res.Enabled r.PasswordRequired = res.PasswordRequired r.HashConfig = res.HashConfig } return nil } // This object is used to assert a desired state where this ConfigSignInEmail is // empty. Go lacks global const objects, but this object should be treated // as one. Modifying this object will have undesirable results. 
var EmptyConfigSignInEmail *ConfigSignInEmail = &ConfigSignInEmail{empty: true} func (r *ConfigSignInEmail) Empty() bool { return r.empty } func (r *ConfigSignInEmail) String() string { return dcl.SprintResource(r) } func (r *ConfigSignInEmail) HashCode() string { // Placeholder for a more complex hash method that handles ordering, etc // Hash resource body for easy comparison later hash := sha256.New().Sum([]byte(r.String())) return fmt.Sprintf("%x", hash) } type ConfigSignInEmailHashConfig struct { empty bool `json:"-"` Algorithm *ConfigSignInEmailHashConfigAlgorithmEnum `json:"algorithm"` SignerKey *string `json:"signerKey"` SaltSeparator *string `json:"saltSeparator"` Rounds *int64 `json:"rounds"` MemoryCost *int64 `json:"memoryCost"` } type jsonConfigSignInEmailHashConfig ConfigSignInEmailHashConfig func (r *ConfigSignInEmailHashConfig) UnmarshalJSON(data []byte) error { var res jsonConfigSignInEmailHashConfig if err := json.Unmarshal(data, &res); err != nil { return err } var m map[string]interface{} json.Unmarshal(data, &m) if len(m) == 0 { *r = *EmptyConfigSignInEmailHashConfig } else { r.Algorithm = res.Algorithm r.SignerKey = res.SignerKey r.SaltSeparator = res.SaltSeparator r.Rounds = res.Rounds r.MemoryCost = res.MemoryCost } return nil } // This object is used to assert a desired state where this ConfigSignInEmailHashConfig is // empty. Go lacks global const objects, but this object should be treated // as one. Modifying this object will have undesirable results. 
var EmptyConfigSignInEmailHashConfig *ConfigSignInEmailHashConfig = &ConfigSignInEmailHashConfig{empty: true} func (r *ConfigSignInEmailHashConfig) Empty() bool { return r.empty } func (r *ConfigSignInEmailHashConfig) String() string { return dcl.SprintResource(r) } func (r *ConfigSignInEmailHashConfig) HashCode() string { // Placeholder for a more complex hash method that handles ordering, etc // Hash resource body for easy comparison later hash := sha256.New().Sum([]byte(r.String())) return fmt.Sprintf("%x", hash) } type ConfigSignInPhoneNumber struct { empty bool `json:"-"` Enabled *bool `json:"enabled"` TestPhoneNumbers map[string]string `json:"testPhoneNumbers"` } type jsonConfigSignInPhoneNumber ConfigSignInPhoneNumber func (r *ConfigSignInPhoneNumber) UnmarshalJSON(data []byte) error { var res jsonConfigSignInPhoneNumber if err := json.Unmarshal(data, &res); err != nil { return err } var m map[string]interface{} json.Unmarshal(data, &m) if len(m) == 0 { *r = *EmptyConfigSignInPhoneNumber } else { r.Enabled = res.Enabled r.TestPhoneNumbers = res.TestPhoneNumbers } return nil } // This object is used to assert a desired state where this ConfigSignInPhoneNumber is // empty. Go lacks global const objects, but this object should be treated // as one. Modifying this object will have undesirable results. 
var EmptyConfigSignInPhoneNumber *ConfigSignInPhoneNumber = &ConfigSignInPhoneNumber{empty: true} func (r *ConfigSignInPhoneNumber) Empty() bool { return r.empty } func (r *ConfigSignInPhoneNumber) String() string { return dcl.SprintResource(r) } func (r *ConfigSignInPhoneNumber) HashCode() string { // Placeholder for a more complex hash method that handles ordering, etc // Hash resource body for easy comparison later hash := sha256.New().Sum([]byte(r.String())) return fmt.Sprintf("%x", hash) } type ConfigSignInAnonymous struct { empty bool `json:"-"` Enabled *bool `json:"enabled"` } type jsonConfigSignInAnonymous ConfigSignInAnonymous func (r *ConfigSignInAnonymous) UnmarshalJSON(data []byte) error { var res jsonConfigSignInAnonymous if err := json.Unmarshal(data, &res); err != nil { return err } var m map[string]interface{} json.Unmarshal(data, &m) if len(m) == 0 { *r = *EmptyConfigSignInAnonymous } else { r.Enabled = res.Enabled } return nil } // This object is used to assert a desired state where this ConfigSignInAnonymous is // empty. Go lacks global const objects, but this object should be treated // as one. Modifying this object will have undesirable results. 
var EmptyConfigSignInAnonymous *ConfigSignInAnonymous = &ConfigSignInAnonymous{empty: true} func (r *ConfigSignInAnonymous) Empty() bool { return r.empty } func (r *ConfigSignInAnonymous) String() string { return dcl.SprintResource(r) } func (r *ConfigSignInAnonymous) HashCode() string { // Placeholder for a more complex hash method that handles ordering, etc // Hash resource body for easy comparison later hash := sha256.New().Sum([]byte(r.String())) return fmt.Sprintf("%x", hash) } type ConfigSignInHashConfig struct { empty bool `json:"-"` Algorithm *ConfigSignInHashConfigAlgorithmEnum `json:"algorithm"` SignerKey *string `json:"signerKey"` SaltSeparator *string `json:"saltSeparator"` Rounds *int64 `json:"rounds"` MemoryCost *int64 `json:"memoryCost"` } type jsonConfigSignInHashConfig ConfigSignInHashConfig func (r *ConfigSignInHashConfig) UnmarshalJSON(data []byte) error { var res jsonConfigSignInHashConfig if err := json.Unmarshal(data, &res); err != nil { return err } var m map[string]interface{} json.Unmarshal(data, &m) if len(m) == 0 { *r = *EmptyConfigSignInHashConfig } else { r.Algorithm = res.Algorithm r.SignerKey = res.SignerKey r.SaltSeparator = res.SaltSeparator r.Rounds = res.Rounds r.MemoryCost = res.MemoryCost } return nil } // This object is used to assert a desired state where this ConfigSignInHashConfig is // empty. Go lacks global const objects, but this object should be treated // as one. Modifying this object will have undesirable results. 
var EmptyConfigSignInHashConfig *ConfigSignInHashConfig = &ConfigSignInHashConfig{empty: true} func (r *ConfigSignInHashConfig) Empty() bool { return r.empty } func (r *ConfigSignInHashConfig) String() string { return dcl.SprintResource(r) } func (r *ConfigSignInHashConfig) HashCode() string { // Placeholder for a more complex hash method that handles ordering, etc // Hash resource body for easy comparison later hash := sha256.New().Sum([]byte(r.String())) return fmt.Sprintf("%x", hash) } type ConfigNotification struct { empty bool `json:"-"` SendEmail *ConfigNotificationSendEmail `json:"sendEmail"` SendSms *ConfigNotificationSendSms `json:"sendSms"` DefaultLocale *string `json:"defaultLocale"` } type jsonConfigNotification ConfigNotification func (r *ConfigNotification) UnmarshalJSON(data []byte) error { var res jsonConfigNotification if err := json.Unmarshal(data, &res); err != nil { return err } var m map[string]interface{} json.Unmarshal(data, &m) if len(m) == 0 { *r = *EmptyConfigNotification } else { r.SendEmail = res.SendEmail r.SendSms = res.SendSms r.DefaultLocale = res.DefaultLocale } return nil } // This object is used to assert a desired state where this ConfigNotification is // empty. Go lacks global const objects, but this object should be treated // as one. Modifying this object will have undesirable results. 
var EmptyConfigNotification *ConfigNotification = &ConfigNotification{empty: true} func (r *ConfigNotification) Empty() bool { return r.empty } func (r *ConfigNotification) String() string { return dcl.SprintResource(r) } func (r *ConfigNotification) HashCode() string { // Placeholder for a more complex hash method that handles ordering, etc // Hash resource body for easy comparison later hash := sha256.New().Sum([]byte(r.String())) return fmt.Sprintf("%x", hash) } type ConfigNotificationSendEmail struct { empty bool `json:"-"` Method *ConfigNotificationSendEmailMethodEnum `json:"method"` Smtp *ConfigNotificationSendEmailSmtp `json:"smtp"` ResetPasswordTemplate *ConfigEmailTemplate `json:"resetPasswordTemplate"` VerifyEmailTemplate *ConfigEmailTemplate `json:"verifyEmailTemplate"` ChangeEmailTemplate *ConfigEmailTemplate `json:"changeEmailTemplate"` CallbackUri *string `json:"callbackUri"` DnsInfo *ConfigNotificationSendEmailDnsInfo `json:"dnsInfo"` RevertSecondFactorAdditionTemplate *ConfigEmailTemplate `json:"revertSecondFactorAdditionTemplate"` } type jsonConfigNotificationSendEmail ConfigNotificationSendEmail func (r *ConfigNotificationSendEmail) UnmarshalJSON(data []byte) error { var res jsonConfigNotificationSendEmail if err := json.Unmarshal(data, &res); err != nil { return err } var m map[string]interface{} json.Unmarshal(data, &m) if len(m) == 0 { *r = *EmptyConfigNotificationSendEmail } else { r.Method = res.Method r.Smtp = res.Smtp r.ResetPasswordTemplate = res.ResetPasswordTemplate r.VerifyEmailTemplate = res.VerifyEmailTemplate r.ChangeEmailTemplate = res.ChangeEmailTemplate r.CallbackUri = res.CallbackUri r.DnsInfo = res.DnsInfo r.RevertSecondFactorAdditionTemplate = res.RevertSecondFactorAdditionTemplate } return nil } // This object is used to assert a desired state where this ConfigNotificationSendEmail is // empty. Go lacks global const objects, but this object should be treated // as one. Modifying this object will have undesirable results. 
var EmptyConfigNotificationSendEmail *ConfigNotificationSendEmail = &ConfigNotificationSendEmail{empty: true} func (r *ConfigNotificationSendEmail) Empty() bool { return r.empty } func (r *ConfigNotificationSendEmail) String() string { return dcl.SprintResource(r) } func (r *ConfigNotificationSendEmail) HashCode() string { // Placeholder for a more complex hash method that handles ordering, etc // Hash resource body for easy comparison later hash := sha256.New().Sum([]byte(r.String())) return fmt.Sprintf("%x", hash) } type ConfigNotificationSendEmailSmtp struct { empty bool `json:"-"` SenderEmail *string `json:"senderEmail"` Host *string `json:"host"` Port *int64 `json:"port"` Username *string `json:"username"` Password *string `json:"password"` SecurityMode *ConfigNotificationSendEmailSmtpSecurityModeEnum `json:"securityMode"` } type jsonConfigNotificationSendEmailSmtp ConfigNotificationSendEmailSmtp func (r *ConfigNotificationSendEmailSmtp) UnmarshalJSON(data []byte) error { var res jsonConfigNotificationSendEmailSmtp if err := json.Unmarshal(data, &res); err != nil { return err } var m map[string]interface{} json.Unmarshal(data, &m) if len(m) == 0 { *r = *EmptyConfigNotificationSendEmailSmtp } else { r.SenderEmail = res.SenderEmail r.Host = res.Host r.Port = res.Port r.Username = res.Username r.Password = res.Password r.SecurityMode = res.SecurityMode } return nil } // This object is used to assert a desired state where this ConfigNotificationSendEmailSmtp is // empty. Go lacks global const objects, but this object should be treated // as one. Modifying this object will have undesirable results. 
var EmptyConfigNotificationSendEmailSmtp *ConfigNotificationSendEmailSmtp = &ConfigNotificationSendEmailSmtp{empty: true} func (r *ConfigNotificationSendEmailSmtp) Empty() bool { return r.empty } func (r *ConfigNotificationSendEmailSmtp) String() string { return dcl.SprintResource(r) } func (r *ConfigNotificationSendEmailSmtp) HashCode() string { // Placeholder for a more complex hash method that handles ordering, etc // Hash resource body for easy comparison later hash := sha256.New().Sum([]byte(r.String())) return fmt.Sprintf("%x", hash) } type ConfigEmailTemplate struct { empty bool `json:"-"` SenderLocalPart *string `json:"senderLocalPart"` Subject *string `json:"subject"` SenderDisplayName *string `json:"senderDisplayName"` Body *string `json:"body"` BodyFormat *ConfigEmailTemplateBodyFormatEnum `json:"bodyFormat"` ReplyTo *string `json:"replyTo"` Customized *bool `json:"customized"` } type jsonConfigEmailTemplate ConfigEmailTemplate func (r *ConfigEmailTemplate) UnmarshalJSON(data []byte) error { var res jsonConfigEmailTemplate if err := json.Unmarshal(data, &res); err != nil { return err } var m map[string]interface{} json.Unmarshal(data, &m) if len(m) == 0 { *r = *EmptyConfigEmailTemplate } else { r.SenderLocalPart = res.SenderLocalPart r.Subject = res.Subject r.SenderDisplayName = res.SenderDisplayName r.Body = res.Body r.BodyFormat = res.BodyFormat r.ReplyTo = res.ReplyTo r.Customized = res.Customized } return nil } // This object is used to assert a desired state where this ConfigEmailTemplate is // empty. Go lacks global const objects, but this object should be treated // as one. Modifying this object will have undesirable results. 
var EmptyConfigEmailTemplate *ConfigEmailTemplate = &ConfigEmailTemplate{empty: true} func (r *ConfigEmailTemplate) Empty() bool { return r.empty } func (r *ConfigEmailTemplate) String() string { return dcl.SprintResource(r) } func (r *ConfigEmailTemplate) HashCode() string { // Placeholder for a more complex hash method that handles ordering, etc // Hash resource body for easy comparison later hash := sha256.New().Sum([]byte(r.String())) return fmt.Sprintf("%x", hash) } type ConfigNotificationSendEmailDnsInfo struct { empty bool `json:"-"` CustomDomain *string `json:"customDomain"` UseCustomDomain *bool `json:"useCustomDomain"` PendingCustomDomain *string `json:"pendingCustomDomain"` CustomDomainState *ConfigNotificationSendEmailDnsInfoCustomDomainStateEnum `json:"customDomainState"` DomainVerificationRequestTime *string `json:"domainVerificationRequestTime"` } type jsonConfigNotificationSendEmailDnsInfo ConfigNotificationSendEmailDnsInfo func (r *ConfigNotificationSendEmailDnsInfo) UnmarshalJSON(data []byte) error { var res jsonConfigNotificationSendEmailDnsInfo if err := json.Unmarshal(data, &res); err != nil { return err } var m map[string]interface{} json.Unmarshal(data, &m) if len(m) == 0 { *r = *EmptyConfigNotificationSendEmailDnsInfo } else { r.CustomDomain = res.CustomDomain r.UseCustomDomain = res.UseCustomDomain r.PendingCustomDomain = res.PendingCustomDomain r.CustomDomainState = res.CustomDomainState r.DomainVerificationRequestTime = res.DomainVerificationRequestTime } return nil } // This object is used to assert a desired state where this ConfigNotificationSendEmailDnsInfo is // empty. Go lacks global const objects, but this object should be treated // as one. Modifying this object will have undesirable results. 
var EmptyConfigNotificationSendEmailDnsInfo *ConfigNotificationSendEmailDnsInfo = &ConfigNotificationSendEmailDnsInfo{empty: true} func (r *ConfigNotificationSendEmailDnsInfo) Empty() bool { return r.empty } func (r *ConfigNotificationSendEmailDnsInfo) String() string { return dcl.SprintResource(r) } func (r *ConfigNotificationSendEmailDnsInfo) HashCode() string { // Placeholder for a more complex hash method that handles ordering, etc // Hash resource body for easy comparison later hash := sha256.New().Sum([]byte(r.String())) return fmt.Sprintf("%x", hash) } type ConfigNotificationSendSms struct { empty bool `json:"-"` UseDeviceLocale *bool `json:"useDeviceLocale"` SmsTemplate *ConfigNotificationSendSmsSmsTemplate `json:"smsTemplate"` } type jsonConfigNotificationSendSms ConfigNotificationSendSms func (r *ConfigNotificationSendSms) UnmarshalJSON(data []byte) error { var res jsonConfigNotificationSendSms if err := json.Unmarshal(data, &res); err != nil { return err } var m map[string]interface{} json.Unmarshal(data, &m) if len(m) == 0 { *r = *EmptyConfigNotificationSendSms } else { r.UseDeviceLocale = res.UseDeviceLocale r.SmsTemplate = res.SmsTemplate } return nil } // This object is used to assert a desired state where this ConfigNotificationSendSms is // empty. Go lacks global const objects, but this object should be treated // as one. Modifying this object will have undesirable results. 
var EmptyConfigNotificationSendSms *ConfigNotificationSendSms = &ConfigNotificationSendSms{empty: true} func (r *ConfigNotificationSendSms) Empty() bool { return r.empty } func (r *ConfigNotificationSendSms) String() string { return dcl.SprintResource(r) } func (r *ConfigNotificationSendSms) HashCode() string { // Placeholder for a more complex hash method that handles ordering, etc // Hash resource body for easy comparison later hash := sha256.New().Sum([]byte(r.String())) return fmt.Sprintf("%x", hash) } type ConfigNotificationSendSmsSmsTemplate struct { empty bool `json:"-"` Content *string `json:"content"` } type jsonConfigNotificationSendSmsSmsTemplate ConfigNotificationSendSmsSmsTemplate func (r *ConfigNotificationSendSmsSmsTemplate) UnmarshalJSON(data []byte) error { var res jsonConfigNotificationSendSmsSmsTemplate if err := json.Unmarshal(data, &res); err != nil { return err } var m map[string]interface{} json.Unmarshal(data, &m) if len(m) == 0 { *r = *EmptyConfigNotificationSendSmsSmsTemplate } else { r.Content = res.Content } return nil } // This object is used to assert a desired state where this ConfigNotificationSendSmsSmsTemplate is // empty. Go lacks global const objects, but this object should be treated // as one. Modifying this object will have undesirable results. 
var EmptyConfigNotificationSendSmsSmsTemplate *ConfigNotificationSendSmsSmsTemplate = &ConfigNotificationSendSmsSmsTemplate{empty: true} func (r *ConfigNotificationSendSmsSmsTemplate) Empty() bool { return r.empty } func (r *ConfigNotificationSendSmsSmsTemplate) String() string { return dcl.SprintResource(r) } func (r *ConfigNotificationSendSmsSmsTemplate) HashCode() string { // Placeholder for a more complex hash method that handles ordering, etc // Hash resource body for easy comparison later hash := sha256.New().Sum([]byte(r.String())) return fmt.Sprintf("%x", hash) } type ConfigQuota struct { empty bool `json:"-"` SignUpQuotaConfig *ConfigQuotaSignUpQuotaConfig `json:"signUpQuotaConfig"` } type jsonConfigQuota ConfigQuota func (r *ConfigQuota) UnmarshalJSON(data []byte) error { var res jsonConfigQuota if err := json.Unmarshal(data, &res); err != nil { return err } var m map[string]interface{} json.Unmarshal(data, &m) if len(m) == 0 { *r = *EmptyConfigQuota } else { r.SignUpQuotaConfig = res.SignUpQuotaConfig } return nil } // This object is used to assert a desired state where this ConfigQuota is // empty. Go lacks global const objects, but this object should be treated // as one. Modifying this object will have undesirable results. 
var EmptyConfigQuota *ConfigQuota = &ConfigQuota{empty: true} func (r *ConfigQuota) Empty() bool { return r.empty } func (r *ConfigQuota) String() string { return dcl.SprintResource(r) } func (r *ConfigQuota) HashCode() string { // Placeholder for a more complex hash method that handles ordering, etc // Hash resource body for easy comparison later hash := sha256.New().Sum([]byte(r.String())) return fmt.Sprintf("%x", hash) } type ConfigQuotaSignUpQuotaConfig struct { empty bool `json:"-"` Quota *int64 `json:"quota"` StartTime *string `json:"startTime"` QuotaDuration *string `json:"quotaDuration"` } type jsonConfigQuotaSignUpQuotaConfig ConfigQuotaSignUpQuotaConfig func (r *ConfigQuotaSignUpQuotaConfig) UnmarshalJSON(data []byte) error { var res jsonConfigQuotaSignUpQuotaConfig if err := json.Unmarshal(data, &res); err != nil { return err } var m map[string]interface{} json.Unmarshal(data, &m) if len(m) == 0 { *r = *EmptyConfigQuotaSignUpQuotaConfig } else { r.Quota = res.Quota r.StartTime = res.StartTime r.QuotaDuration = res.QuotaDuration } return nil } // This object is used to assert a desired state where this ConfigQuotaSignUpQuotaConfig is // empty. Go lacks global const objects, but this object should be treated // as one. Modifying this object will have undesirable results. 
var EmptyConfigQuotaSignUpQuotaConfig *ConfigQuotaSignUpQuotaConfig = &ConfigQuotaSignUpQuotaConfig{empty: true} func (r *ConfigQuotaSignUpQuotaConfig) Empty() bool { return r.empty } func (r *ConfigQuotaSignUpQuotaConfig) String() string { return dcl.SprintResource(r) } func (r *ConfigQuotaSignUpQuotaConfig) HashCode() string { // Placeholder for a more complex hash method that handles ordering, etc // Hash resource body for easy comparison later hash := sha256.New().Sum([]byte(r.String())) return fmt.Sprintf("%x", hash) } type ConfigMonitoring struct { empty bool `json:"-"` RequestLogging *ConfigMonitoringRequestLogging `json:"requestLogging"` } type jsonConfigMonitoring ConfigMonitoring func (r *ConfigMonitoring) UnmarshalJSON(data []byte) error { var res jsonConfigMonitoring if err := json.Unmarshal(data, &res); err != nil { return err } var m map[string]interface{} json.Unmarshal(data, &m) if len(m) == 0 { *r = *EmptyConfigMonitoring } else { r.RequestLogging = res.RequestLogging } return nil } // This object is used to assert a desired state where this ConfigMonitoring is // empty. Go lacks global const objects, but this object should be treated // as one. Modifying this object will have undesirable results. 
var EmptyConfigMonitoring *ConfigMonitoring = &ConfigMonitoring{empty: true} func (r *ConfigMonitoring) Empty() bool { return r.empty } func (r *ConfigMonitoring) String() string { return dcl.SprintResource(r) } func (r *ConfigMonitoring) HashCode() string { // Placeholder for a more complex hash method that handles ordering, etc // Hash resource body for easy comparison later hash := sha256.New().Sum([]byte(r.String())) return fmt.Sprintf("%x", hash) } type ConfigMonitoringRequestLogging struct { empty bool `json:"-"` Enabled *bool `json:"enabled"` } type jsonConfigMonitoringRequestLogging ConfigMonitoringRequestLogging func (r *ConfigMonitoringRequestLogging) UnmarshalJSON(data []byte) error { var res jsonConfigMonitoringRequestLogging if err := json.Unmarshal(data, &res); err != nil { return err } var m map[string]interface{} json.Unmarshal(data, &m) if len(m) == 0 { *r = *EmptyConfigMonitoringRequestLogging } else { r.Enabled = res.Enabled } return nil } // This object is used to assert a desired state where this ConfigMonitoringRequestLogging is // empty. Go lacks global const objects, but this object should be treated // as one. Modifying this object will have undesirable results. 
var EmptyConfigMonitoringRequestLogging *ConfigMonitoringRequestLogging = &ConfigMonitoringRequestLogging{empty: true} func (r *ConfigMonitoringRequestLogging) Empty() bool { return r.empty } func (r *ConfigMonitoringRequestLogging) String() string { return dcl.SprintResource(r) } func (r *ConfigMonitoringRequestLogging) HashCode() string { // Placeholder for a more complex hash method that handles ordering, etc // Hash resource body for easy comparison later hash := sha256.New().Sum([]byte(r.String())) return fmt.Sprintf("%x", hash) } type ConfigMultiTenant struct { empty bool `json:"-"` AllowTenants *bool `json:"allowTenants"` DefaultTenantLocation *string `json:"defaultTenantLocation"` } type jsonConfigMultiTenant ConfigMultiTenant func (r *ConfigMultiTenant) UnmarshalJSON(data []byte) error { var res jsonConfigMultiTenant if err := json.Unmarshal(data, &res); err != nil { return err } var m map[string]interface{} json.Unmarshal(data, &m) if len(m) == 0 { *r = *EmptyConfigMultiTenant } else { r.AllowTenants = res.AllowTenants r.DefaultTenantLocation = res.DefaultTenantLocation } return nil } // This object is used to assert a desired state where this ConfigMultiTenant is // empty. Go lacks global const objects, but this object should be treated // as one. Modifying this object will have undesirable results. 
var EmptyConfigMultiTenant *ConfigMultiTenant = &ConfigMultiTenant{empty: true} func (r *ConfigMultiTenant) Empty() bool { return r.empty } func (r *ConfigMultiTenant) String() string { return dcl.SprintResource(r) } func (r *ConfigMultiTenant) HashCode() string { // Placeholder for a more complex hash method that handles ordering, etc // Hash resource body for easy comparison later hash := sha256.New().Sum([]byte(r.String())) return fmt.Sprintf("%x", hash) } type ConfigClient struct { empty bool `json:"-"` ApiKey *string `json:"apiKey"` Permissions *ConfigClientPermissions `json:"permissions"` FirebaseSubdomain *string `json:"firebaseSubdomain"` } type jsonConfigClient ConfigClient func (r *ConfigClient) UnmarshalJSON(data []byte) error { var res jsonConfigClient if err := json.Unmarshal(data, &res); err != nil { return err } var m map[string]interface{} json.Unmarshal(data, &m) if len(m) == 0 { *r = *EmptyConfigClient } else { r.ApiKey = res.ApiKey r.Permissions = res.Permissions r.FirebaseSubdomain = res.FirebaseSubdomain } return nil } // This object is used to assert a desired state where this ConfigClient is // empty. Go lacks global const objects, but this object should be treated // as one. Modifying this object will have undesirable results. 
var EmptyConfigClient *ConfigClient = &ConfigClient{empty: true} func (r *ConfigClient) Empty() bool { return r.empty } func (r *ConfigClient) String() string { return dcl.SprintResource(r) } func (r *ConfigClient) HashCode() string { // Placeholder for a more complex hash method that handles ordering, etc // Hash resource body for easy comparison later hash := sha256.New().Sum([]byte(r.String())) return fmt.Sprintf("%x", hash) } type ConfigClientPermissions struct { empty bool `json:"-"` DisabledUserSignup *bool `json:"disabledUserSignup"` DisabledUserDeletion *bool `json:"disabledUserDeletion"` } type jsonConfigClientPermissions ConfigClientPermissions func (r *ConfigClientPermissions) UnmarshalJSON(data []byte) error { var res jsonConfigClientPermissions if err := json.Unmarshal(data, &res); err != nil { return err } var m map[string]interface{} json.Unmarshal(data, &m) if len(m) == 0 { *r = *EmptyConfigClientPermissions } else { r.DisabledUserSignup = res.DisabledUserSignup r.DisabledUserDeletion = res.DisabledUserDeletion } return nil } // This object is used to assert a desired state where this ConfigClientPermissions is // empty. Go lacks global const objects, but this object should be treated // as one. Modifying this object will have undesirable results. 
var EmptyConfigClientPermissions *ConfigClientPermissions = &ConfigClientPermissions{empty: true} func (r *ConfigClientPermissions) Empty() bool { return r.empty } func (r *ConfigClientPermissions) String() string { return dcl.SprintResource(r) } func (r *ConfigClientPermissions) HashCode() string { // Placeholder for a more complex hash method that handles ordering, etc // Hash resource body for easy comparison later hash := sha256.New().Sum([]byte(r.String())) return fmt.Sprintf("%x", hash) } type ConfigMfa struct { empty bool `json:"-"` State *ConfigMfaStateEnum `json:"state"` } type jsonConfigMfa ConfigMfa func (r *ConfigMfa) UnmarshalJSON(data []byte) error { var res jsonConfigMfa if err := json.Unmarshal(data, &res); err != nil { return err } var m map[string]interface{} json.Unmarshal(data, &m) if len(m) == 0 { *r = *EmptyConfigMfa } else { r.State = res.State } return nil } // This object is used to assert a desired state where this ConfigMfa is // empty. Go lacks global const objects, but this object should be treated // as one. Modifying this object will have undesirable results. 
var EmptyConfigMfa *ConfigMfa = &ConfigMfa{empty: true} func (r *ConfigMfa) Empty() bool { return r.empty } func (r *ConfigMfa) String() string { return dcl.SprintResource(r) } func (r *ConfigMfa) HashCode() string { // Placeholder for a more complex hash method that handles ordering, etc // Hash resource body for easy comparison later hash := sha256.New().Sum([]byte(r.String())) return fmt.Sprintf("%x", hash) } type ConfigBlockingFunctions struct { empty bool `json:"-"` Triggers map[string]ConfigBlockingFunctionsTriggers `json:"triggers"` } type jsonConfigBlockingFunctions ConfigBlockingFunctions func (r *ConfigBlockingFunctions) UnmarshalJSON(data []byte) error { var res jsonConfigBlockingFunctions if err := json.Unmarshal(data, &res); err != nil { return err } var m map[string]interface{} json.Unmarshal(data, &m) if len(m) == 0 { *r = *EmptyConfigBlockingFunctions } else { r.Triggers = res.Triggers } return nil } // This object is used to assert a desired state where this ConfigBlockingFunctions is // empty. Go lacks global const objects, but this object should be treated // as one. Modifying this object will have undesirable results. 
var EmptyConfigBlockingFunctions *ConfigBlockingFunctions = &ConfigBlockingFunctions{empty: true} func (r *ConfigBlockingFunctions) Empty() bool { return r.empty } func (r *ConfigBlockingFunctions) String() string { return dcl.SprintResource(r) } func (r *ConfigBlockingFunctions) HashCode() string { // Placeholder for a more complex hash method that handles ordering, etc // Hash resource body for easy comparison later hash := sha256.New().Sum([]byte(r.String())) return fmt.Sprintf("%x", hash) } type ConfigBlockingFunctionsTriggers struct { empty bool `json:"-"` FunctionUri *string `json:"functionUri"` UpdateTime *string `json:"updateTime"` } type jsonConfigBlockingFunctionsTriggers ConfigBlockingFunctionsTriggers func (r *ConfigBlockingFunctionsTriggers) UnmarshalJSON(data []byte) error { var res jsonConfigBlockingFunctionsTriggers if err := json.Unmarshal(data, &res); err != nil { return err } var m map[string]interface{} json.Unmarshal(data, &m) if len(m) == 0 { *r = *EmptyConfigBlockingFunctionsTriggers } else { r.FunctionUri = res.FunctionUri r.UpdateTime = res.UpdateTime } return nil } // This object is used to assert a desired state where this ConfigBlockingFunctionsTriggers is // empty. Go lacks global const objects, but this object should be treated // as one. Modifying this object will have undesirable results. var EmptyConfigBlockingFunctionsTriggers *ConfigBlockingFunctionsTriggers = &ConfigBlockingFunctionsTriggers{empty: true} func (r *ConfigBlockingFunctionsTriggers) Empty() bool { return r.empty } func (r *ConfigBlockingFunctionsTriggers) String() string { return dcl.SprintResource(r) } func (r *ConfigBlockingFunctionsTriggers) HashCode() string { // Placeholder for a more complex hash method that handles ordering, etc // Hash resource body for easy comparison later hash := sha256.New().Sum([]byte(r.String())) return fmt.Sprintf("%x", hash) } // Describe returns a simple description of this resource to ensure that automated tools // can identify it. 
func (r *Config) Describe() dcl.ServiceTypeVersion { return dcl.ServiceTypeVersion{ Service: "identity_toolkit", Type: "Config", Version: "alpha", } } func (r *Config) ID() (string, error) { if err := extractConfigFields(r); err != nil { return "", err } nr := r.urlNormalized() params := map[string]interface{}{ "signIn": dcl.ValueOrEmptyString(nr.SignIn), "notification": dcl.ValueOrEmptyString(nr.Notification), "quota": dcl.ValueOrEmptyString(nr.Quota), "monitoring": dcl.ValueOrEmptyString(nr.Monitoring), "multiTenant": dcl.ValueOrEmptyString(nr.MultiTenant), "authorizedDomains": dcl.ValueOrEmptyString(nr.AuthorizedDomains), "subtype": dcl.ValueOrEmptyString(nr.Subtype), "client": dcl.ValueOrEmptyString(nr.Client), "mfa": dcl.ValueOrEmptyString(nr.Mfa), "blockingFunctions": dcl.ValueOrEmptyString(nr.BlockingFunctions), "project": dcl.ValueOrEmptyString(nr.Project), } return dcl.Nprintf("projects/{{project}}/config", params), nil } const ConfigMaxPage = -1 type ConfigList struct { Items []*Config nextToken string resource *Config } func (c *Client) GetConfig(ctx context.Context, r *Config) (*Config, error) { ctx = dcl.ContextWithRequestID(ctx) ctx, cancel := context.WithTimeout(ctx, c.Config.TimeoutOr(0*time.Second)) defer cancel() // This is *purposefully* supressing errors. // This function is used with url-normalized values + not URL normalized values. // URL Normalized values will throw unintentional errors, since those values are not of the proper parent form. 
extractConfigFields(r) b, err := c.getConfigRaw(ctx, r) if err != nil { if dcl.IsNotFound(err) { return nil, &googleapi.Error{ Code: 404, Message: err.Error(), } } return nil, err } result, err := unmarshalConfig(b, c, r) if err != nil { return nil, err } result.Project = r.Project c.Config.Logger.InfoWithContextf(ctx, "Retrieved raw result state: %v", result) c.Config.Logger.InfoWithContextf(ctx, "Canonicalizing with specified state: %v", r) result, err = canonicalizeConfigNewState(c, result, r) if err != nil { return nil, err } if err := postReadExtractConfigFields(result); err != nil { return result, err } c.Config.Logger.InfoWithContextf(ctx, "Created result state: %v", result) return result, nil } func (c *Client) ApplyConfig(ctx context.Context, rawDesired *Config, opts ...dcl.ApplyOption) (*Config, error) { ctx, cancel := context.WithTimeout(ctx, c.Config.TimeoutOr(0*time.Second)) defer cancel() ctx = dcl.ContextWithRequestID(ctx) var resultNewState *Config err := dcl.Do(ctx, func(ctx context.Context) (*dcl.RetryDetails, error) { newState, err := applyConfigHelper(c, ctx, rawDesired, opts...) resultNewState = newState if err != nil { // If the error is 409, there is conflict in resource update. // Here we want to apply changes based on latest state. if dcl.IsConflictError(err) { return &dcl.RetryDetails{}, dcl.OperationNotDone{Err: err} } return nil, err } return nil, nil }, c.Config.RetryProvider) return resultNewState, err } func applyConfigHelper(c *Client, ctx context.Context, rawDesired *Config, opts ...dcl.ApplyOption) (*Config, error) { c.Config.Logger.InfoWithContext(ctx, "Beginning ApplyConfig...") c.Config.Logger.InfoWithContextf(ctx, "User specified desired state: %v", rawDesired) // 1.1: Validation of user-specified fields in desired state. 
if err := rawDesired.validate(); err != nil { return nil, err } if err := extractConfigFields(rawDesired); err != nil { return nil, err } initial, desired, fieldDiffs, err := c.configDiffsForRawDesired(ctx, rawDesired, opts...) if err != nil { return nil, fmt.Errorf("failed to create a diff: %w", err) } diffs, err := convertFieldDiffsToConfigDiffs(c.Config, fieldDiffs, opts) if err != nil { return nil, err } // TODO(magic-modules-eng): 2.2 Feasibility check (all updates are feasible so far). // 2.3: Lifecycle Directive Check lp := dcl.FetchLifecycleParams(opts) if initial == nil { return nil, dcl.ApplyInfeasibleError{Message: "No initial state found for singleton resource."} } else { for _, d := range diffs { if d.UpdateOp == nil { return nil, dcl.ApplyInfeasibleError{ Message: fmt.Sprintf("infeasible update: (%v) no update method found for field", d), } } if dcl.HasLifecycleParam(lp, dcl.BlockModification) { return nil, dcl.ApplyInfeasibleError{Message: fmt.Sprintf("Modification blocked, diff (%v) unresolvable.", d)} } } } var ops []configApiOperation for _, d := range diffs { ops = append(ops, d.UpdateOp) } c.Config.Logger.InfoWithContextf(ctx, "Created plan: %#v", ops) // 2.5 Request Actuation for _, op := range ops { c.Config.Logger.InfoWithContextf(ctx, "Performing operation %T %+v", op, op) if err := op.do(ctx, desired, c); err != nil { c.Config.Logger.InfoWithContextf(ctx, "Failed operation %T %+v: %v", op, op, err) return nil, err } c.Config.Logger.InfoWithContextf(ctx, "Finished operation %T %+v", op, op) } return applyConfigDiff(c, ctx, desired, rawDesired, ops, opts...) 
} func applyConfigDiff(c *Client, ctx context.Context, desired *Config, rawDesired *Config, ops []configApiOperation, opts ...dcl.ApplyOption) (*Config, error) { // 3.1, 3.2a Retrieval of raw new state & canonicalization with desired state c.Config.Logger.InfoWithContext(ctx, "Retrieving raw new state...") rawNew, err := c.GetConfig(ctx, desired.urlNormalized()) if err != nil { return nil, err } c.Config.Logger.InfoWithContextf(ctx, "Canonicalizing with raw desired state: %v", rawDesired) // 3.2b Canonicalization of raw new state using raw desired state newState, err := canonicalizeConfigNewState(c, rawNew, rawDesired) if err != nil { return rawNew, err } c.Config.Logger.InfoWithContextf(ctx, "Created canonical new state: %v", newState) // 3.3 Comparison of the new state and raw desired state. // TODO(magic-modules-eng): EVENTUALLY_CONSISTENT_UPDATE newDesired, err := canonicalizeConfigDesiredState(rawDesired, newState) if err != nil { return newState, err } if err := postReadExtractConfigFields(newState); err != nil { return newState, err } // Need to ensure any transformations made here match acceptably in differ. if err := postReadExtractConfigFields(newDesired); err != nil { return newState, err } c.Config.Logger.InfoWithContextf(ctx, "Diffing using canonicalized desired state: %v", newDesired) newDiffs, err := diffConfig(c, newDesired, newState) if err != nil { return newState, err } if len(newDiffs) == 0 { c.Config.Logger.InfoWithContext(ctx, "No diffs found. Apply was successful.") } else { c.Config.Logger.InfoWithContextf(ctx, "Found diffs: %v", newDiffs)<|fim▁hole|> diffMessages := make([]string, len(newDiffs)) for i, d := range newDiffs { diffMessages[i] = fmt.Sprintf("%v", d) } return newState, dcl.DiffAfterApplyError{Diffs: diffMessages} } c.Config.Logger.InfoWithContext(ctx, "Done Apply.") return newState, nil }<|fim▁end|>
<|file_name|>metrics.rs<|end_file_name|><|fim▁begin|>// Copyright 2021 TiKV Project Authors. Licensed under Apache-2.0. use prometheus_static_metric::*; make_auto_flush_static_metric! { pub label_enum GcKeysCF { default, lock,<|fim▁hole|> } pub label_enum GcKeysDetail { processed_keys, get, next, prev, seek, seek_for_prev, over_seek_bound, next_tombstone, prev_tombstone, seek_tombstone, seek_for_prev_tombstone, ttl_tombstone, } pub struct GcKeysCounterVec: LocalIntCounter { "cf" => GcKeysCF, "tag" => GcKeysDetail, } }<|fim▁end|>
write,
<|file_name|>memory.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use rustc_serialize::json; use std::net::TcpStream; use actor::{Actor, ActorRegistry, ActorMessageStatus};<|fim▁hole|> jsStringSize: u64, jsOtherSize: u64, domSize: u64, styleSize: u64, otherSize: u64, totalSize: u64, jsMilliseconds: f64, nonJSMilliseconds: f64, } pub struct MemoryActor { pub name: String, } impl Actor for MemoryActor { fn name(&self) -> String { self.name.clone() } fn handle_message(&self, _registry: &ActorRegistry, _msg_type: &str, _msg: &json::Object, _stream: &mut TcpStream) -> Result<ActorMessageStatus, ()> { Ok(ActorMessageStatus::Ignored) } } impl MemoryActor { /// return name of actor pub fn create(registry: &ActorRegistry) -> String { let actor_name = registry.new_name("memory"); let actor = MemoryActor { name: actor_name.clone() }; registry.register_later(box actor); actor_name } pub fn measure(&self) -> TimelineMemoryReply { //TODO: TimelineMemoryReply { jsObjectSize: 1, jsStringSize: 1, jsOtherSize: 1, domSize: 1, styleSize: 1, otherSize: 1, totalSize: 1, jsMilliseconds: 1.1, nonJSMilliseconds: 1.1, } } }<|fim▁end|>
#[derive(RustcEncodable)] pub struct TimelineMemoryReply { jsObjectSize: u64,
<|file_name|>texgen_emboss.py<|end_file_name|><|fim▁begin|>'''OpenGL extension NV.texgen_emboss This module customises the behaviour of the OpenGL.raw.GL.NV.texgen_emboss to provide a more Python-friendly API Overview (from the spec) This extension provides a new texture coordinate generation mode suitable for multitexture-based embossing (or bump mapping) effects. Given two texture units, this extension generates the texture coordinates of a second texture unit (an odd-numbered texture unit) as a perturbation of a first texture unit (an even-numbered texture unit one less than the second texture unit). The perturbation is based on the normal, tangent, and light vectors. The normal vector is supplied by glNormal; the light vector is supplied as a direction vector to a specified OpenGL light's position; and the tanget vector is supplied by the second texture unit's current texture coordinate. The perturbation is also scaled by program-supplied scaling constants. If both texture units are bound to the same texture representing a height field, by subtracting the difference between the resulting two filtered texels, programs can achieve a per-pixel embossing effect. The official definition of this extension is available here: http://www.opengl.org/registry/specs/NV/texgen_emboss.txt ''' from OpenGL import platform, constant, arrays from OpenGL import extensions, wrapper import ctypes from OpenGL.raw.GL import _types, _glgets from OpenGL.raw.GL.NV.texgen_emboss import * from OpenGL.raw.GL.NV.texgen_emboss import _EXTENSION_NAME def glInitTexgenEmbossNV(): '''Return boolean indicating whether this extension is available''' from OpenGL import extensions return extensions.hasGLExtension( _EXTENSION_NAME ) <|fim▁hole|> ### END AUTOGENERATED SECTION<|fim▁end|>
<|file_name|>htmliframeelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ use crate::document_loader::{LoadBlocker, LoadType}; use crate::dom::attr::Attr; use crate::dom::bindings::cell::DomRefCell; use crate::dom::bindings::codegen::Bindings::HTMLIFrameElementBinding::HTMLIFrameElementMethods; use crate::dom::bindings::codegen::Bindings::WindowBinding::WindowBinding::WindowMethods; use crate::dom::bindings::inheritance::Castable; use crate::dom::bindings::refcounted::Trusted; use crate::dom::bindings::reflector::DomObject; use crate::dom::bindings::root::{DomRoot, LayoutDom, MutNullableDom}; use crate::dom::bindings::str::{DOMString, USVString}; use crate::dom::document::Document; use crate::dom::domtokenlist::DOMTokenList; use crate::dom::element::{AttributeMutation, Element, LayoutElementHelpers}; use crate::dom::eventtarget::EventTarget; use crate::dom::globalscope::GlobalScope; use crate::dom::htmlelement::HTMLElement; use crate::dom::node::{ document_from_node, window_from_node, BindContext, Node, NodeDamage, UnbindContext, }; use crate::dom::virtualmethods::VirtualMethods; use crate::dom::window::ReflowReason; use crate::dom::windowproxy::WindowProxy; use crate::script_thread::ScriptThread; use crate::task_source::TaskSource; use dom_struct::dom_struct; use html5ever::{LocalName, Prefix}; use ipc_channel::ipc; use msg::constellation_msg::{BrowsingContextId, PipelineId, TopLevelBrowsingContextId}; use profile_traits::ipc as ProfiledIpc; use script_layout_interface::message::ReflowGoal; use script_traits::IFrameSandboxState::{IFrameSandboxed, IFrameUnsandboxed}; use script_traits::{ HistoryEntryReplacement, IFrameLoadInfo, IFrameLoadInfoWithData, JsEvalResult, LoadData, LoadOrigin, UpdatePipelineIdReason, WindowSizeData, }; use script_traits::{NewLayoutInfo, ScriptMsg}; 
use servo_atoms::Atom; use servo_url::ServoUrl; use std::cell::Cell; use style::attr::{AttrValue, LengthOrPercentageOrAuto}; bitflags! { #[derive(JSTraceable, MallocSizeOf)] struct SandboxAllowance: u8 { const ALLOW_NOTHING = 0x00; const ALLOW_SAME_ORIGIN = 0x01; const ALLOW_TOP_NAVIGATION = 0x02; const ALLOW_FORMS = 0x04; const ALLOW_SCRIPTS = 0x08; const ALLOW_POINTER_LOCK = 0x10; const ALLOW_POPUPS = 0x20; } } #[derive(PartialEq)] pub enum NavigationType { InitialAboutBlank, Regular,<|fim▁hole|>} #[derive(PartialEq)] enum ProcessingMode { FirstTime, NotFirstTime, } #[dom_struct] pub struct HTMLIFrameElement { htmlelement: HTMLElement, top_level_browsing_context_id: Cell<Option<TopLevelBrowsingContextId>>, browsing_context_id: Cell<Option<BrowsingContextId>>, pipeline_id: Cell<Option<PipelineId>>, pending_pipeline_id: Cell<Option<PipelineId>>, about_blank_pipeline_id: Cell<Option<PipelineId>>, sandbox: MutNullableDom<DOMTokenList>, sandbox_allowance: Cell<Option<SandboxAllowance>>, load_blocker: DomRefCell<Option<LoadBlocker>>, visibility: Cell<bool>, } impl HTMLIFrameElement { pub fn is_sandboxed(&self) -> bool { self.sandbox_allowance.get().is_some() } /// <https://html.spec.whatwg.org/multipage/#otherwise-steps-for-iframe-or-frame-elements>, /// step 1. 
fn get_url(&self) -> ServoUrl { let element = self.upcast::<Element>(); element .get_attribute(&ns!(), &local_name!("src")) .and_then(|src| { let url = src.value(); if url.is_empty() { None } else { document_from_node(self).base_url().join(&url).ok() } }) .unwrap_or_else(|| ServoUrl::parse("about:blank").unwrap()) } pub fn navigate_or_reload_child_browsing_context( &self, mut load_data: LoadData, nav_type: NavigationType, replace: HistoryEntryReplacement, ) { let sandboxed = if self.is_sandboxed() { IFrameSandboxed } else { IFrameUnsandboxed }; let browsing_context_id = match self.browsing_context_id() { None => return warn!("Navigating unattached iframe."), Some(id) => id, }; let top_level_browsing_context_id = match self.top_level_browsing_context_id() { None => return warn!("Navigating unattached iframe."), Some(id) => id, }; let document = document_from_node(self); { let mut load_blocker = self.load_blocker.borrow_mut(); // Any oustanding load is finished from the point of view of the blocked // document; the new navigation will continue blocking it. LoadBlocker::terminate(&mut load_blocker); } if load_data.url.scheme() == "javascript" { let window_proxy = self.GetContentWindow(); if let Some(window_proxy) = window_proxy { // Important re security. 
See https://github.com/servo/servo/issues/23373 // TODO: check according to https://w3c.github.io/webappsec-csp/#should-block-navigation-request if ScriptThread::check_load_origin(&load_data.load_origin, &document.url().origin()) { ScriptThread::eval_js_url(&window_proxy.global(), &mut load_data); } } } match load_data.js_eval_result { Some(JsEvalResult::NoContent) => (), _ => { let mut load_blocker = self.load_blocker.borrow_mut(); *load_blocker = Some(LoadBlocker::new( &*document, LoadType::Subframe(load_data.url.clone()), )); }, }; let window = window_from_node(self); let old_pipeline_id = self.pipeline_id(); let new_pipeline_id = PipelineId::new(); self.pending_pipeline_id.set(Some(new_pipeline_id)); let global_scope = window.upcast::<GlobalScope>(); let load_info = IFrameLoadInfo { parent_pipeline_id: global_scope.pipeline_id(), browsing_context_id: browsing_context_id, top_level_browsing_context_id: top_level_browsing_context_id, new_pipeline_id: new_pipeline_id, is_private: false, // FIXME replace: replace, }; let window_size = WindowSizeData { initial_viewport: window .inner_window_dimensions_query(browsing_context_id) .unwrap_or_default(), device_pixel_ratio: window.device_pixel_ratio(), }; match nav_type { NavigationType::InitialAboutBlank => { let (pipeline_sender, pipeline_receiver) = ipc::channel().unwrap(); self.about_blank_pipeline_id.set(Some(new_pipeline_id)); let load_info = IFrameLoadInfoWithData { info: load_info, load_data: load_data.clone(), old_pipeline_id: old_pipeline_id, sandbox: sandboxed, window_size, }; global_scope .script_to_constellation_chan() .send(ScriptMsg::ScriptNewIFrame(load_info, pipeline_sender)) .unwrap(); let new_layout_info = NewLayoutInfo { parent_info: Some(global_scope.pipeline_id()), new_pipeline_id: new_pipeline_id, browsing_context_id: browsing_context_id, top_level_browsing_context_id: top_level_browsing_context_id, opener: None, load_data: load_data, pipeline_port: pipeline_receiver, window_size, }; 
self.pipeline_id.set(Some(new_pipeline_id)); ScriptThread::process_attach_layout(new_layout_info, document.origin().clone()); }, NavigationType::Regular => { let load_info = IFrameLoadInfoWithData { info: load_info, load_data: load_data, old_pipeline_id: old_pipeline_id, sandbox: sandboxed, window_size, }; global_scope .script_to_constellation_chan() .send(ScriptMsg::ScriptLoadedURLInIFrame(load_info)) .unwrap(); }, } } /// <https://html.spec.whatwg.org/multipage/#process-the-iframe-attributes> fn process_the_iframe_attributes(&self, mode: ProcessingMode) { if self .upcast::<Element>() .has_attribute(&local_name!("srcdoc")) { let url = ServoUrl::parse("about:srcdoc").unwrap(); let document = document_from_node(self); let window = window_from_node(self); let pipeline_id = Some(window.upcast::<GlobalScope>().pipeline_id()); let mut load_data = LoadData::new( LoadOrigin::Script(document.origin().immutable().clone()), url, pipeline_id, window.upcast::<GlobalScope>().get_referrer(), document.get_referrer_policy(), ); let element = self.upcast::<Element>(); load_data.srcdoc = String::from(element.get_string_attribute(&local_name!("srcdoc"))); self.navigate_or_reload_child_browsing_context( load_data, NavigationType::Regular, HistoryEntryReplacement::Disabled, ); return; } let window = window_from_node(self); // https://html.spec.whatwg.org/multipage/#attr-iframe-name // Note: the spec says to set the name 'when the nested browsing context is created'. // The current implementation sets the name on the window, // when the iframe attributes are first processed. 
if mode == ProcessingMode::FirstTime { if let Some(window) = self.GetContentWindow() { window.set_name( self.upcast::<Element>() .get_name() .map_or(DOMString::from(""), |n| DOMString::from(&*n)), ); } } // https://github.com/whatwg/html/issues/490 if mode == ProcessingMode::FirstTime && !self.upcast::<Element>().has_attribute(&local_name!("src")) { let this = Trusted::new(self); let pipeline_id = self.pipeline_id().unwrap(); // FIXME(nox): Why are errors silenced here? let _ = window.task_manager().dom_manipulation_task_source().queue( task!(iframe_load_event_steps: move || { this.root().iframe_load_event_steps(pipeline_id); }), window.upcast(), ); return; } let url = self.get_url(); // TODO(#25748): // By spec, we return early if there's an ancestor browsing context // "whose active document's url, ignoring fragments, is equal". // However, asking about ancestor browsing contexts is more nuanced than // it sounds and not implemented here. // Within a single origin, we can do it by walking window proxies, // and this check covers only that single-origin case, protecting // against simple typo self-includes but nothing more elaborate. 
let mut ancestor = window.GetParent(); while let Some(a) = ancestor { if let Some(ancestor_url) = a.document().map(|d| d.url()) { if ancestor_url.scheme() == url.scheme() && ancestor_url.username() == url.username() && ancestor_url.password() == url.password() && ancestor_url.host() == url.host() && ancestor_url.port() == url.port() && ancestor_url.path() == url.path() && ancestor_url.query() == url.query() { return; } } ancestor = a.parent().map(|p| DomRoot::from_ref(p)); } let creator_pipeline_id = if url.as_str() == "about:blank" { Some(window.upcast::<GlobalScope>().pipeline_id()) } else { None }; let document = document_from_node(self); let load_data = LoadData::new( LoadOrigin::Script(document.origin().immutable().clone()), url, creator_pipeline_id, window.upcast::<GlobalScope>().get_referrer(), document.get_referrer_policy(), ); let pipeline_id = self.pipeline_id(); // If the initial `about:blank` page is the current page, load with replacement enabled, // see https://html.spec.whatwg.org/multipage/#the-iframe-element:about:blank-3 let is_about_blank = pipeline_id.is_some() && pipeline_id == self.about_blank_pipeline_id.get(); let replace = if is_about_blank { HistoryEntryReplacement::Enabled } else { HistoryEntryReplacement::Disabled }; self.navigate_or_reload_child_browsing_context(load_data, NavigationType::Regular, replace); } fn create_nested_browsing_context(&self) { // Synchronously create a new context and navigate it to about:blank. 
let url = ServoUrl::parse("about:blank").unwrap(); let document = document_from_node(self); let window = window_from_node(self); let pipeline_id = Some(window.upcast::<GlobalScope>().pipeline_id()); let load_data = LoadData::new( LoadOrigin::Script(document.origin().immutable().clone()), url, pipeline_id, window.upcast::<GlobalScope>().get_referrer(), document.get_referrer_policy(), ); let browsing_context_id = BrowsingContextId::new(); let top_level_browsing_context_id = window.window_proxy().top_level_browsing_context_id(); self.pipeline_id.set(None); self.pending_pipeline_id.set(None); self.top_level_browsing_context_id .set(Some(top_level_browsing_context_id)); self.browsing_context_id.set(Some(browsing_context_id)); self.navigate_or_reload_child_browsing_context( load_data, NavigationType::InitialAboutBlank, HistoryEntryReplacement::Disabled, ); } fn destroy_nested_browsing_context(&self) { self.pipeline_id.set(None); self.pending_pipeline_id.set(None); self.about_blank_pipeline_id.set(None); self.top_level_browsing_context_id.set(None); self.browsing_context_id.set(None); } pub fn update_pipeline_id(&self, new_pipeline_id: PipelineId, reason: UpdatePipelineIdReason) { if self.pending_pipeline_id.get() != Some(new_pipeline_id) && reason == UpdatePipelineIdReason::Navigation { return; } self.pipeline_id.set(Some(new_pipeline_id)); // Only terminate the load blocker if the pipeline id was updated due to a traversal. // The load blocker will be terminated for a navigation in iframe_load_event_steps. 
if reason == UpdatePipelineIdReason::Traversal { let mut blocker = self.load_blocker.borrow_mut(); LoadBlocker::terminate(&mut blocker); } self.upcast::<Node>().dirty(NodeDamage::OtherNodeDamage); let window = window_from_node(self); window.reflow(ReflowGoal::Full, ReflowReason::FramedContentChanged); } fn new_inherited( local_name: LocalName, prefix: Option<Prefix>, document: &Document, ) -> HTMLIFrameElement { HTMLIFrameElement { htmlelement: HTMLElement::new_inherited(local_name, prefix, document), browsing_context_id: Cell::new(None), top_level_browsing_context_id: Cell::new(None), pipeline_id: Cell::new(None), pending_pipeline_id: Cell::new(None), about_blank_pipeline_id: Cell::new(None), sandbox: Default::default(), sandbox_allowance: Cell::new(None), load_blocker: DomRefCell::new(None), visibility: Cell::new(true), } } #[allow(unrooted_must_root)] pub fn new( local_name: LocalName, prefix: Option<Prefix>, document: &Document, ) -> DomRoot<HTMLIFrameElement> { Node::reflect_node( Box::new(HTMLIFrameElement::new_inherited( local_name, prefix, document, )), document, ) } #[inline] pub fn pipeline_id(&self) -> Option<PipelineId> { self.pipeline_id.get() } #[inline] pub fn browsing_context_id(&self) -> Option<BrowsingContextId> { self.browsing_context_id.get() } #[inline] pub fn top_level_browsing_context_id(&self) -> Option<TopLevelBrowsingContextId> { self.top_level_browsing_context_id.get() } pub fn change_visibility_status(&self, visibility: bool) { if self.visibility.get() != visibility { self.visibility.set(visibility); } } /// https://html.spec.whatwg.org/multipage/#iframe-load-event-steps steps 1-4 pub fn iframe_load_event_steps(&self, loaded_pipeline: PipelineId) { // TODO(#9592): assert that the load blocker is present at all times when we // can guarantee that it's created for the case of iframe.reload(). 
if Some(loaded_pipeline) != self.pending_pipeline_id.get() { return; } // TODO A cross-origin child document would not be easily accessible // from this script thread. It's unclear how to implement // steps 2, 3, and 5 efficiently in this case. // TODO Step 2 - check child document `mute iframe load` flag // TODO Step 3 - set child document `mut iframe load` flag // Step 4 self.upcast::<EventTarget>().fire_event(atom!("load")); let mut blocker = self.load_blocker.borrow_mut(); LoadBlocker::terminate(&mut blocker); // TODO Step 5 - unset child document `mut iframe load` flag let window = window_from_node(self); window.reflow(ReflowGoal::Full, ReflowReason::IFrameLoadEvent); } } pub trait HTMLIFrameElementLayoutMethods { fn pipeline_id(self) -> Option<PipelineId>; fn browsing_context_id(self) -> Option<BrowsingContextId>; fn get_width(self) -> LengthOrPercentageOrAuto; fn get_height(self) -> LengthOrPercentageOrAuto; } impl HTMLIFrameElementLayoutMethods for LayoutDom<'_, HTMLIFrameElement> { #[inline] #[allow(unsafe_code)] fn pipeline_id(self) -> Option<PipelineId> { unsafe { (*self.unsafe_get()).pipeline_id.get() } } #[inline] #[allow(unsafe_code)] fn browsing_context_id(self) -> Option<BrowsingContextId> { unsafe { (*self.unsafe_get()).browsing_context_id.get() } } fn get_width(self) -> LengthOrPercentageOrAuto { self.upcast::<Element>() .get_attr_for_layout(&ns!(), &local_name!("width")) .map(AttrValue::as_dimension) .cloned() .unwrap_or(LengthOrPercentageOrAuto::Auto) } fn get_height(self) -> LengthOrPercentageOrAuto { self.upcast::<Element>() .get_attr_for_layout(&ns!(), &local_name!("height")) .map(AttrValue::as_dimension) .cloned() .unwrap_or(LengthOrPercentageOrAuto::Auto) } } impl HTMLIFrameElementMethods for HTMLIFrameElement { // https://html.spec.whatwg.org/multipage/#dom-iframe-src make_url_getter!(Src, "src"); // https://html.spec.whatwg.org/multipage/#dom-iframe-src make_url_setter!(SetSrc, "src"); // 
https://html.spec.whatwg.org/multipage/#dom-iframe-srcdoc make_getter!(Srcdoc, "srcdoc"); // https://html.spec.whatwg.org/multipage/#dom-iframe-srcdoc make_setter!(SetSrcdoc, "srcdoc"); // https://html.spec.whatwg.org/multipage/#dom-iframe-sandbox fn Sandbox(&self) -> DomRoot<DOMTokenList> { self.sandbox.or_init(|| { DOMTokenList::new( self.upcast::<Element>(), &local_name!("sandbox"), Some(vec![ Atom::from("allow-same-origin"), Atom::from("allow-forms"), Atom::from("allow-pointer-lock"), Atom::from("allow-popups"), Atom::from("allow-scripts"), Atom::from("allow-top-navigation"), ]), ) }) } // https://html.spec.whatwg.org/multipage/#dom-iframe-contentwindow fn GetContentWindow(&self) -> Option<DomRoot<WindowProxy>> { self.browsing_context_id .get() .and_then(|browsing_context_id| ScriptThread::find_window_proxy(browsing_context_id)) } // https://html.spec.whatwg.org/multipage/#dom-iframe-contentdocument // https://html.spec.whatwg.org/multipage/#concept-bcc-content-document fn GetContentDocument(&self) -> Option<DomRoot<Document>> { // Step 1. let pipeline_id = self.pipeline_id.get()?; // Step 2-3. // Note that this lookup will fail if the document is dissimilar-origin, // so we should return None in that case. let document = ScriptThread::find_document(pipeline_id)?; // Step 4. let current = GlobalScope::current() .expect("No current global object") .as_window() .Document(); if !current.origin().same_origin_domain(document.origin()) { return None; } // Step 5. 
Some(document) } // https://html.spec.whatwg.org/multipage/#attr-iframe-allowfullscreen make_bool_getter!(AllowFullscreen, "allowfullscreen"); // https://html.spec.whatwg.org/multipage/#attr-iframe-allowfullscreen make_bool_setter!(SetAllowFullscreen, "allowfullscreen"); // https://html.spec.whatwg.org/multipage/#dom-dim-width make_getter!(Width, "width"); // https://html.spec.whatwg.org/multipage/#dom-dim-width make_dimension_setter!(SetWidth, "width"); // https://html.spec.whatwg.org/multipage/#dom-dim-height make_getter!(Height, "height"); // https://html.spec.whatwg.org/multipage/#dom-dim-height make_dimension_setter!(SetHeight, "height"); // https://html.spec.whatwg.org/multipage/#other-elements,-attributes-and-apis:attr-iframe-frameborder make_getter!(FrameBorder, "frameborder"); // https://html.spec.whatwg.org/multipage/#other-elements,-attributes-and-apis:attr-iframe-frameborder make_setter!(SetFrameBorder, "frameborder"); // https://html.spec.whatwg.org/multipage/#dom-iframe-name // A child browsing context checks the name of its iframe only at the time // it is created; subsequent name sets have no special effect. make_atomic_setter!(SetName, "name"); // https://html.spec.whatwg.org/multipage/#dom-iframe-name // This is specified as reflecting the name content attribute of the // element, not the name of the child browsing context. 
make_getter!(Name, "name"); } impl VirtualMethods for HTMLIFrameElement { fn super_type(&self) -> Option<&dyn VirtualMethods> { Some(self.upcast::<HTMLElement>() as &dyn VirtualMethods) } fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) { self.super_type().unwrap().attribute_mutated(attr, mutation); match attr.local_name() { &local_name!("sandbox") => { self.sandbox_allowance .set(mutation.new_value(attr).map(|value| { let mut modes = SandboxAllowance::ALLOW_NOTHING; for token in value.as_tokens() { modes |= match &*token.to_ascii_lowercase() { "allow-same-origin" => SandboxAllowance::ALLOW_SAME_ORIGIN, "allow-forms" => SandboxAllowance::ALLOW_FORMS, "allow-pointer-lock" => SandboxAllowance::ALLOW_POINTER_LOCK, "allow-popups" => SandboxAllowance::ALLOW_POPUPS, "allow-scripts" => SandboxAllowance::ALLOW_SCRIPTS, "allow-top-navigation" => SandboxAllowance::ALLOW_TOP_NAVIGATION, _ => SandboxAllowance::ALLOW_NOTHING, }; } modes })); }, &local_name!("srcdoc") => { // https://html.spec.whatwg.org/multipage/#the-iframe-element:the-iframe-element-9 // "Whenever an iframe element with a non-null nested browsing context has its // srcdoc attribute set, changed, or removed, the user agent must process the // iframe attributes." // but we can't check that directly, since the child browsing context // may be in a different script thread. Instead, we check to see if the parent // is in a document tree and has a browsing context, which is what causes // the child browsing context to be created. 
// trigger the processing of iframe attributes whenever "srcdoc" attribute is set, changed or removed if self.upcast::<Node>().is_connected_with_browsing_context() { debug!("iframe srcdoc modified while in browsing context."); self.process_the_iframe_attributes(ProcessingMode::NotFirstTime); } }, &local_name!("src") => { // https://html.spec.whatwg.org/multipage/#the-iframe-element // "Similarly, whenever an iframe element with a non-null nested browsing context // but with no srcdoc attribute specified has its src attribute set, changed, or removed, // the user agent must process the iframe attributes," // but we can't check that directly, since the child browsing context // may be in a different script thread. Instead, we check to see if the parent // is in a document tree and has a browsing context, which is what causes // the child browsing context to be created. if self.upcast::<Node>().is_connected_with_browsing_context() { debug!("iframe src set while in browsing context."); self.process_the_iframe_attributes(ProcessingMode::NotFirstTime); } }, _ => {}, } } fn parse_plain_attribute(&self, name: &LocalName, value: DOMString) -> AttrValue { match name { &local_name!("sandbox") => AttrValue::from_serialized_tokenlist(value.into()), &local_name!("width") => AttrValue::from_dimension(value.into()), &local_name!("height") => AttrValue::from_dimension(value.into()), _ => self .super_type() .unwrap() .parse_plain_attribute(name, value), } } fn bind_to_tree(&self, context: &BindContext) { if let Some(ref s) = self.super_type() { s.bind_to_tree(context); } let tree_connected = context.tree_connected; let iframe = Trusted::new(self); document_from_node(self).add_delayed_task(task!(IFrameDelayedInitialize: move || { let this = iframe.root(); // https://html.spec.whatwg.org/multipage/#the-iframe-element // "When an iframe element is inserted into a document that has // a browsing context, the user agent must create a new // browsing context, set the element's nested 
browsing context // to the newly-created browsing context, and then process the // iframe attributes for the "first time"." if this.upcast::<Node>().is_connected_with_browsing_context() { debug!("iframe bound to browsing context."); debug_assert!(tree_connected, "is_connected_with_bc, but not tree_connected"); this.create_nested_browsing_context(); this.process_the_iframe_attributes(ProcessingMode::FirstTime); } })); } fn unbind_from_tree(&self, context: &UnbindContext) { self.super_type().unwrap().unbind_from_tree(context); let mut blocker = self.load_blocker.borrow_mut(); LoadBlocker::terminate(&mut blocker); // https://html.spec.whatwg.org/multipage/#a-browsing-context-is-discarded let window = window_from_node(self); let (sender, receiver) = ProfiledIpc::channel(self.global().time_profiler_chan().clone()).unwrap(); // Ask the constellation to remove the iframe, and tell us the // pipeline ids of the closed pipelines. let browsing_context_id = match self.browsing_context_id() { None => return warn!("Unbinding already unbound iframe."), Some(id) => id, }; debug!("Unbinding frame {}.", browsing_context_id); let msg = ScriptMsg::RemoveIFrame(browsing_context_id, sender); window .upcast::<GlobalScope>() .script_to_constellation_chan() .send(msg) .unwrap(); let exited_pipeline_ids = receiver.recv().unwrap(); // The spec for discarding is synchronous, // so we need to discard the browsing contexts now, rather than // when the `PipelineExit` message arrives. 
for exited_pipeline_id in exited_pipeline_ids { // https://html.spec.whatwg.org/multipage/#a-browsing-context-is-discarded if let Some(exited_document) = ScriptThread::find_document(exited_pipeline_id) { debug!( "Discarding browsing context for pipeline {}", exited_pipeline_id ); let exited_window = exited_document.window(); exited_window.discard_browsing_context(); for exited_iframe in exited_document.iter_iframes() { debug!("Discarding nested browsing context"); exited_iframe.destroy_nested_browsing_context(); } } } // Resetting the pipeline_id to None is required here so that // if this iframe is subsequently re-added to the document // the load doesn't think that it's a navigation, but instead // a new iframe. Without this, the constellation gets very // confused. self.destroy_nested_browsing_context(); } }<|fim▁end|>
<|file_name|>multiwriter.go<|end_file_name|><|fim▁begin|>/*<|fim▁hole|> you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // Package multiwriter provides an io.Writer that duplicates its writes to multiple writers concurrently. package multiwriter import ( "io" ) // multiWriter duplicates writes to multiple writers. type multiWriter []io.Writer // New returns an io.Writer that duplicates writes to all provided writers. func New(w ...io.Writer) io.Writer { return multiWriter(w) } // Write writes p to all writers concurrently. If any errors occur, the shortest write is returned. func (mw multiWriter) Write(p []byte) (int, error) { done := make(chan result, len(mw)) for _, w := range mw { go send(w, p, done) } endResult := result{n: len(p)} for _ = range mw { res := <-done if res.err != nil && (endResult.err == nil || res.n < endResult.n) { endResult = res } } return endResult.n, endResult.err } func send(w io.Writer, p []byte, done chan<- result) { var res result res.n, res.err = w.Write(p) if res.n < len(p) && res.err == nil { res.err = io.ErrShortWrite } done <- res } type result struct { n int err error }<|fim▁end|>
Copyright 2014 Google Inc. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License");
<|file_name|>generic-tup.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at<|fim▁hole|>// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. fn get_third<T>(t: (T, T, T)) -> T { let (_, _, x) = t; return x; } pub fn main() { info!(get_third((1, 2, 3))); assert_eq!(get_third((1, 2, 3)), 3); assert_eq!(get_third((5u8, 6u8, 7u8)), 7u8); }<|fim▁end|>
// http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
<|file_name|>rules_default_values_of_correct_type_test.go<|end_file_name|><|fim▁begin|>package graphql_test import ( "testing" "github.com/housinganywhere/graphql" "github.com/housinganywhere/graphql/gqlerrors" "github.com/housinganywhere/graphql/testutil" ) func TestValidate_VariableDefaultValuesOfCorrectType_VariablesWithNoDefaultValues(t *testing.T) { testutil.ExpectPassesRule(t, graphql.DefaultValuesOfCorrectTypeRule, ` query NullableValues($a: Int, $b: String, $c: ComplexInput) { dog { name } } `) } func TestValidate_VariableDefaultValuesOfCorrectType_RequiredVariablesWithoutDefaultValues(t *testing.T) { testutil.ExpectPassesRule(t, graphql.DefaultValuesOfCorrectTypeRule, ` query RequiredValues($a: Int!, $b: String!) { dog { name } } `) } func TestValidate_VariableDefaultValuesOfCorrectType_VariablesWithValidDefaultValues(t *testing.T) { testutil.ExpectPassesRule(t, graphql.DefaultValuesOfCorrectTypeRule, ` query WithDefaultValues( $a: Int = 1, $b: String = "ok", $c: ComplexInput = { requiredField: true, intField: 3 } ) { dog { name } } `) } func TestValidate_VariableDefaultValuesOfCorrectType_NoRequiredVariablesWithDefaultValues(t *testing.T) { testutil.ExpectFailsRule(t, graphql.DefaultValuesOfCorrectTypeRule, ` query UnreachableDefaultValues($a: Int! = 3, $b: String! = "default") { dog { name } } `, []gqlerrors.FormattedError{ testutil.RuleError( `Variable "$a" of type "Int!" is required and will not `+ `use the default value. Perhaps you meant to use type "Int".`, 2, 49, ), testutil.RuleError( `Variable "$b" of type "String!" is required and will not `+ `use the default value. 
Perhaps you meant to use type "String".`, 2, 66, ), }) } func TestValidate_VariableDefaultValuesOfCorrectType_VariablesWithInvalidDefaultValues(t *testing.T) { testutil.ExpectFailsRule(t, graphql.DefaultValuesOfCorrectTypeRule, ` query InvalidDefaultValues( $a: Int = "one", $b: String = 4, $c: ComplexInput = "notverycomplex" ) { dog { name } } `, []gqlerrors.FormattedError{ testutil.RuleError(`Variable "$a" has invalid default value: "one".`+ "\nExpected type \"Int\", found \"one\".", 3, 19), testutil.RuleError(`Variable "$b" has invalid default value: 4.`+ "\nExpected type \"String\", found 4.", 4, 22), testutil.RuleError( `Variable "$c" has invalid default value: "notverycomplex".`+ "\nExpected \"ComplexInput\", found not an object.", 5, 28), }) } func TestValidate_VariableDefaultValuesOfCorrectType_ComplexVariablesMissingRequiredField(t *testing.T) { testutil.ExpectFailsRule(t, graphql.DefaultValuesOfCorrectTypeRule, ` query MissingRequiredField($a: ComplexInput = {intField: 3}) { dog { name } } `, []gqlerrors.FormattedError{<|fim▁hole|> testutil.RuleError( `Variable "$a" has invalid default value: {intField: 3}.`+ "\nIn field \"requiredField\": Expected \"Boolean!\", found null.", 2, 53), }) } func TestValidate_VariableDefaultValuesOfCorrectType_ListVariablesWithInvalidItem(t *testing.T) { testutil.ExpectFailsRule(t, graphql.DefaultValuesOfCorrectTypeRule, ` query InvalidItem($a: [String] = ["one", 2]) { dog { name } } `, []gqlerrors.FormattedError{ testutil.RuleError( `Variable "$a" has invalid default value: ["one", 2].`+ "\nIn element #1: Expected type \"String\", found 2.", 2, 40), }) }<|fim▁end|>
<|file_name|>cliente_gui.py<|end_file_name|><|fim▁begin|># coding: utf-8 import pygame import sys from pygame.locals import * from gui import * from conexao import * from jogador import * from Queue import Queue from threading import Thread """ Cliente Tp de Redes - Truco UFSJ Carlos Magno Lucas Geraldo Requisitos: *python 2.7 *pygame Modulo Principal. """ class Principal(Gui): """ Classe Principal """ def __init__(self): #---HABILITAR BOTAO TRUCO--- # Ative para ativar a opção de pedir truco.. self.truco_habilitado = 1 #-------------------- self.mensagem_servidor = "" self.carta_selecionada = -1 self.sua_vez = 0 self.conexao = Conexao() self.conexao.conectar() self.gui = Gui() self.jogador = Jogador() self.recebe_cartas() self.gui.carrega_cartas() #-------------------- self.pede_truco = "0" self.rodada = 1 self.gui.valor_rodada = "0" self.flag_truco = 0 self.gui.pontos = "0000" self.gui.partidas = "000" self.question_truco = "0" self.proposta_truco_equipe = "0" self.resposta_proposta_truco = "0" self.mesa_jogo = "000000" self.gui.mensagem_vez = "Aguarde..." self.gui.cont_cartas = 3 #----------------- self.quee = Queue() self.verifica = Thread(target=self.verifica_resposta_servidor, args=( self.quee, self.conexao)) self.verifica.daemon = True self.verifica.start() def atualiza_mensagem(self): "Atualiza o campo de mensagens.." if(self.sua_vez is 0): self.gui.mensagem_vez = "Aguarde..." self.gui.escrever(self.gui.mensagem_vez, (40, 430), (255, 0, 0)) if(self.sua_vez is 1): self.gui.mensagem_vez = "Sua Vez..." self.gui.escrever(self.gui.mensagem_vez, (40, 430), (0, 255, 0)) def agrupa_cartas(self, lista): """Agrupa as cartas recebidas do servidor""" final = "" c1 = "" for i in lista: c1 = c1 + i if(len(c1) == 2): final = final + c1 + "," c1 = "" lista = final.split(',') lista.pop() return lista def recebe_cartas(self): """ Carrega as cartas recebidas do servidor. Extrai os dados iniciais da primeira conexão. 
""" self.mensagem_servidor = self.conexao.ler_socket()<|fim▁hole|> #--Extrai os dados iniciais... self.jogador.id = self.mensagem_servidor[0:1] self.jogador.equipe = self.mensagem_servidor[1:2] self.sua_vez = int(self.mensagem_servidor[2:3]) cartas = self.mensagem_servidor[4:10] print "ID ", self.jogador.id, "Equipe ", self.jogador.equipe, "Sua Vez ", self.sua_vez self.jogador.cartas_mao = cartas cartas = self.agrupa_cartas(cartas) for i in cartas: self.gui.cartas_recebidas.append(i) def verifica_resposta_servidor(self, fila, conexao): """Verifica a conexao..""" while (True): palavra = conexao.ler_socket() if(palavra is not None): self.quee.put(palavra) def verifica_erro_mensagem(self,lista): """Verifica e corrige erro na mensagem recebida""" tamanho=len(lista) if(tamanho<30): lista = lista[ :0] + "00" + lista[1:] print "Mensagem corrigida ",lista return lista def processa_resposta(self, lista): """Vai processar a mensagem recebida""" self.mensagem_servidor = lista if(lista is not None): print "resposta vinda do servidor ", lista #lista = self.verifica_erro_mensagem(lista) self.sua_vez = int(lista[2:3]) self.atualiza_mensagem() self.finaliza_rodada(int(lista[3:4])) self.rodada = int(lista[3:4]) cartas = lista[4:10] if(cartas is not "000000"): pass else: # Considerando que nos decorrer das partida o servidor não envia as # cartas. Redefine a mão do jogador. 
self.gui.cartas_recebidas = [] self.jogador.cartas_mao = cartas cartas = self.agrupa_cartas(cartas) for i in cartas: self.gui.cartas_recebidas.append(i) self.gui.pontos = lista[10:14] self.gui.partidas = lista[14:17] self.gui.valor_rodada = lista[17:19] self.question_truco = lista[19:20] self.proposta_truco_equipe = lista[20:21] self.mesa_jogo = lista[22:30] self.renderiza_mesa() print self.sua_vez if(self.gui.cont_cartas > 1): self.gui.cont_cartas = self.gui.cont_cartas - 1 def renderiza_mesa(self): """Função que renderiza_mesa""" # 00 00 00 00 self.gui.caminho_cartas print self.mensagem_servidor cartas = self.agrupa_cartas(self.mesa_jogo) print "Cartas Mesa ", cartas cont = 0 for i in cartas: if not (i == "00" or i == "0"): i = self.gui.caminho_cartas + i + ".png" if(self.jogador.id == "0"): if cont is 0: self.gui.renderiza_cartas_jogadas( i, self.gui.sua_pos_carta) if cont is 1: self.gui.renderiza_cartas_jogadas( i, self.gui.pos_cartas_jog_1) self.gui.update_card_adversario( 1, self.gui.cont_cartas) if cont is 2: self.gui.renderiza_cartas_jogadas( i, self.gui.pos_cartas_jog_2) self.gui.update_card_adversario( 2, self.gui.cont_cartas) if cont is 3: self.gui.renderiza_cartas_jogadas( i, self.gui.pos_cartas_jog_3) self.gui.update_card_adversario( 3, self.gui.cont_cartas) elif(self.jogador.id == "1"): if cont is 0: self.gui.renderiza_cartas_jogadas( i, self.gui.pos_cartas_jog_3) self.gui.update_card_adversario( 3, self.gui.cont_cartas) elif cont is 1: self.gui.renderiza_cartas_jogadas( i, self.gui.sua_pos_carta) elif cont is 2: self.gui.renderiza_cartas_jogadas( i, self.gui.pos_cartas_jog_1) self.gui.update_card_adversario( 1, self.gui.cont_cartas) elif cont is 3: self.gui.renderiza_cartas_jogadas( i, self.gui.pos_cartas_jog_2) self.gui.update_card_adversario( 3, self.gui.cont_cartas) elif(self.jogador.id == "2"): if cont is 0: self.gui.renderiza_cartas_jogadas( i, self.gui.pos_cartas_jog_2) self.gui.update_card_adversario( 2, self.gui.cont_cartas) elif cont is 1: 
self.gui.renderiza_cartas_jogadas( i, self.gui.pos_cartas_jog_3) self.gui.update_card_adversario( 3, self.gui.cont_cartas) elif cont is 2: self.gui.renderiza_cartas_jogadas( i, self.gui.sua_pos_carta) elif cont is 3: self.gui.renderiza_cartas_jogadas( i, self.gui.pos_cartas_jog_1) self.gui.update_card_adversario( 1, self.gui.cont_cartas) elif (self.jogador.id == "3"): if cont is 0: self.gui.renderiza_cartas_jogadas( i, self.gui.pos_cartas_jog_1) self.gui.update_card_adversario( 1, self.gui.cont_cartas) elif cont is 1: self.gui.renderiza_cartas_jogadas( i, self.gui.pos_cartas_jog_2) self.gui.update_card_adversario( 2, self.gui.cont_cartas) elif cont is 2: self.gui.renderiza_cartas_jogadas( i, self.gui.pos_cartas_jog_3) self.gui.update_card_adversario( 3, self.gui.cont_cartas) elif cont is 3: self.gui.renderiza_cartas_jogadas( i, self.gui.sua_pos_carta) cont = cont + 1 def finaliza_rodada(self, valor): """Verifica se a rodada terminou e limpa a tela""" if(int(self.rodada) is not valor): self.gui.tela_padrao(self.jogador.equipe) print "Limpando a rodada" def prepara_mensagem(self, carta_jogada): """Prepara uma mensagem da carta jogada para o envio""" # Acerta a posicao da carta na mesa if(int(self.jogador.id) is 0): self.mensagem_servidor = self.mensagem_servidor[ :22] + carta_jogada + self.mensagem_servidor[24:] if(int(self.jogador.id) is 1): self.mensagem_servidor = self.mensagem_servidor[ :24] + carta_jogada + self.mensagem_servidor[26:] if(int(self.jogador.id) is 2): self.mensagem_servidor = self.mensagem_servidor[ :26] + carta_jogada + self.mensagem_servidor[28:] if(int(self.jogador.id) is 3): self.mensagem_servidor = self.mensagem_servidor[ :28] + carta_jogada + self.mensagem_servidor[30:] def verifica_proposta_truco(self): """Exibe a tela de Truco""" if(self.question_truco == "1") and self.sua_vez is 1: self.gui.tela_truco() self.flag_truco = 1 def solicita_truco(self): """Solicitar Truco""" if(self.sua_vez is 1): print "Solicitando Truco.." 
self.mensagem_servidor = self.mensagem_servidor[ :19] + self.pede_truco + self.mensagem_servidor[20:] print "Mensagem enviada na solicitação de Truco..", self.mensagem_servidor self.conexao.envia_mensagem(self.mensagem_servidor) self.pede_truco = "0" def responde_truco(self): """Envia uma mensagem para o servidor com a resposta do truco""" self.mensagem_servidor = self.mensagem_servidor[ :21] + self.resposta_proposta_truco + self.mensagem_servidor[22:] print "Enviando a Seguinte resposta de Truco ", self.mensagem_servidor self.conexao.envia_mensagem(self.mensagem_servidor) def envia_carta_servidor(self, carta_jogada): """Dispara cartas para o servidor e altera os campos necessarios..""" if carta_jogada is not None: carta_jogada = carta_jogada.split("/")[1].split(".")[0] # 1(ID)|a(Equipe)|0(vez)|0(rodada)|4p7c7o(mao)|0000(placar_jogo)|000(placar_rodada)|00(valor # rodada)|0(question)|0(equipe question)|0(resposta # truco)|00000000(mesa)|0(virada) self.prepara_mensagem(carta_jogada) # envia a mensagem para o servidor.. print "mensagem para o envio ", self.mensagem_servidor self.conexao.envia_mensagem(self.mensagem_servidor) def main(self): """Realiza a renderização..""" pygame.init() pygame.display.set_caption("Truco") pygame.DOUBLEBUF self.gui.iniciar() self.carta_selecionada = -1 select = 0 # print "Mensagem das Cartas ",self.mensagem_servidor while True: for event in pygame.event.get(): self.gui.mostra_pontuacao(self.jogador.equipe) self.gui.rodadas(self.jogador.equipe) self.atualiza_mensagem() self.verifica_proposta_truco() self.gui.desenha_botao_truco( self.gui.valor_rodada, self.proposta_truco_equipe) if event.type == QUIT: print "Encerrando conexão...." 
pygame.quit() sys.exit() self.verifica.exit() self.quee.join() if event.type == KEYDOWN and self.sua_vez == 1: op = event.unicode print op op = str(op) if op is "": op = str(event.key) print op if op == "1": self.gui.update_card( self.gui.mao[0], self.gui.pos_cartas_jog) self.carta_selecionada = 0 if op == "2": self.gui.update_card( self.gui.mao[1], self.gui.pos_cartas_jog) self.carta_selecionada = 1 if op == "3": self.gui.update_card( self.gui.mao[2], self.gui.pos_cartas_jog) self.carta_selecionada = 2 if (op == "275" or op == "276") and self.rodada is not 1: """Teclas de Seta esq e dir carta oculta """ self.gui.update_card( self.gui.mao[3], self.gui.pos_cartas_jog) self.carta_selecionada = 3 else: print "Jogada não permitida." if op == "273": print "carta jogada", self.gui.mao[self.carta_selecionada] if (self.carta_selecionada != -1): self.sua_vez = 1 # Bloqueia a mão .. self.envia_carta_servidor( self.gui.mao[self.carta_selecionada]) if self.carta_selecionada is not 3: self.gui.mao[self.carta_selecionada] = None self.gui.verifica_mao(self.gui.mao, self.conexao) if event.type == MOUSEBUTTONDOWN and select == 0: """Define a mudança da tela""" print event.button, event.pos fundo = pygame.image.load( self.gui.caminho_background + "fundo.jpg") self.gui.novo_tamanho_janela() self.gui.tela.blit(fundo, [0, 0]) self.gui.update_card_adversario(0, 3) self.gui.escrever( "Para selecionar cartas escolha [1,2,3]", (30, 30), self.gui.branco) self.gui.escrever( "Para Jogar a carta utilize seta para frente", ( 30, 50), self.gui.branco) self.gui.escrever( "Utilize as setas direcionais para ocultar", (30, 70), self.gui.branco) select = 1 if event.type == MOUSEBUTTONDOWN and self.sua_vez == 1: pos = event.pos print "Posicao ", pos if (pos[0] > 670 and pos[0] < 780): if(pos[1] > 471 and pos[1] < 471 + 20): # self.gui.desenha_botao_truco(self.gui.valor_rodada) if (self.truco_habilitado is 1): print "entrouuu" print "Variaveis do truco Sua Vez ", self.sua_vez, type(self.sua_vez), 
"Minha equipe ", self.jogador.equipe, type(self.jogador.equipe), "Proposta truco equipe ", self.proposta_truco_equipe, type(self.proposta_truco_equipe) if(self.sua_vez is 1 and (self.jogador.equipe == self.proposta_truco_equipe or self.proposta_truco_equipe == "0")): print "pedindo truco" self.pede_truco = "1" self.solicita_truco() self.flag_truco = 1 else: print self.gui.mao print "Não é permitido pedir truco na mão de 12" else: print "A opção de truco não está Habilitada." if (pos[0] > 363 and pos[0] < 392) and self.flag_truco is 1: if (pos[1] > 236 and pos[1] < 276): print "Truco Aceito" self.resposta_proposta_truco = "1" self.responde_truco() self.gui.tela_padrao(self.jogador.equipe) self.flag_truco = 0 if (pos[0] > 410 and pos[0] < 441) and self.flag_truco is 1: if (pos[1] > 237 and pos[1] < 266): print "Truco Não Foi aceito" self.gui.tela_padrao(self.jogador.equipe) self.resposta_proposta_truco = "0" se.responde_truco() self.flag_truco = 0 # self.cartas_jogadas() pygame.display.update() for i in range(0, 1): # Percorre a fila lendo as mensagens recebidas do servidor if not self.quee.empty(): retorno = self.quee.get(i) self.verifica_erro_mensagem(retorno) self.processa_resposta(retorno) # Adiciona um evento na pilha de eventos para atualizar a # tela. evento = pygame.event.Event(USEREVENT) pygame.event.post(evento) if __name__ == '__main__': new = Principal() new.main()<|fim▁end|>
<|file_name|>processor.py<|end_file_name|><|fim▁begin|># coding: utf-8 import os import shutil from nxdrive.client.base_automation_client import DOWNLOAD_TMP_FILE_PREFIX, \ DOWNLOAD_TMP_FILE_SUFFIX from nxdrive.engine.processor import Processor as OldProcessor from nxdrive.logging_config import get_logger log = get_logger(__name__) class Processor(OldProcessor): def __init__(self, engine, item_getter, name=None): super(Processor, self).__init__(engine, item_getter, name) def acquire_state(self, row_id): log.warning("acquire...") result = super(Processor, self).acquire_state(row_id) if result is not None and self._engine.get_local_watcher().is_pending_scan(result.local_parent_path): self._dao.release_processor(self._thread_id) # Postpone pair for watcher delay self._engine.get_queue_manager().postpone_pair(result, self._engine.get_local_watcher().get_scan_delay()) return None log.warning("Acquired: %r", result) return result def _get_partial_folders(self): local_client = self._engine.get_local_client() if not local_client.exists('/.partials'): local_client.make_folder('/', '.partials') return local_client.abspath('/.partials') def _download_content(self, local_client, remote_client, doc_pair, file_path): # TODO Should share between threads file_out = os.path.join(self._get_partial_folders(), DOWNLOAD_TMP_FILE_PREFIX + doc_pair.remote_digest + str(self._thread_id) + DOWNLOAD_TMP_FILE_SUFFIX) # Check if the file is already on the HD pair = self._dao.get_valid_duplicate_file(doc_pair.remote_digest) if pair: shutil.copy(local_client.abspath(pair.local_path), file_out) return file_out tmp_file = remote_client.stream_content( doc_pair.remote_ref, file_path, parent_fs_item_id=doc_pair.remote_parent_ref, file_out=file_out) self._update_speed_metrics() return tmp_file def _update_remotely(self, doc_pair, local_client, remote_client, is_renaming): log.warning("_update_remotely") os_path = local_client.abspath(doc_pair.local_path) if is_renaming: new_os_path = 
os.path.join(os.path.dirname(os_path), doc_pair.remote_name) log.debug("Replacing local file '%s' by '%s'.", os_path, new_os_path) else: new_os_path = os_path log.debug("Updating content of local file '%s'.", os_path) tmp_file = self._download_content(local_client, remote_client, doc_pair, new_os_path) # Delete original file and rename tmp file local_client.delete_final(doc_pair.local_path) rel_path = local_client.get_path(tmp_file) local_client.set_remote_id(rel_path, doc_pair.remote_ref) # Move rename updated_info = local_client.move(rel_path, doc_pair.local_parent_path, doc_pair.remote_name) doc_pair.local_digest = updated_info.get_digest() self._dao.update_last_transfer(doc_pair.id, "download") self._refresh_local_state(doc_pair, updated_info) def _create_remotely(self, local_client, remote_client, doc_pair, parent_pair, name): local_parent_path = parent_pair.local_path # TODO Shared this locking system / Can have concurrent lock self._unlock_readonly(local_client, local_parent_path) tmp_file = None try: if doc_pair.folderish: log.debug("Creating local folder '%s' in '%s'", name, local_client.abspath(parent_pair.local_path)) # Might want do temp name to original path = local_client.make_folder(local_parent_path, name) else: path, os_path, name = local_client.get_new_file(local_parent_path,<|fim▁hole|> # Move file to its folder - might want to split it in two for events local_client.move(local_client.get_path(tmp_file),local_parent_path, name) self._dao.update_last_transfer(doc_pair.id, "download") finally: self._lock_readonly(local_client, local_parent_path) # Clean .nxpart if needed if tmp_file is not None and os.path.exists(tmp_file): os.remove(tmp_file) return path<|fim▁end|>
name) tmp_file = self._download_content(local_client, remote_client, doc_pair, os_path) log.debug("Creating local file '%s' in '%s'", name, local_client.abspath(parent_pair.local_path))
<|file_name|>pyplot_index_formatter.py<|end_file_name|><|fim▁begin|>import numpy as np import matplotlib.pyplot as plt import matplotlib.mlab as mlab import matplotlib.ticker as ticker r = mlab.csv2rec('data/imdb.csv') r.sort() r = r[-30:] # get the last 30 days<|fim▁hole|>def format_date(x, pos=None): thisind = np.clip(int(x+0.5), 0, N-1) return r.date[thisind].strftime('%Y-%m-%d') fig = plt.figure() ax = fig.add_subplot(111) ax.plot(ind, r.adj_close, 'o-') ax.xaxis.set_major_formatter(ticker.FuncFormatter(format_date)) fig.autofmt_xdate() plt.show()<|fim▁end|>
N = len(r) ind = np.arange(N) # the evenly spaced plot indices
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2013-2015 Sandstorm Development Group, Inc. and contributors // Licensed under the MIT License: // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. //! # Cap'n Proto Runtime Library //! //! [Cap'n Proto](https://capnproto.org) is an extremely efficient protocol for //! sharing data and capabilities. //! //! The Rust implementation is split into three separate crates. //! //! Code generation is handled by [capnpc-rust](https://github.com/dwrensha/capnpc-rust). //! //! The present crate is the runtime library required by that generated code. It is hosted on Github //! [here](https://github.com/dwrensha/capnproto-rust). //! //! [capnp-rpc-rust](https://github.com/dwrensha/capnp-rpc-rust) is an implementation of a //! distributed object-capability layer. 
#![allow(raw_pointer_derive)] #![crate_name="capnp"] #![crate_type = "lib"] extern crate byteorder; #[cfg(test)] extern crate quickcheck; pub mod any_pointer; pub mod capability; pub mod data; pub mod data_list; pub mod enum_list; pub mod list_list; pub mod message; pub mod primitive_list; pub mod private; pub mod serialize; pub mod serialize_packed; pub mod struct_list; pub mod text; pub mod text_list; pub mod traits; mod util; /// Eight bytes of memory with opaque interior. /// /// This type is used to ensure that the data of a message is properly aligned. #[derive(Clone, Copy, Debug, PartialEq, Eq)] #[repr(C)] pub struct Word(u64); impl Word { /// Does this, but faster: /// `::std::iter::repeat(Word(0)).take(length).collect()` pub fn allocate_zeroed_vec(length: usize) -> Vec<Word> { let mut result : Vec<Word> = Vec::with_capacity(length); unsafe { result.set_len(length); let p : *mut u8 = result.as_mut_ptr() as *mut u8; ::std::ptr::write_bytes(p, 0u8, length * ::std::mem::size_of::<Word>()); } return result; } pub fn bytes_to_words<'a>(bytes: &'a [u8]) -> &'a [Word] { unsafe { ::std::slice::from_raw_parts(bytes.as_ptr() as *const Word, bytes.len() / 8) } } pub fn words_to_bytes<'a>(words: &'a [Word]) -> &'a [u8] { unsafe { ::std::slice::from_raw_parts(words.as_ptr() as *const u8, words.len() * 8) } } pub fn words_to_bytes_mut<'a>(words: &'a mut [Word]) -> &'a mut [u8] { unsafe { ::std::slice::from_raw_parts_mut(words.as_mut_ptr() as *mut u8, words.len() * 8) } } #[cfg(test)] pub fn from(n: u64) -> Word { Word(n) } } #[cfg(test)] impl quickcheck::Arbitrary for Word { fn arbitrary<G: quickcheck::Gen>(g: &mut G) -> Word { Word(quickcheck::Arbitrary::arbitrary(g)) } fn shrink(&self) -> Box<Iterator<Item=Word>+'static> { Box::new(quickcheck::Arbitrary::shrink(&self.0).map(|value| Word(value))) } } /// Size of a message. Every generated struct has a method `.total_size()` that returns this. 
#[derive(Clone, Copy, PartialEq)] pub struct MessageSize {<|fim▁hole|> pub cap_count : u32 } impl MessageSize { pub fn plus_eq(&mut self, other : MessageSize) { self.word_count += other.word_count; self.cap_count += other.cap_count; } } /// An enum value or union discriminant that was not found among those defined in a schema. #[derive(PartialEq, Clone, Copy, Debug)] pub struct NotInSchema(pub u16); impl ::std::fmt::Display for NotInSchema { fn fmt(&self, fmt : &mut ::std::fmt::Formatter) -> ::std::result::Result<(), ::std::fmt::Error> { write!(fmt, "Enum value or union discriminant {} was not present in the schema.", self.0) } } impl ::std::error::Error for NotInSchema { fn description(&self) -> &str { "Enum value or union disriminant was not present in schema." } } /// Because messages are lazily validated, the return type of any method that reads a pointer field /// must be wrapped in a Result. pub type Result<T> = ::std::result::Result<T, Error>; /// Things that can go wrong when you read a message. #[derive(Debug)] pub enum Error { Decode { description : &'static str, detail : Option<String> }, Io(std::io::Error), } impl Error { pub fn new_decode_error(description : &'static str, detail : Option<String>) -> Error { Error::Decode { description : description, detail : detail} } } impl ::std::convert::From<::std::io::Error> for Error { fn from(err : ::std::io::Error) -> Error { Error::Io(err) } } impl ::std::convert::From<NotInSchema> for Error { fn from(e : NotInSchema) -> Error { Error::new_decode_error("Enum value or union discriminant was not present in schema.", Some(format!("value : {}", e.0))) } } impl ::std::fmt::Display for Error { fn fmt(&self, fmt : &mut ::std::fmt::Formatter) -> ::std::result::Result<(), ::std::fmt::Error> { match *self { Error::Decode { ref description, detail : Some(ref detail) } => { write!(fmt, "{} {}", description, detail) }, Error::Decode { ref description, .. 
} => write!(fmt, "{}", description), Error::Io(ref io) => io.fmt(fmt), } } } impl ::std::error::Error for Error { fn description(&self) -> &str { match *self { Error::Decode { ref description, .. } => description, Error::Io(ref io) => ::std::error::Error::description(io), } } fn cause(&self) -> Option<&::std::error::Error> { match *self { Error::Decode { .. } => None, Error::Io(ref io) => io.cause(), } } } /// Helper struct that allows `MessageBuilder::get_segments_for_output()` to avoid heap allocations /// in the single-segment case. pub enum OutputSegments<'a> { #[doc(hidden)] SingleSegment([&'a [Word]; 1]), #[doc(hidden)] MultiSegment(Vec<&'a [Word]>), } impl <'a> ::std::ops::Deref for OutputSegments<'a> { type Target = [&'a [Word]]; fn deref<'b>(&'b self) -> &'b [&'a [Word]] { match self { &OutputSegments::SingleSegment(ref s) => { s } &OutputSegments::MultiSegment(ref v) => { &*v } } } }<|fim▁end|>
pub word_count : u64, /// Size of the capability table.
<|file_name|>index.test.js<|end_file_name|><|fim▁begin|>/* eslint-env jest */ import fs from 'fs-extra' import { join } from 'path' import { killApp, findPort, launchApp, nextStart, nextBuild, fetchViaHTTP, } from 'next-test-utils' import webdriver from 'next-webdriver' import cheerio from 'cheerio' jest.setTimeout(1000 * 60 * 2) const appDir = join(__dirname, '../') const gip404Err = /`pages\/404` can not have getInitialProps\/getServerSideProps/ let appPort let app describe('404 Page Support with _app', () => { describe('production mode', () => { afterAll(() => killApp(app)) it('should build successfully', async () => { const { code, stderr, stdout } = await nextBuild(appDir, [], { stderr: true, stdout: true, }) expect(code).toBe(0) expect(stderr).not.toMatch(gip404Err) expect(stdout).not.toMatch(gip404Err) appPort = await findPort() app = await nextStart(appDir, appPort) }) it('should not output static 404 if _app has getInitialProps', async () => { const browser = await webdriver(appPort, '/404') const isAutoExported = await browser.eval('__NEXT_DATA__.autoExport') expect(isAutoExported).toBe(null) }) it('specify to use the 404 page still in the routes-manifest', async () => { const manifest = await fs.readJSON( join(appDir, '.next/routes-manifest.json') ) expect(manifest.pages404).toBe(true) }) it('should still use 404 page', async () => { const res = await fetchViaHTTP(appPort, '/abc') expect(res.status).toBe(404) const $ = cheerio.load(await res.text()) expect($('#404-title').text()).toBe('Hi There') }) }) describe('dev mode', () => { let stderr = '' let stdout = '' beforeAll(async () => { appPort = await findPort() app = await launchApp(appDir, appPort, { onStderr(msg) { stderr += msg }, onStdout(msg) { stdout += msg }, }) }) afterAll(() => killApp(app))<|fim▁hole|> const res = await fetchViaHTTP(appPort, '/abc') expect(res.status).toBe(404) const $ = cheerio.load(await res.text()) expect($('#404-title').text()).toBe('Hi There') 
expect(stderr).not.toMatch(gip404Err) expect(stdout).not.toMatch(gip404Err) }) }) })<|fim▁end|>
it('should not show pages/404 GIP error if _app has GIP', async () => {
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|># -*- coding: utf-8 -*- from plaso.parsers.bencode_plugins import transmission from plaso.parsers.bencode_plugins import utorrent<|fim▁end|>
<|file_name|>conf.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # # complexity documentation build configuration file, created by # sphinx-quickstart on Tue Jul 9 22:26:36 2013. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys import os # If extensions (or modules to document with autodoc) are in another # directory, add these directories to sys.path here. If the directory is # relative to the documentation root, use os.path.abspath to make it # absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) # Get the project root dir, which is the parent dir of this cwd = os.getcwd() project_root = os.path.dirname(cwd) # Insert the project root dir as the first element in the PYTHONPATH. # This lets us ensure that the source package is imported, and that its # version is used. sys.path.insert(0, project_root) import mavlog # -- General configuration --------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. 
project = u'MAVLog' copyright = u'2014, Gareth R' # The version info for the project you're documenting, acts as replacement # for |version| and |release|, also used in various other places throughout # the built documents. # # The short X.Y version. version = mavlog.__version__ # The full version, including alpha/beta/rc tags. release = mavlog.__version__ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to # some non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built # documents. #keep_warnings = False # -- Options for HTML output ------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a # theme further. For a list of options available for each theme, see the # documentation. 
#html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as # html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the # top of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon # of the docs. This file should be a Windows icon file (.ico) being # 16x16 or 32x32 pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) # here, relative to this directory. They are copied after the builtin # static files, so a file named "default.css" will overwrite the builtin # "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page # bottom, using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names # to template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. # Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. # Default is True. 
#html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages # will contain a <link> tag referring to it. The value of this option # must be the base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'mavlogdoc' # -- Options for LaTeX output ------------------------------------------ latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass # [howto/manual]). latex_documents = [ ('index', 'mavlog.tex', u'MAVLog Documentation', u'Gareth R', 'manual'), ] # The name of an image file (relative to this directory) to place at # the top of the title page. #latex_logo = None<|fim▁hole|># are parts, not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output ------------------------------------ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'mavlog', u'MAVLog Documentation', [u'Gareth R'], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ---------------------------------------- # Grouping the document tree into Texinfo files. 
List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'mavlog', u'MAVLog Documentation', u'Gareth R', 'mavlog', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. #texinfo_no_detailmenu = False<|fim▁end|>
# For "manual" documents, if this is true, then toplevel headings
<|file_name|>form.e2e-spec.ts<|end_file_name|><|fim▁begin|>import { browser, element, by } from 'protractor'; describe('Form Component', function () { var _modelName: string, _modelDesc: string, _modelStatus: string; beforeEach(function () { }); it('should display: the correct url', function(){ browser.get('addTicket'); browser.getCurrentUrl().then(function (url) { expect(url).toEqual(browser.baseUrl + 'addTicket'); }); }); it('should display: Title', function () { expect(element(by.css('h1')).getText()).toEqual('Novo Ticket'); }); it('should display: Subtitle', function () { expect(element(by.css('p')).getText()).toEqual('Formulário para adição de um novo ticket.'); }); it('should display: form', function () { var _form = element.all(by.tagName('label')); expect(_form.get(0).getText()).toEqual('Nome:'); expect(_form.get(1).getText()).toEqual('Descrição:'); expect(_form.get(2).getText()).toEqual('Status:'); }); it('should display: input name', function () { var _name = browser.findElement(by.name('nome')); _name.sendKeys('A'); var _model = browser.findElement(by.name('nome')); _model.getAttribute('value').then(function (value) { _modelName = value; expect(value).toEqual('A'); }); }); it('should display: input description', function () { var _desc = browser.findElement(by.name('descrição')); _desc.sendKeys('Primeira letra do alfabeto'); var _model = browser.findElement(by.name('descrição')); _model.getAttribute('value').then(function (value) { _modelDesc = value; expect(value).toEqual('Primeira letra do alfabeto'); }); }); it('should display: input status', function () { var _sta = browser.findElement(by.name('status')); _sta.sendKeys('TO DO'); var _model = browser.findElement(by.name('status')); _model.getAttribute('value').then(function (value) { _modelStatus = value; expect(value).toEqual('TO DO'); }); }); it('should display: add ticket', function () { var _button = browser.findElement(by.name('salvar')); _button.click().then(function () { var el = 
element.all(by.css('.table tbody tr')); el.count().then(function (count){ expect(count === 1).toBeTruthy();; }); }); }); it('should display: edit ticket', function () { var _button = browser.findElement(by.name('editar')); _button.click().then(function () { expect(_modelStatus).toEqual('TO DO'); }); }); it('should display: delete ticket', function () { var _button = browser.findElement(by.name('remover')); _button.click().then(function () { var el = element.all(by.css('.table tbody tr')); el.count().then(function (count){ expect(count === 0).toBeTruthy(); }); }); }); it('should display: titles of table', function () { var _elementTable = element.all(by.css('.table th')); expect(_elementTable.get(0).getText()).toEqual('Id'); expect(_elementTable.get(1).getText()).toEqual('Nome'); expect(_elementTable.get(2).getText()).toEqual('Descrição'); expect(_elementTable.get(3).getText()).toEqual('Status');<|fim▁hole|><|fim▁end|>
}); });
<|file_name|>premailer_from_file_test.go<|end_file_name|><|fim▁begin|>package premailer import ( "testing" "github.com/stretchr/testify/assert" ) func TestBasicHTMLFromFile(t *testing.T) { p, err := NewPremailerFromFile("data/markup_test.html", nil) assert.Nil(t, err) resultHTML, err := p.Transform() assert.Nil(t, err) assert.Contains(t, resultHTML, "<h1 style=\"width:50px;color:red\" width=\"50\">Hi!</h1>")<|fim▁hole|>} func TestFromFileNotFound(t *testing.T) { p, err := NewPremailerFromFile("data/blablabla.html", nil) assert.NotNil(t, err) assert.Nil(t, p) }<|fim▁end|>
assert.Contains(t, resultHTML, "<h2 style=\"vertical-align:top\">There</h2>") assert.Contains(t, resultHTML, "<h3 style=\"text-align:right\">Hello</h3>") assert.Contains(t, resultHTML, "<p><strong style=\"text-decoration:none\">Yes!</strong></p>") assert.Contains(t, resultHTML, "<div style=\"background-color:green\">Green color</div>")
<|file_name|>NfcCorkboard_hi.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?> <!DOCTYPE TS> <TS version="2.0" language="hi_IN"> <defaultcodec>UTF-8</defaultcodec> <context> <name>NdefManager</name> <message> <location filename="../ndefmanager.cpp" line="95"/> <location filename="../ndefmanager.cpp" line="321"/> <source>NFC hardware is available but currently switched off.</source> <translation type="unfinished"></translation> </message> <message><|fim▁hole|> </message> <message> <location filename="../ndefmanager.cpp" line="102"/> <source>Update device firmware to enable NFC support.</source> <translation>यह अनुप्रयोग चलाने के लिए कृपया अपनी डिवाइस फार्मबेयार अपडेट करें</translation> </message> <message> <location filename="../ndefmanager.cpp" line="106"/> <source>NFC not supported by this device.</source> <translation type="unfinished"></translation> </message> <message> <location filename="../ndefmanager.cpp" line="305"/> <source>NFC is not currently supported.</source> <translation type="unfinished"></translation> </message> <message> <location filename="../ndefmanager.cpp" line="309"/> <source>NFC is supported, but the current mode is unknown at this time.</source> <translation type="unfinished"></translation> </message> <message> <location filename="../ndefmanager.cpp" line="317"/> <source>NFC hardware is available and currently in card emulation mode.</source> <translation type="unfinished"></translation> </message> <message> <location filename="../ndefmanager.cpp" line="337"/> <source>Unable to query NFC feature support.</source> <translation type="unfinished"></translation> </message> <message> <location filename="../ndefmanager.cpp" line="341"/> <source>Unable to query device software version.</source> <translation type="unfinished"></translation> </message> <message> <location filename="../ndefmanager.cpp" line="345"/> <source>Unable to request NFC mode change notifications.</source> <translation type="unfinished"></translation> 
</message> <message> <location filename="../ndefmanager.cpp" line="349"/> <source>NFC mode change notification was received, but caused an error.</source> <translation type="unfinished"></translation> </message> <message> <location filename="../ndefmanager.cpp" line="353"/> <source>Unable to retrieve current NFC mode.</source> <translation type="unfinished"></translation> </message> </context> </TS><|fim▁end|>
<location filename="../ndefmanager.cpp" line="97"/> <location filename="../ndefmanager.cpp" line="313"/> <source>NFC is supported and switched on.</source> <translation type="unfinished"></translation>
<|file_name|>UpdatedVersionListenerNotifier.java<|end_file_name|><|fim▁begin|>/******************************************************************************* * This file is part of RedReader. * * RedReader is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * RedReader is distributed in the hope that it will be useful,<|fim▁hole|> * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with RedReader. If not, see <http://www.gnu.org/licenses/>. ******************************************************************************/ package org.quantumbadger.redreader.io; import org.quantumbadger.redreader.common.collections.WeakReferenceListManager; public class UpdatedVersionListenerNotifier<K, V extends WritableObject<K>> implements WeakReferenceListManager.ArgOperator<UpdatedVersionListener<K, V>, V> { @Override public void operate(final UpdatedVersionListener<K, V> listener, final V data) { listener.onUpdatedVersion(data); } }<|fim▁end|>
* but WITHOUT ANY WARRANTY; without even the implied warranty of
<|file_name|>do3.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. fn f(f: @fn(int) -> int) -> int { f(10) }<|fim▁hole|><|fim▁end|>
pub fn main() { assert!(do f |i| { i } == 10); }
<|file_name|>EmployeeDepartment.java<|end_file_name|><|fim▁begin|>package com.github.kuros.random.jpa.testUtil.entity; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.Table; @Entity @Table(name = "employee_department") public class EmployeeDepartment { @Id @GeneratedValue(strategy = GenerationType.IDENTITY) private Long id; @Column(name = "employee_id") private Long employeeId; @Column(name = "shift_id") private Integer shiftId; @Column(name = "department_id") private Integer departmentId; public Long getId() { return id; } public void setId(final Long id) { this.id = id; } public Long getEmployeeId() { return employeeId; } public void setEmployeeId(final Long employeeId) { this.employeeId = employeeId; } public Integer getShiftId() { return shiftId; }<|fim▁hole|> public void setShiftId(final Integer shiftId) { this.shiftId = shiftId; } public Integer getDepartmentId() { return departmentId; } public void setDepartmentId(final Integer departmentId) { this.departmentId = departmentId; } @Override public boolean equals(final Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; final EmployeeDepartment that = (EmployeeDepartment) o; return !(id != null ? !id.equals(that.id) : that.id != null); } @Override public int hashCode() { return id != null ? id.hashCode() : 0; } }<|fim▁end|>
<|file_name|>notes.go<|end_file_name|><|fim▁begin|>package main import ( "net/http" "sync" "time" "github.com/1lann/airlift/airlift" humanize "github.com/dustin/go-humanize" "github.com/gin-gonic/contrib/renders/multitemplate" "github.com/gin-gonic/contrib/sessions" "github.com/gin-gonic/gin" ) func formatBasicTime(t time.Time) string { return getDay(t) + " " + t.Format("January 2006 at 3:04 PM") } func init() { registers = append(registers, func(r *gin.RouterGroup, t multitemplate.Render) { t.AddFromFiles("notes", viewsPath+"/notes.tmpl", viewsPath+"/components/base.tmpl") r.GET("/notes", viewUserNotes) t.AddFromFiles("view-note", viewsPath+"/view-note.tmpl", viewsPath+"/components/base.tmpl") r.GET("/notes/:id", viewNote) r.POST("/notes/:id/star", func(c *gin.Context) { starred := c.PostForm("starred") == "true" username := c.MustGet("user").(airlift.User).Username err := airlift.SetNoteStar(c.Param("id"), username, starred) if err != nil { panic(err) } c.JSON(http.StatusOK, gin.H{"status": "ok"}) }) }) } func viewNote(c *gin.Context) { id := c.Param("id") user := c.MustGet("user").(airlift.User) note, err := airlift.GetFullNote(id, user.Username) if err != nil { panic(err) } if note.Title == "" { c.HTML(http.StatusNotFound, "not-found", nil) return } files := []fileCard{ { Name: "Notes", Size: humanize.Bytes(note.Size), URL: "/download/notes/" + note.ID, }, } session := sessions.Default(c) uploadFlashes := session.Flashes("upload") uploadSuccess := "" if len(uploadFlashes) > 0 { uploadSuccess = uploadFlashes[0].(string) } session.Save() htmlOK(c, "view-note", gin.H{ "ActiveMenu": "notes", "Note": note, "Files": files, "IsUploader": note.Uploader == user.Username, "UploadTime": formatBasicTime(note.UploadTime), "UpdatedTime": formatBasicTime(note.UpdatedTime), "UploadSuccess": uploadSuccess, }) } func viewUserNotes(c *gin.Context) { user := c.MustGet("user").(airlift.User) wg := new(sync.WaitGroup) wg.Add(2) var starred []airlift.Note go func() { defer func() { 
wg.Done() }() var err error starred, err = airlift.GetStarredNotes(user.Username) if err != nil { panic(err) } }() var uploaded []airlift.Note go func() { defer func() { wg.Done() }() var err error<|fim▁hole|> } }() deleted := false session := sessions.Default(c) uploadFlashes := session.Flashes("upload") if len(uploadFlashes) > 0 && uploadFlashes[0] == "delete" { deleted = true } session.Save() wg.Wait() htmlOK(c, "notes", gin.H{ "ActiveMenu": "notes", "Starred": starred, "Uploaded": uploaded, "Deleted": deleted, }) }<|fim▁end|>
uploaded, err = airlift.GetUploadedNotes(user.Username) if err != nil { panic(err)
<|file_name|>config.go<|end_file_name|><|fim▁begin|>package run import ( "errors" "fmt" "os" "os/user" "path/filepath" "reflect" "strconv" "strings" "time" "github.com/influxdata/kapacitor/services/alerta" "github.com/influxdata/kapacitor/services/deadman" "github.com/influxdata/kapacitor/services/hipchat" "github.com/influxdata/kapacitor/services/httpd" "github.com/influxdata/kapacitor/services/influxdb" "github.com/influxdata/kapacitor/services/logging" "github.com/influxdata/kapacitor/services/opsgenie" "github.com/influxdata/kapacitor/services/pagerduty" "github.com/influxdata/kapacitor/services/replay" "github.com/influxdata/kapacitor/services/reporting" "github.com/influxdata/kapacitor/services/sensu" "github.com/influxdata/kapacitor/services/slack" "github.com/influxdata/kapacitor/services/smtp" "github.com/influxdata/kapacitor/services/stats" "github.com/influxdata/kapacitor/services/talk" "github.com/influxdata/kapacitor/services/task_store" "github.com/influxdata/kapacitor/services/udf" "github.com/influxdata/kapacitor/services/udp" "github.com/influxdata/kapacitor/services/victorops" "github.com/influxdata/influxdb/services/collectd" "github.com/influxdata/influxdb/services/graphite" "github.com/influxdata/influxdb/services/opentsdb" ) // Config represents the configuration format for the kapacitord binary. 
type Config struct { HTTP httpd.Config `toml:"http"` Replay replay.Config `toml:"replay"` Task task_store.Config `toml:"task"` InfluxDB []influxdb.Config `toml:"influxdb"` Logging logging.Config `toml:"logging"` Graphites []graphite.Config `toml:"graphite"` Collectd collectd.Config `toml:"collectd"` OpenTSDB opentsdb.Config `toml:"opentsdb"` UDPs []udp.Config `toml:"udp"` SMTP smtp.Config `toml:"smtp"` OpsGenie opsgenie.Config `toml:"opsgenie"` VictorOps victorops.Config `toml:"victorops"` PagerDuty pagerduty.Config `toml:"pagerduty"` Sensu sensu.Config `toml:"sensu"` Slack slack.Config `toml:"slack"` HipChat hipchat.Config `toml:"hipchat"` Alerta alerta.Config `toml:"alerta"` Reporting reporting.Config `toml:"reporting"` Stats stats.Config `toml:"stats"` UDF udf.Config `toml:"udf"` Deadman deadman.Config `toml:"deadman"` Talk talk.Config `toml:"talk"` Hostname string `toml:"hostname"` DataDir string `toml:"data_dir"` // The index of the default InfluxDB config defaultInfluxDB int } // NewConfig returns an instance of Config with reasonable defaults. func NewConfig() *Config { c := &Config{ Hostname: "localhost", } c.HTTP = httpd.NewConfig() c.Replay = replay.NewConfig() c.Task = task_store.NewConfig() c.Logging = logging.NewConfig() c.Collectd = collectd.NewConfig() c.OpenTSDB = opentsdb.NewConfig() c.SMTP = smtp.NewConfig() c.OpsGenie = opsgenie.NewConfig() c.VictorOps = victorops.NewConfig() c.PagerDuty = pagerduty.NewConfig() c.Sensu = sensu.NewConfig() c.Slack = slack.NewConfig() c.HipChat = hipchat.NewConfig() c.Alerta = alerta.NewConfig() c.Reporting = reporting.NewConfig() c.Stats = stats.NewConfig() c.UDF = udf.NewConfig() c.Deadman = deadman.NewConfig() c.Talk = talk.NewConfig() return c } // Once the config has been created and decoded, you can call this method // to initialize ARRAY attributes. 
// All ARRAY attributes have to be init after toml decode // See: https://github.com/BurntSushi/toml/pull/68 func (c *Config) PostInit() { if len(c.InfluxDB) == 0 { i := influxdb.NewConfig() c.InfluxDB = []influxdb.Config{i} c.InfluxDB[0].Name = "default" c.InfluxDB[0].URLs = []string{"http://localhost:8086"} } else if len(c.InfluxDB) == 1 && c.InfluxDB[0].Name == "" { c.InfluxDB[0].Name = "default" } } // NewDemoConfig returns the config that runs when no config is specified. func NewDemoConfig() (*Config, error) { c := NewConfig() c.PostInit() var homeDir string // By default, store meta and data files in current users home directory u, err := user.Current() if err == nil { homeDir = u.HomeDir } else if os.Getenv("HOME") != "" { homeDir = os.Getenv("HOME") } else { return nil, fmt.Errorf("failed to determine current user for storage") } c.Replay.Dir = filepath.Join(homeDir, ".kapacitor", c.Replay.Dir) c.Task.Dir = filepath.Join(homeDir, ".kapacitor", c.Task.Dir) c.DataDir = filepath.Join(homeDir, ".kapacitor", c.DataDir) return c, nil } // Validate returns an error if the config is invalid. func (c *Config) Validate() error { if c.Hostname == "" { return fmt.Errorf("must configure valid hostname") } if c.DataDir == "" { return fmt.Errorf("must configure valid data dir") } err := c.Replay.Validate() if err != nil { return err } err = c.Task.Validate() if err != nil { return err } c.defaultInfluxDB = -1 names := make(map[string]bool, len(c.InfluxDB)) for i := 0; i < len(c.InfluxDB); i++ { config := c.InfluxDB[i] if !config.Enabled { c.InfluxDB = append(c.InfluxDB[0:i], c.InfluxDB[i+1:]...) 
i-- continue } if names[config.Name] { return fmt.Errorf("duplicate name %q for influxdb configs", config.Name) } names[config.Name] = true err = config.Validate() if err != nil { return err } if config.Default { if c.defaultInfluxDB != -1 { return fmt.Errorf("More than one InfluxDB default was specified: %s %s", config.Name, c.InfluxDB[c.defaultInfluxDB].Name) } c.defaultInfluxDB = i } } // Set default if it is the only one if len(c.InfluxDB) == 1 { c.defaultInfluxDB = 0 } if len(c.InfluxDB) > 0 && c.defaultInfluxDB == -1 { return errors.New("at least one InfluxDB cluster must be marked as default.") } err = c.UDF.Validate() if err != nil { return err } err = c.Sensu.Validate() if err != nil { return err } err = c.Talk.Validate() if err != nil { return err } for _, g := range c.Graphites { if err := g.Validate(); err != nil { return fmt.Errorf("invalid graphite config: %v", err) } } return nil } func (c *Config) ApplyEnvOverrides() error { return c.applyEnvOverrides("KAPACITOR", "", reflect.ValueOf(c)) }<|fim▁hole|> // If we have a pointer, dereference it s := spec if spec.Kind() == reflect.Ptr { s = spec.Elem() } var value string if s.Kind() != reflect.Struct { value = os.Getenv(prefix) // Skip any fields we don't have a value to set if value == "" { return nil } if fieldDesc != "" { fieldDesc = " to " + fieldDesc } } switch s.Kind() { case reflect.String: s.SetString(value) case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: var intValue int64 // Handle toml.Duration if s.Type().Name() == "Duration" { dur, err := time.ParseDuration(value) if err != nil { return fmt.Errorf("failed to apply %v%v using type %v and value '%v'", prefix, fieldDesc, s.Type().String(), value) } intValue = dur.Nanoseconds() } else { var err error intValue, err = strconv.ParseInt(value, 0, s.Type().Bits()) if err != nil { return fmt.Errorf("failed to apply %v%v using type %v and value '%v'", prefix, fieldDesc, s.Type().String(), value) } } s.SetInt(intValue) case 
reflect.Bool: boolValue, err := strconv.ParseBool(value) if err != nil { return fmt.Errorf("failed to apply %v%v using type %v and value '%v'", prefix, fieldDesc, s.Type().String(), value) } s.SetBool(boolValue) case reflect.Float32, reflect.Float64: floatValue, err := strconv.ParseFloat(value, s.Type().Bits()) if err != nil { return fmt.Errorf("failed to apply %v%v using type %v and value '%v'", prefix, fieldDesc, s.Type().String(), value) } s.SetFloat(floatValue) case reflect.Struct: c.applyEnvOverridesToStruct(prefix, s) } return nil } func (c *Config) applyEnvOverridesToStruct(prefix string, s reflect.Value) error { typeOfSpec := s.Type() for i := 0; i < s.NumField(); i++ { f := s.Field(i) // Get the toml tag to determine what env var name to use configName := typeOfSpec.Field(i).Tag.Get("toml") // Replace hyphens with underscores to avoid issues with shells configName = strings.Replace(configName, "-", "_", -1) fieldName := typeOfSpec.Field(i).Name // Skip any fields that we cannot set if f.CanSet() || f.Kind() == reflect.Slice { // Use the upper-case prefix and toml name for the env var key := strings.ToUpper(configName) if prefix != "" { key = strings.ToUpper(fmt.Sprintf("%s_%s", prefix, configName)) } // If the type is s slice, apply to each using the index as a suffix // e.g. GRAPHITE_0 if f.Kind() == reflect.Slice || f.Kind() == reflect.Array { for i := 0; i < f.Len(); i++ { if err := c.applyEnvOverrides(fmt.Sprintf("%s_%d", key, i), fieldName, f.Index(i)); err != nil { return err } } } else if err := c.applyEnvOverrides(key, fieldName, f); err != nil { return err } } } return nil }<|fim▁end|>
func (c *Config) applyEnvOverrides(prefix string, fieldDesc string, spec reflect.Value) error {
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! Various builders to generate/alter wasm components mod invoke; mod module; mod code; mod misc; mod import; mod memory; mod table; mod export; mod global; mod data; pub use self::code::{<|fim▁hole|>pub use self::export::{export, ExportBuilder, ExportInternalBuilder}; pub use self::global::{global, GlobalBuilder}; pub use self::import::{import, ImportBuilder}; pub use self::invoke::Identity; pub use self::memory::MemoryBuilder; pub use self::module::{module, from_module, ModuleBuilder, CodeLocation}; pub use self::table::{TableBuilder, TableDefinition, TableEntryDefinition};<|fim▁end|>
signatures, signature, function, SignatureBuilder, SignaturesBuilder, FunctionBuilder, TypeRefBuilder, FuncBodyBuilder, FunctionDefinition, }; pub use self::data::DataSegmentBuilder;
<|file_name|>cpu_profile.cc<|end_file_name|><|fim▁begin|>#include "cpu_profile.h" #include "cpu_profile_node.h" namespace nodex { using v8::Array; using v8::CpuProfile; using v8::CpuProfileNode; using v8::Handle; using v8::Number; using v8::Integer; using v8::Local; using v8::Object; using v8::ObjectTemplate; using v8::FunctionTemplate; using v8::String; using v8::Function; using v8::Value; Nan::Persistent<ObjectTemplate> Profile::profile_template_; Nan::Persistent<Object> Profile::profiles; uint32_t Profile::uid_counter = 0; NAN_METHOD(Profile_EmptyMethod) { } void Profile::Initialize () { Nan::HandleScope scope; Local<FunctionTemplate> f = Nan::New<FunctionTemplate>(Profile_EmptyMethod); Local<ObjectTemplate> o = f->InstanceTemplate(); o->SetInternalFieldCount(1); Nan::SetMethod(o, "delete", Profile::Delete); profile_template_.Reset(o); } NAN_METHOD(Profile::Delete) { Local<Object> self = info.This(); void* ptr = Nan::GetInternalFieldPointer(self, 0); Local<Object> profiles = Nan::New<Object>(Profile::profiles); Local<Value> _uid = info.This()->Get(Nan::New<String>("uid").ToLocalChecked()); Local<String> uid = Nan::To<String>(_uid).ToLocalChecked(); profiles->Delete(uid); static_cast<CpuProfile*>(ptr)->Delete(); } Local<Value> Profile::New (const CpuProfile* node) { Nan::EscapableHandleScope scope; if (profile_template_.IsEmpty()) { Profile::Initialize(); } uid_counter++; Local<Object> profile = Nan::New(profile_template_)->NewInstance(); Nan::SetInternalFieldPointer(profile, 0, const_cast<CpuProfile*>(node)); const uint32_t uid_length = (((sizeof uid_counter) * 8) + 2)/3 + 2; char _uid[uid_length]; sprintf(_uid, "%d", uid_counter); Local<Value> CPU = Nan::New<String>("CPU").ToLocalChecked(); Local<Value> uid = Nan::New<String>(_uid).ToLocalChecked(); #if (NODE_MODULE_VERSION >= 45) Local<String> title = node->GetTitle(); #else Local<String> title = Nan::New(node->GetTitle()); #endif if (!title->Length()) { char _title[8 + uid_length]; sprintf(_title, "Profile 
%i", uid_counter); title = Nan::New<String>(_title).ToLocalChecked(); } Local<Value> head = ProfileNode::New(node->GetTopDownRoot()); profile->Set(Nan::New<String>("typeId").ToLocalChecked(), CPU); profile->Set(Nan::New<String>("uid").ToLocalChecked(), uid); profile->Set(Nan::New<String>("title").ToLocalChecked(), title); profile->Set(Nan::New<String>("head").ToLocalChecked(), head); <|fim▁hole|> Local<Array> samples = Nan::New<Array>(); Local<Array> timestamps = Nan::New<Array>(); uint32_t count = node->GetSamplesCount(); for (uint32_t index = 0; index < count; ++index) { samples->Set(index, Nan::New<Integer>(node->GetSample(index)->GetNodeId())); timestamps->Set(index, Nan::New<Number>(static_cast<double>(node->GetSampleTimestamp(index)))); } profile->Set(Nan::New<String>("startTime").ToLocalChecked(), start_time); profile->Set(Nan::New<String>("endTime").ToLocalChecked(), end_time); profile->Set(Nan::New<String>("samples").ToLocalChecked(), samples); profile->Set(Nan::New<String>("timestamps").ToLocalChecked(), timestamps); #endif Local<Object> profiles = Nan::New<Object>(Profile::profiles); profiles->Set(uid, profile); return scope.Escape(profile); } }<|fim▁end|>
#if (NODE_MODULE_VERSION > 0x000B) Local<Value> start_time = Nan::New<Number>(node->GetStartTime()/1000000); Local<Value> end_time = Nan::New<Number>(node->GetEndTime()/1000000);
<|file_name|>abstract_colormap.py<|end_file_name|><|fim▁begin|>""" Defines the base class for color maps """ from traits.api import Enum, HasTraits, Instance from data_range_1d import DataRange1D class AbstractColormap(HasTraits): """ Abstract class for color maps, which map from scalar values to color values. """ # The data-space bounds of the mapper. range = Instance(DataRange1D) # The color depth of the colors to use. color_depth = Enum('rgba', 'rgb') def map_screen(self, val): """ map_screen(val) -> color Maps an array of values to an array of colors. If the input array is NxM, the returned array is NxMx3 or NxMx4, depending on the **color_depth** setting. """ raise NotImplementedError() def map_data(self, ary): """ map_data(ary) -> color_array Returns an array of values containing the colors mapping to the values in *ary*. If the input array is NxM, the returned array is NxMx3 or NxMx4, depending on the **color_depth** setting. """ # XXX this seems bogus: by analogy with AbstractMapper, this should map # colors to data values, and that will be generally hard to do well. # no subclass implements this - CJW raise NotImplementedError() def map_index(self, ary): """ map_index(ary) -> index into color_bands This method is like map_screen(), but it returns an array of indices into the color map's color bands instead of an array of colors. If the input array is NxM, then the output is NxM integer indices. This method might not apply to all color maps. Ones that cannot define a static set of color bands (e.g., function-defined color maps) are not able to implement this function. """ raise NotImplementedError() def map_uint8(self, val): """ map_uint8(val) -> rgb24 or rgba32 color<|fim▁hole|> Maps a single value to a single color. Color is represented as either length-3 or length-4 array of rgb(a) uint8 values, depending on the **color_depth** setting. """ # default implementation (not efficient) return (self.map_screen(val)*255.0).astype('uint8') # EOF<|fim▁end|>
<|file_name|>image_tags.py<|end_file_name|><|fim▁begin|># Copyright 2012 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import glance_store from oslo_log import log as logging from oslo_utils import encodeutils import webob.exc from glance.api import policy from glance.api.v2 import images as v2_api<|fim▁hole|>import glance.db import glance.gateway from glance.i18n import _ import glance.notifier LOG = logging.getLogger(__name__) class Controller(object): def __init__(self, db_api=None, policy_enforcer=None, notifier=None, store_api=None): self.db_api = db_api or glance.db.get_api() self.policy = policy_enforcer or policy.Enforcer() self.notifier = notifier or glance.notifier.Notifier() self.store_api = store_api or glance_store self.gateway = glance.gateway.Gateway(self.db_api, self.store_api, self.notifier, self.policy) @utils.mutating def update(self, req, image_id, tag_value): image_repo = self.gateway.get_repo(req.context) try: image = image_repo.get(image_id) image.tags.add(tag_value) image_repo.save(image) except exception.NotFound: msg = _("Image %s not found.") % image_id LOG.warning(msg) raise webob.exc.HTTPNotFound(explanation=msg) except exception.Forbidden: msg = _("Not allowed to update tags for image %s.") % image_id LOG.warning(msg) raise webob.exc.HTTPForbidden(explanation=msg) except exception.Invalid as e: msg = (_("Could not update image: %s") % encodeutils.exception_to_unicode(e)) LOG.warning(msg) raise 
webob.exc.HTTPBadRequest(explanation=msg) except exception.ImageTagLimitExceeded as e: msg = (_("Image tag limit exceeded for image %(id)s: %(e)s:") % {"id": image_id, "e": encodeutils.exception_to_unicode(e)}) LOG.warning(msg) raise webob.exc.HTTPRequestEntityTooLarge(explanation=msg) @utils.mutating def delete(self, req, image_id, tag_value): image_repo = self.gateway.get_repo(req.context) try: image = image_repo.get(image_id) if tag_value not in image.tags: raise webob.exc.HTTPNotFound() image.tags.remove(tag_value) image_repo.save(image) except exception.NotFound: msg = _("Image %s not found.") % image_id LOG.warning(msg) raise webob.exc.HTTPNotFound(explanation=msg) except exception.Forbidden: msg = _("Not allowed to delete tags for image %s.") % image_id LOG.warning(msg) raise webob.exc.HTTPForbidden(explanation=msg) class ResponseSerializer(wsgi.JSONResponseSerializer): def update(self, response, result): response.status_int = 204 def delete(self, response, result): response.status_int = 204 class RequestDeserializer(wsgi.JSONRequestDeserializer): def update(self, request): try: schema = v2_api.get_schema() schema_format = {"tags": [request.urlvars.get('tag_value')]} schema.validate(schema_format) except exception.InvalidObject as e: raise webob.exc.HTTPBadRequest(explanation=e.msg) return super(RequestDeserializer, self).default(request) def create_resource(): """Images resource factory method""" serializer = ResponseSerializer() deserializer = RequestDeserializer() controller = Controller() return wsgi.Resource(controller, deserializer, serializer)<|fim▁end|>
from glance.common import exception from glance.common import utils from glance.common import wsgi
<|file_name|>MM.py<|end_file_name|><|fim▁begin|>''' Modulo Movimiento Nanometros @author: P1R0 import ObjSerial, sys; ObjSer = ObjSerial.ObjSerial(0,9600) ObjSer.cts = True ObjSer.dtr = True ObjSer.bytesize = 8 ''' SxN = 59.71 #Constante de Calibracion del Motor #Funcion para inicializar Monocromador def init(ObjSer,A): ObjSer.flushOutput() ObjSer.write(unicode("A\r\n")) echo(ObjSer) ObjSer.write(unicode("0A\r\n")) echo(ObjSer) ObjSer.write(unicode("A\r\n")) echo(ObjSer) ObjSer.write(unicode("0A\r\n")) echo(ObjSer) ObjSer.write(unicode("0R\r\n")) echo(ObjSer) ObjSer.write(unicode("0U1\r\n")) echo(ObjSer) ObjSer.write(unicode("0V1\r\n")) echo(ObjSer) ObjSer.write(unicode("0T400\r\n")) echo(ObjSer) ObjSer.write(unicode("0K1\r\n")) echo(ObjSer) ObjSer.write(unicode("0Y1\r\n")) echo(ObjSer) ObjSer.write(unicode("0Y0\r\n")) echo(ObjSer) ObjSer.write(unicode("0K0\r\n")) echo(ObjSer) ObjSer.write(unicode("0V1\r\n")) echo(ObjSer) ObjSer.write(unicode("0T1000\r\n"))<|fim▁hole|> ObjSer.write(unicode("0V1\r\n")) echo(ObjSer) ObjSer.write(unicode("0T400\r\n")) echo(ObjSer) ObjSer.write(unicode("0K1\r\n")) echo(ObjSer) ObjSer.write(unicode("0V1\r\n")) echo(ObjSer) ObjSer.write(unicode("0T4000\r\n")) echo(ObjSer) ObjSer.write(unicode("0K0\r\n")) echo(ObjSer) ObjSer.write(unicode("0M99999\r\n")) echo(ObjSer) ObjSer.write(unicode("0K1\r\n")) echo(ObjSer) ObjSer.write(unicode("0V1\r\n")) echo(ObjSer) ObjSer.write(unicode("0T400\r\n")) echo(ObjSer) #en la posicion cero ObjSer.write(unicode("0M-3925\r\n")) echo(ObjSer) #En de estar fuera de rango mandamos como parametro 1 if A == 1: ObjSer.write(unicode("0M3925\r\n")) echo(ObjSer) return 0 #funcion para aproximar errores metodo de interpolacion def Error(x): Y = [0, 0.010373807, -0.05124284, -0.227092782, -0.572418858, -1.150211522, -2.019461229, -3.247663205, -4.904050745, -7.062119076, -9.803353877, -13.21724083, -17.39877039, -22.45717585, -28.51818573, -35.71928571, -44.22644716, -54.22539859, -65.94810183, -79.66102345, 
95.70661095, -114.4980595, -136.5895354, -162.693691, -193.8151306, -231.3914014, -277.6754313, -336.5191712, -415.6610186, -536.5034235, -763.8268297, -804.7677106]; X = [0, 50.002, 99.999, 149.999, 199.997, 249.997, 300.007, 349.993, 400.003, 449.997, 499.994, 550.005, 600.002, 649.993, 700.003, 749.995, 800.004, 849.995, 900.004, 949.999, 1000.006, 1049.997, 1100.004, 1150.001, 1200.005, 1250.002, 1300, 1349.999, 1399.998, 449.998, 1490, 1492]; i = 0; while x > X[i]: x0=X[i]; y0=Y[i]; x1=X[i+1]; y1=Y[i+1]; i=i+1; r=y1-y0; d=r/(x1-x0); y=y0+(d*(x-x0)); return y #funcion para calcular y mover el motor def Calcula(ObjSer,Nm,LastPos): Er=Error(Nm); NmyEr = Nm - Er; uS = NmyEr * SxN; dif = uS - int(uS); if dif > 0.5: uS = int(uS) + 1; else: uS = int(uS); Mover = uS - LastPos; print "La diferencia a mover es: %d" % Mover; Mueve(ObjSer,Mover); LastPos = uS; return LastPos #Funcion para llamar al eco del ObjSerial def echo(ObjSer): line = ObjSer.readline() print line #Funcion para mover el motor def Mueve(ObjSer, Mover): #mover Full Step cuando recibe como parametros microSteps MoverFS = ((Mover-3) / 5); ObjSer.flushOutput(); ObjSer.write(unicode("0U0\r\n")); echo(ObjSer); ObjSer.write(unicode("0V1\r\n")); echo(ObjSer); ObjSer.write(unicode("0T1000\r\n")); echo(ObjSer); ObjSer.write(unicode("0M%d\r\n" % MoverFS)); echo(ObjSer); ObjSer.write(unicode("0U1\r\n")); echo(ObjSer); ObjSer.write(unicode("0V1\r\n")); echo(ObjSer); ObjSer.write(unicode("0T400\r\n")); echo(ObjSer); #ultimos 3 microsteps para una aproximacion mas suave. ObjSer.write(unicode("0M3\r\n")); echo(ObjSer); ''' if __name__ == "__main__": N = 0; LastPos = 0; init(0); while 1: while type(N)!= float: try: N = raw_input("Ingresa Nanometros o quit para cerrar:"); if N == "quit": ObjSer.close(); sys.exit(0); N = float(N); except (ValueError, TypeError): print "error, el valor debe ObjSer entero o flotante"; LastPos = Calcula(N,LastPos); print "los microspasos totales son: %d" % LastPos; N=0 '''<|fim▁end|>
echo(ObjSer) ObjSer.write(unicode("0F-\r\n")) echo(ObjSer)
<|file_name|>page.py<|end_file_name|><|fim▁begin|># Copyright (C) 2004 Jeremy S. Sanders # Email: Jeremy Sanders <[email protected]> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. ############################################################################## """Widget that represents a page in the document.""" import collections import textwrap import numpy as N from .. import qtall as qt from .. import document from .. import setting from .. import utils from . import widget from . import controlgraph def _(text, disambiguation=None, context='Page'): """Translate text.""" return qt.QCoreApplication.translate(context, text, disambiguation) defaultrange = [1e99, -1e99] def _resolveLinkedAxis(axis): """Follow a chain of axis function dependencies.""" loopcheck = set() while axis is not None and axis.isLinked(): loopcheck.add(axis) axis = axis.getLinkedAxis() if axis in loopcheck: # fail if loop return None return axis class AxisDependHelper: """A class to work out the dependency of widgets on axes and vice versa, in terms of ranges of the axes. Note: Here a widget is really (widget, depname), as each widget can have a different dependency (e.g. sx and sy dependencies for plotters). It then works out the ranges for each of the axes from the plotters. 
connection types: plotter->axis : axis needs to know data range axis->plotter : plotter needs to know axis range axis<->axis : axes are mutually dependent aim: calculate ranges of axes given plotters problem: cycles in the graph f1<-x: function f1 depends on axis x f2<-y: function f2 depends on axis y y<-f1: axis y depends on function f1 x<-f2: axis x depends on function f2 solution: break dependency cycle: choose somewhere - probably better to choose where widget depends on axis however, axis<->axis cycle can't be broken additional solution: convert all dependencies on axis1 or axis2 to<|fim▁hole|> For linked axes (e.g. AxisFunction): * Don't keep track of range separately -> propagate to real axis * For dependency order resolution, use real axis * In self.deps, use axisfunction axis so we know which axis to use """ def __init__(self): # map widgets to widgets it depends on self.deps = collections.defaultdict(list) # list of axes self.axes = [] # list of plotters associated with each axis self.axis_plotter_map = collections.defaultdict(list) # ranges for each axis self.ranges = {} # pairs of dependent widgets self.pairs = [] # track axes which map from one axis to another self.axis_to_axislinked = {} self.axislinked_to_axis = {} def recursivePlotterSearch(self, widget): """Find a list of plotters below widget. Builds up a dict of "nodes" representing each widget: plotter/axis Each node is a list of tuples saying which widgets need evaling first The tuples are (widget, depname), where depname is a name for the part of the plotter, e.g. "sx" or "sy" for x or y. 
""" if widget.isplotter: # keep track of which widgets depend on which axes widgetaxes = {} for axname in widget.getAxesNames(): axis = widget.lookupAxis(axname) widgetaxes[axname] = axis self.axis_plotter_map[axis].append(widget) # if the widget is a plotter, find which axes the widget # can provide range information about for axname, depname in widget.affectsAxisRange(): origaxis = widgetaxes[axname] resolvedaxis = _resolveLinkedAxis(origaxis) if resolvedaxis is not None and resolvedaxis.usesAutoRange(): # only add dependency if axis has an automatic range self.deps[(origaxis, None)].append((widget, depname)) self.pairs.append( ( (widget, depname), (resolvedaxis, None)) ) # find which axes the plotter needs information from for depname, axname in widget.requiresAxisRange(): origaxis = widgetaxes[axname] resolvedaxis = _resolveLinkedAxis(origaxis) if resolvedaxis is not None and resolvedaxis.usesAutoRange(): self.deps[(widget, depname)].append((origaxis, None)) self.pairs.append( ( (resolvedaxis, None), (widget, depname)) ) elif widget.isaxis: if widget.isaxis and widget.isLinked(): # function of another axis linked = widget.getLinkedAxis() if linked is not None: self.axis_to_axislinked[linked] = widget self.axislinked_to_axis[widget] = linked else: # make a range for a normal axis self.axes.append(widget) self.ranges[widget] = list(defaultrange) for c in widget.children: self.recursivePlotterSearch(c) def breakCycles(self, origcyclic): """Remove cycles if possible.""" numcyclic = len(origcyclic) best = -1 for i in range(len(self.pairs)): if not self.pairs[i][0][0].isaxis: p = self.pairs[:i] + self.pairs[i+1:] ordered, cyclic = utils.topological_sort(p) if len(cyclic) <= numcyclic: numcyclic = len(cyclic) best = i # delete best, or last one if none better found p = self.pairs[best] del self.pairs[best] try: idx = self.deps[p[1]].index(p[0]) del self.deps[p[1]][idx] except ValueError: pass def _updateAxisAutoRange(self, axis): """Update auto range for axis.""" # 
set actual range on axis, as axis no longer has a # dependency axrange = self.ranges[axis] if axrange == defaultrange: axrange = None axis.setAutoRange(axrange) del self.ranges[axis] def _updateRangeFromPlotter(self, axis, plotter, plotterdep): """Update the range for axis from the plotter.""" if axis.isLinked(): # take range and map back to real axis therange = list(defaultrange) plotter.getRange(axis, plotterdep, therange) if therange != defaultrange: # follow up chain loopcheck = set() while axis.isLinked(): loopcheck.add(axis) therange = axis.invertFunctionVals(therange) axis = axis.getLinkedAxis() if axis in loopcheck: axis = None if axis is not None and therange is not None: self.ranges[axis] = [ N.nanmin((self.ranges[axis][0], therange[0])), N.nanmax((self.ranges[axis][1], therange[1])) ] else: plotter.getRange(axis, plotterdep, self.ranges[axis]) def processWidgetDeps(self, dep): """Process dependencies for a single widget.""" widget, widget_dep = dep # iterate over dependent widgets for widgetd, widgetd_dep in self.deps[dep]: if ( widgetd.isplotter and (not widgetd.settings.isSetting('hide') or not widgetd.settings.hide) ): self._updateRangeFromPlotter(widget, widgetd, widgetd_dep) elif widgetd.isaxis: axis = _resolveLinkedAxis(widgetd) if axis in self.ranges: self._updateAxisAutoRange(axis) def processDepends(self): """Go through dependencies of widget. If the dependency has no dependency itself, then update the axis with the widget or vice versa Algorithm: Iterate over dependencies for widget. If the widget has a dependency on a widget which doesn't have a dependency itself, update range from that widget. Then delete that depency from the dependency list. 
""" # get ordered list, breaking cycles while True: ordered, cyclic = utils.topological_sort(self.pairs) if not cyclic: break self.breakCycles(cyclic) # iterate over widgets in order for dep in ordered: self.processWidgetDeps(dep) # process deps for any axis functions while dep[0] in self.axis_to_axislinked: dep = (self.axis_to_axislinked[dep[0]], None) self.processWidgetDeps(dep) def findAxisRanges(self): """Find the ranges from the plotters and set the axis ranges. Follows the dependencies calculated above. """ self.processDepends() # set any remaining ranges for axis in list(self.ranges.keys()): self._updateAxisAutoRange(axis) class Page(widget.Widget): """A class for representing a page of plotting.""" typename='page' allowusercreation = True description=_('Blank page') @classmethod def addSettings(klass, s): widget.Widget.addSettings(s) # page sizes are initially linked to the document page size s.add( setting.DistancePhysical( 'width', setting.Reference('/width'), descr=_('Width of page'), usertext=_('Page width'), formatting=True) ) s.add( setting.DistancePhysical( 'height', setting.Reference('/height'), descr=_('Height of page'), usertext=_('Page height'), formatting=True) ) s.add( setting.Notes( 'notes', '', descr=_('User-defined notes'), usertext=_('Notes')) ) s.add( setting.PageBrush( 'Background', descr = _('Background page fill'), usertext=_('Background')), pixmap='settings_bgfill', ) @classmethod def allowedParentTypes(klass): from . import root return (root.Root,) @property def userdescription(self): """Return user-friendly description.""" return textwrap.fill(self.settings.notes, 60) def draw(self, parentposn, painthelper, outerbounds=None): """Draw the plotter. 
Clip graph inside bounds.""" s = self.settings # document should pass us the page bounds x1, y1, x2, y2 = parentposn # find ranges of axes axisdependhelper = AxisDependHelper() axisdependhelper.recursivePlotterSearch(self) axisdependhelper.findAxisRanges() # store axis->plotter mappings in painthelper painthelper.axisplottermap.update(axisdependhelper.axis_plotter_map) # reverse mapping pamap = collections.defaultdict(list) for axis, plotters in painthelper.axisplottermap.items(): for plot in plotters: pamap[plot].append(axis) painthelper.plotteraxismap.update(pamap) if s.hide: bounds = self.computeBounds(parentposn, painthelper) return bounds # clip to page painter = painthelper.painter(self, parentposn) with painter: # w and h are non integer w = self.settings.get('width').convert(painter) h = self.settings.get('height').convert(painter) if not s.Background.hide: path = qt.QPainterPath() path.addRect(qt.QRectF(0, 0, w, h)) utils.brushExtFillPath(painter, s.Background, path) painthelper.setControlGraph(self, [ controlgraph.ControlMarginBox( self, [0, 0, w, h], [-10000, -10000, 10000, 10000], painthelper, ismovable=False) ] ) bounds = widget.Widget.draw( self, parentposn, painthelper, parentposn) return bounds def updateControlItem(self, cgi): """Call helper to set page size.""" cgi.setPageSize() # allow the factory to instantiate this document.thefactory.register(Page)<|fim▁end|>
axiscomb x <-> axis1 <-> axis2
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>use std::env; use std::fs; use std::io::prelude::*; use std::fs::File; use std::path::Path; extern crate toml; #[macro_use] extern crate serde_derive; // error in rust compiler. Bugfix requested in Sept. 2017 // these are used, but the compiler is not seeing it for // some reason #[allow(unused_imports)] #[macro_use] extern crate serde_json; // error in rust compiler. Bugfix has been submitted in Sept. 2017 #[allow(unused_imports)] #[macro_use] extern crate serde; // used in formatting the Cargo.toml file #[derive(Deserialize, Debug)] struct Tomlfile { contents: Contents, } // used in formatting the Cargo.toml file #[derive(Deserialize, Debug)] struct Metadata { deb: Deb, } // used in formatting the Cargo.toml file #[derive(Deserialize, Debug)] struct Deb { revision: Option<String>, } // used in formatting the Cargo.toml file #[derive(Deserialize, Debug)] struct Package { name: String, version: Option<String>, metadata: Metadata, } // used in formatting the Cargo.toml file #[derive(Deserialize, Debug)] struct Contents { package: Package, dependencies: Option<toml::Value>, } fn main() { let target = env::var("TARGET").unwrap(); println!("target={}", target); if let Ok(_mode) = env::var("LIBINDY_STATIC") { let libindy_lib_path = env::var("LIBINDY_DIR").unwrap(); println!("cargo:rustc-link-search=native={}",libindy_lib_path); println!("cargo:rustc-link-lib=static=indy"); }else if target.contains("aarch64-linux-android") || target.contains("armv7-linux-androideabi") || target.contains("arm-linux-androideabi") || target.contains("i686-linux-android") || target.contains("x86_64-linux-android") || target.contains("aarch64-apple-ios") || target.contains("armv7-apple-ios") || target.contains("armv7s-apple-ios") || target.contains("i386-apple-ios") || target.contains("x86_64-apple-ios") { let libindy_lib_path = match env::var("LIBINDY_DIR"){ Ok(val) => val, Err(..) 
=> panic!("Missing required environment variable LIBINDY_DIR") }; let openssl = match env::var("OPENSSL_LIB_DIR") { Ok(val) => val, Err(..) => match env::var("OPENSSL_DIR") { Ok(dir) => Path::new(&dir[..]).join("/lib").to_string_lossy().into_owned(), Err(..) => panic!("Missing required environment variables OPENSSL_DIR or OPENSSL_LIB_DIR") } }; println!("cargo:rustc-link-search=native={}",libindy_lib_path); println!("cargo:rustc-link-lib=static=indy"); println!("cargo:rustc-link-search=native={}", openssl); println!("cargo:rustc-link-lib=static=crypto"); println!("cargo:rustc-link-lib=static=ssl"); }else if target.contains("darwin"){ //OSX specific logic println!("cargo:rustc-link-lib=indy"); //OSX does not allow 3rd party libs to be installed in /usr/lib. Instead install it in /usr/local/lib println!("cargo:rustc-link-search=native=/usr/local/lib"); }else if target.contains("-linux-"){ //Linux specific logic<|fim▁hole|> println!("cargo:rustc-link-search=native=/usr/lib"); }else if target.contains("-windows-") { println!("cargo:rustc-link-lib=indy.dll"); let profile = env::var("PROFILE").unwrap(); println!("profile={}", profile); let output_dir = env::var("OUT_DIR").unwrap(); println!("output_dir={}", output_dir); let output_dir = Path::new(output_dir.as_str()); let indy_dir = env::var("INDY_DIR").unwrap_or(format!("..\\..\\libindy\\target\\{}", profile)); println!("indy_dir={}", indy_dir); let indy_dir = Path::new(indy_dir.as_str()); let dst = output_dir.join("..\\..\\..\\.."); println!("cargo:rustc-flags=-L {}", indy_dir.as_os_str().to_str().unwrap()); let files = vec!["indy.dll", "libeay32md.dll", "libsodium.dll", "libzmq.dll", "ssleay32md.dll"]; for f in files.iter() { if let Ok(_) = fs::copy(&indy_dir.join(f), &dst.join(f)) { println!("copy {} -> {}", &indy_dir.join(f).display(), &dst.join(f).display()); } } } match env::var("CARGO_FEATURE_CI") { Ok(_) => { println!("injecting version information"); // Leaving as unwrap, this is in the build script. 
let revision = get_revision().unwrap(); write_variables(&revision); }, Err(_) => {println!("NOT injecting version information"); }, }; } // Writes to the file 'src/utils/version_constants.rs' for use // in outputing the version dynamically. fn write_variables(revision:&str) { let out_dir = "src/utils/"; let dest_path = Path::new(&out_dir).join("version_constants.rs"); let mut f = File::create(&dest_path).unwrap(); let s = format!("pub const VERSION: &'static str = env!(\"CARGO_PKG_VERSION\");\npub const REVISION: &'static str = \"{}\";\n", revision); if let Err(e) = f.write_all(s.as_bytes()) { panic!("Error creating version_constants.rs: {}", e); }; } // Gets the revision number from the Cargo.toml file. pub fn get_revision() -> Option<String> { let dir = match env::var("CARGO_MANIFEST_DIR"){ Ok(d) => d, Err(_) => panic!("Couldn't Manifest Directory"), }; let filename = "Cargo.toml"; let p = format!("{}/{}",dir,filename); let mut input = String::new(); File::open(p).and_then(|mut f| { f.read_to_string(&mut input)}).unwrap(); let tomlfile:Contents = toml::from_str(&input).unwrap(); let revision:String = match tomlfile.package.metadata.deb.revision { Some(v) => v, None => String::from(""), }; Some(format!("+{}", revision)) }<|fim▁end|>
println!("cargo:rustc-link-lib=indy");
<|file_name|>display-final-after-month-is-over.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # TODO: issues with new oauth2 stuff. Keep using older version of Python for now. # #!/usr/bin/env python from participantCollection import ParticipantCollection import re import datetime import pyperclip # Edit Me!<|fim▁hole|># what goes here. E.g. If this directory is the directory for February, this script gets # run on March 1, and this URL is the URL for the March challenge page. nextMonthURL = "https://www.reddit.com/r/pornfree/comments/ex6nis/stay_clean_february_this_thread_updated_daily/" # If this directory is the directory for November, this script gets run on December 1, # and currentMonthIndex gets the index of November, i.e. 11. currentMonthIndex = datetime.date.today().month - 1 if currentMonthIndex == 0: currentMonthIndex = 12 currentMonthName = {1: 'January', 2: 'February', 3: 'March', 4: 'April', 5: 'May', 6: 'June', 7: 'July', 8: 'August', 9: 'September', 10: 'October', 11: 'November', 12: 'December'}[currentMonthIndex] nextMonthIndex = currentMonthIndex % 12 + 1 nextMonthName = {1: 'January', 2: 'February', 3: 'March', 4: 'April', 5: 'May', 6: 'June', 7: 'July', 8: 'August', 9: 'September', 10: 'October', 11: 'November', 12: 'December'}[nextMonthIndex] participants = ParticipantCollection() numberStillIn = participants.sizeOfParticipantsWhoAreStillIn() initialNumber = participants.size() percentStillIn = int(round(100 * numberStillIn / initialNumber, 0)) def templateForParticipants(): answer = "" for participant in participants.participantsWhoAreStillInAndHaveCheckedIn(): answer += "/u/" + participant.name answer += "\n\n" return answer def templateToUse(): answer = "" answer += "The Stay Clean CURRENT_MONTH_NAME challenge is now over. Join us for **[the NEXT_MONTH_NAME challenge](NEXT_MONTH_URL)**.\n" answer += "\n" answer += "**NUMBER_STILL_IN** out of INITIAL_NUMBER participants made it all the way through the challenge. 
That's **PERCENT_STILL_IN%**.\n" answer += "\n" answer += "Congratulations to these participants, all of whom were victorious:\n\n" answer += templateForParticipants() return answer def stringToPrint(): answer = templateToUse() answer = re.sub('NUMBER_STILL_IN', str(numberStillIn), answer) answer = re.sub('INITIAL_NUMBER', str(initialNumber), answer) answer = re.sub('PERCENT_STILL_IN', str(percentStillIn), answer) answer = re.sub('CURRENT_MONTH_INDEX', str(currentMonthIndex), answer) answer = re.sub('CURRENT_MONTH_NAME', currentMonthName, answer) answer = re.sub('NEXT_MONTH_INDEX', str(nextMonthIndex), answer) answer = re.sub('NEXT_MONTH_NAME', nextMonthName, answer) answer = re.sub('NEXT_MONTH_URL', nextMonthURL, answer) return answer outputString = stringToPrint() print "=============================================================" print outputString print "=============================================================" pyperclip.copy(outputString)<|fim▁end|>
# This script gets run on the first day of the following month, and that month's URL is
<|file_name|>diff.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Copyright (C) 2014-present Taiga Agile LLC # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. def make_diff(first: dict, second: dict, not_found_value=None, excluded_keys: tuple = ()) -> dict: """ Compute a diff between two dicts. """ diff = {}<|fim▁hole|> # Check all keys in first dict for key in first: if key not in second: diff[key] = (first[key], not_found_value) elif first[key] != second[key]: diff[key] = (first[key], second[key]) # Check all keys in second dict to find missing for key in second: if key not in first: diff[key] = (not_found_value, second[key]) # Remove A -> A changes that usually happens with None -> None for key, value in list(diff.items()): frst, scnd = value if frst == scnd: del diff[key] # Removed excluded keys for key in excluded_keys: if key in diff: del diff[key] return diff<|fim▁end|>
<|file_name|>peer.rs<|end_file_name|><|fim▁begin|>extern crate bitcoin; extern crate rand; use std::thread; use std::time::Duration; use std::u64; use std::sync::mpsc::{channel, Sender, Receiver}; use self::rand::Rng; use self::rand::distributions::{IndependentSample, Range}; use bitcoin::network::socket::Socket; use bitcoin::network::constants::Network; use bitcoin::network::message::NetworkMessage; use bitcoin::network::message::SocketResponse; use bitcoin::network::message_network::VersionMessage; use bitcoin::network::message_blockdata::Inventory; pub fn connect(host: &'static str, port: u16, tx_daemon: Sender<Vec<Inventory>>) -> Socket{ let mut socket = Socket::new(Network::Testnet); match socket.connect(host, port) { Ok(()) => recv_loop(socket.clone(), tx_daemon), Err(e) => { println!("error {:?}", e); } } fn send_ping(tx: Sender<NetworkMessage>) { let ping_interval = Duration::from_secs(10); thread::sleep(ping_interval); let mut rng = rand::thread_rng(); let between = Range::new(u64::MIN, u64::MAX); let nonce = between.ind_sample(&mut rng); let ping_message = NetworkMessage::Ping(nonce); println!("sending ping"); match tx.send(ping_message) { Ok(()) => { println!("ping sent with nonce {:?}", nonce); }, Err(e) => { println!("error {:?}",e); } } } fn send_pong(socket: &mut Socket, nonce: u64) { let pong_message = NetworkMessage::Pong(nonce); match socket.send_message(pong_message) { Ok(()) => { println!("pong sent"); }, Err(e) => { println!("error {:?}",e); } } } fn sender(mut socket: Socket, rx: Receiver<NetworkMessage>) { loop { let message = rx.recv().unwrap(); match socket.send_message(message) { Ok(()) => { println!("sent"); }, Err(e) => { println!("error {:?}",e); } } } } fn pinger(mut socket: Socket, rx: Receiver<NetworkMessage>) { let ping_interval = Duration::from_secs(120); let mut rng = rand::thread_rng(); let between = Range::new(u64::MIN, u64::MAX); loop { let nonce = between.ind_sample(&mut rng); let ping_message = NetworkMessage::Ping(nonce); 
match socket.send_message(ping_message) { Ok(()) => { println!("sent ping "); }, Err(e) => { println!("error {:?}",e); } } thread::sleep(ping_interval); match rx.try_recv() { Ok(payload) => { println!("pong received by pinger {:?} ", payload); }, Err(e) => { println!("error {:?}",e); break; } } } println!("Peer didn't respond to ping"); } fn receiver(mut socket: Socket, tx: Sender<NetworkMessage>, tx_daemon: Sender<Vec<Inventory>>) { loop { match socket.receive_message() { Ok(payload) => { // println!("received {:?}",payload); let message : NetworkMessage = payload; match message { NetworkMessage::Version(nonce) => { println!("ping nonce {:?}", nonce); } NetworkMessage::Verack => { println!("verack"); } NetworkMessage::Ping(nonce) => { println!("ping nonce {:?}", nonce); send_pong(&mut socket, nonce); } NetworkMessage::Addr(addr) => { println!("addr {:?}", addr); } NetworkMessage::Inv(inv) => { tx_daemon.send(inv); } NetworkMessage::GetData(inv) => { println!("GetData {:?}", inv); } NetworkMessage::NotFound(inv) => { println!("NotFound {:?}", inv); } NetworkMessage::GetBlocks(inv) => { println!("GetBlocks {:?}", inv); } NetworkMessage::GetHeaders(inv) => { println!("GetHeaders {:?}", inv); } NetworkMessage::Tx(inv) => { println!("Tx {:?}", inv); } NetworkMessage::Block(inv) => { println!("Block {:?}", inv); } NetworkMessage::Headers(inv) => { println!("Headers {:?}", inv); } NetworkMessage::Pong(inv) => { println!("Pong {:?}", inv); tx.send(message); } NetworkMessage::MemPool => { println!("MemPool"); } } } Err(e) => { println!("error {:?}",e); } } let ping_interval = Duration::from_secs(1); thread::sleep(ping_interval); } } fn send_version_message(mut socket: Socket, version_message: VersionMessage, tx_daemon: Sender<Vec<Inventory>>) { let network_message = NetworkMessage::Version(version_message); match socket.send_message(network_message) { Ok(()) => { let (txMessageReceiver, rxMessageReceiver) = channel::<NetworkMessage>(); // let (txMessageSender, 
rxMessageSender) = channel::<NetworkMessage>(); let sender_socket = socket.clone(); let receiver_socket = socket.clone(); thread::spawn( move || { receiver(receiver_socket, txMessageReceiver, tx_daemon); }); thread::spawn( move || { pinger(sender_socket, rxMessageReceiver); }); // thread::spawn( move || { // loop {tx_daemon: Sender<Vec<Inventory>> // send_pong(&mut sender_socket.clone(), 2353); // } // }); } Err(e) => { println!("error {:?}",e); } } } fn recv_loop(socket : Socket, tx_daemon: Sender<Vec<Inventory>>) { thread::spawn( move || { match VersionMessage::new(14213, socket.clone(), 12421, 2048) { Ok(version_message) => send_version_message(socket, version_message, tx_daemon), Err(e) => { println!("error {:?}", e); } } }); }<|fim▁hole|>}<|fim▁end|>
return socket;
<|file_name|>ty.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![allow(non_camel_case_types)] use back::svh::Svh; use driver::session::Session; use lint; use metadata::csearch; use middle::const_eval; use middle::def; use middle::dependency_format; use middle::lang_items::{FnTraitLangItem, FnMutTraitLangItem}; use middle::lang_items::{FnOnceTraitLangItem, TyDescStructLangItem}; use middle::mem_categorization as mc; use middle::resolve; use middle::resolve_lifetime; use middle::stability; use middle::subst::{mod, Subst, Substs, VecPerParamSpace}; use middle::traits; use middle::ty; use middle::typeck; use middle::ty_fold::{mod, TypeFoldable,TypeFolder}; use middle; use util::ppaux::{note_and_explain_region, bound_region_ptr_to_string}; use util::ppaux::{trait_store_to_string, ty_to_string}; use util::ppaux::{Repr, UserString}; use util::common::{indenter, memoized}; use util::nodemap::{NodeMap, NodeSet, DefIdMap, DefIdSet}; use util::nodemap::{FnvHashMap, FnvHashSet}; use std::cell::{Cell, RefCell}; use std::cmp; use std::fmt::{mod, Show}; use std::hash::{Hash, sip, Writer}; use std::mem; use std::ops; use std::rc::Rc; use std::collections::hash_map::{Occupied, Vacant}; use arena::TypedArena; use syntax::abi; use syntax::ast::{CrateNum, DefId, FnStyle, Ident, ItemTrait, LOCAL_CRATE}; use syntax::ast::{MutImmutable, MutMutable, Name, NamedField, NodeId}; use syntax::ast::{Onceness, StmtExpr, StmtSemi, StructField, UnnamedField}; use syntax::ast::{Visibility}; use syntax::ast_util::{mod, is_local, lit_is_str, local_def, 
PostExpansionMethod}; use syntax::attr::{mod, AttrMetaMethods}; use syntax::codemap::Span; use syntax::parse::token::{mod, InternedString}; use syntax::{ast, ast_map}; use std::collections::enum_set::{EnumSet, CLike}; pub type Disr = u64; pub const INITIAL_DISCRIMINANT_VALUE: Disr = 0; // Data types #[deriving(PartialEq, Eq, Hash)] pub struct field { pub name: ast::Name, pub mt: mt } #[deriving(Clone, Show)] pub enum ImplOrTraitItemContainer { TraitContainer(ast::DefId), ImplContainer(ast::DefId), } impl ImplOrTraitItemContainer { pub fn id(&self) -> ast::DefId { match *self { TraitContainer(id) => id, ImplContainer(id) => id, } } } #[deriving(Clone)] pub enum ImplOrTraitItem { MethodTraitItem(Rc<Method>), TypeTraitItem(Rc<AssociatedType>), } impl ImplOrTraitItem { fn id(&self) -> ImplOrTraitItemId { match *self { MethodTraitItem(ref method) => MethodTraitItemId(method.def_id), TypeTraitItem(ref associated_type) => { TypeTraitItemId(associated_type.def_id) } } } pub fn def_id(&self) -> ast::DefId { match *self { MethodTraitItem(ref method) => method.def_id, TypeTraitItem(ref associated_type) => associated_type.def_id, } } pub fn name(&self) -> ast::Name { match *self { MethodTraitItem(ref method) => method.name, TypeTraitItem(ref associated_type) => associated_type.name, } } pub fn container(&self) -> ImplOrTraitItemContainer { match *self { MethodTraitItem(ref method) => method.container, TypeTraitItem(ref associated_type) => associated_type.container, } } pub fn as_opt_method(&self) -> Option<Rc<Method>> { match *self { MethodTraitItem(ref m) => Some((*m).clone()), TypeTraitItem(_) => None } } } #[deriving(Clone)] pub enum ImplOrTraitItemId { MethodTraitItemId(ast::DefId), TypeTraitItemId(ast::DefId), } impl ImplOrTraitItemId { pub fn def_id(&self) -> ast::DefId { match *self { MethodTraitItemId(def_id) => def_id, TypeTraitItemId(def_id) => def_id, } } } #[deriving(Clone, Show)] pub struct Method { pub name: ast::Name, pub generics: ty::Generics, pub fty: 
BareFnTy, pub explicit_self: ExplicitSelfCategory, pub vis: ast::Visibility, pub def_id: ast::DefId, pub container: ImplOrTraitItemContainer, // If this method is provided, we need to know where it came from pub provided_source: Option<ast::DefId> } impl Method { pub fn new(name: ast::Name, generics: ty::Generics, fty: BareFnTy, explicit_self: ExplicitSelfCategory, vis: ast::Visibility, def_id: ast::DefId, container: ImplOrTraitItemContainer, provided_source: Option<ast::DefId>) -> Method { Method { name: name, generics: generics, fty: fty, explicit_self: explicit_self, vis: vis, def_id: def_id, container: container, provided_source: provided_source } } pub fn container_id(&self) -> ast::DefId { match self.container { TraitContainer(id) => id, ImplContainer(id) => id, } } } #[deriving(Clone)] pub struct AssociatedType { pub name: ast::Name, pub vis: ast::Visibility, pub def_id: ast::DefId, pub container: ImplOrTraitItemContainer, } #[deriving(Clone, PartialEq, Eq, Hash, Show)] pub struct mt { pub ty: t, pub mutbl: ast::Mutability, } #[deriving(Clone, PartialEq, Eq, Hash, Encodable, Decodable, Show)] pub enum TraitStore { /// Box<Trait> UniqTraitStore, /// &Trait and &mut Trait RegionTraitStore(Region, ast::Mutability), } #[deriving(Clone, Show)] pub struct field_ty { pub name: Name, pub id: DefId, pub vis: ast::Visibility, pub origin: ast::DefId, // The DefId of the struct in which the field is declared. } // Contains information needed to resolve types and (in the future) look up // the types of AST nodes. #[deriving(PartialEq, Eq, Hash)] pub struct creader_cache_key { pub cnum: CrateNum, pub pos: uint, pub len: uint } pub struct intern_key { sty: *const sty, } // NB: Do not replace this with #[deriving(PartialEq)]. The automatically-derived // implementation will not recurse through sty and you will get stack // exhaustion. 
impl cmp::PartialEq for intern_key { fn eq(&self, other: &intern_key) -> bool { unsafe { *self.sty == *other.sty } } fn ne(&self, other: &intern_key) -> bool { !self.eq(other) } } impl Eq for intern_key {} impl<W:Writer> Hash<W> for intern_key { fn hash(&self, s: &mut W) { unsafe { (*self.sty).hash(s) } } } pub enum ast_ty_to_ty_cache_entry { atttce_unresolved, /* not resolved yet */ atttce_resolved(t) /* resolved to a type, irrespective of region */ } #[deriving(Clone, PartialEq, Decodable, Encodable)] pub struct ItemVariances { pub types: VecPerParamSpace<Variance>, pub regions: VecPerParamSpace<Variance>, } #[deriving(Clone, PartialEq, Decodable, Encodable, Show)] pub enum Variance { Covariant, // T<A> <: T<B> iff A <: B -- e.g., function return type Invariant, // T<A> <: T<B> iff B == A -- e.g., type of mutable cell Contravariant, // T<A> <: T<B> iff B <: A -- e.g., function param type Bivariant, // T<A> <: T<B> -- e.g., unused type parameter } #[deriving(Clone, Show)] pub enum AutoAdjustment { AdjustAddEnv(ty::TraitStore), AdjustDerefRef(AutoDerefRef) } #[deriving(Clone, PartialEq, Show)] pub enum UnsizeKind { // [T, ..n] -> [T], the uint field is n. UnsizeLength(uint), // An unsize coercion applied to the tail field of a struct. // The uint is the index of the type parameter which is unsized. UnsizeStruct(Box<UnsizeKind>, uint), UnsizeVtable(TyTrait, /* the self type of the trait */ ty::t) } #[deriving(Clone, Show)] pub struct AutoDerefRef { pub autoderefs: uint, pub autoref: Option<AutoRef> } #[deriving(Clone, PartialEq, Show)] pub enum AutoRef { /// Convert from T to &T /// The third field allows us to wrap other AutoRef adjustments. AutoPtr(Region, ast::Mutability, Option<Box<AutoRef>>), /// Convert [T, ..n] to [T] (or similar, depending on the kind) AutoUnsize(UnsizeKind), /// Convert Box<[T, ..n]> to Box<[T]> or something similar in a Box. /// With DST and Box a library type, this should be replaced by UnsizeStruct. 
AutoUnsizeUniq(UnsizeKind), /// Convert from T to *T /// Value to thin pointer /// The second field allows us to wrap other AutoRef adjustments. AutoUnsafe(ast::Mutability, Option<Box<AutoRef>>), } // Ugly little helper function. The first bool in the returned tuple is true if // there is an 'unsize to trait object' adjustment at the bottom of the // adjustment. If that is surrounded by an AutoPtr, then we also return the // region of the AutoPtr (in the third argument). The second bool is true if the // adjustment is unique. fn autoref_object_region(autoref: &AutoRef) -> (bool, bool, Option<Region>) { fn unsize_kind_is_object(k: &UnsizeKind) -> bool { match k { &UnsizeVtable(..) => true, &UnsizeStruct(box ref k, _) => unsize_kind_is_object(k), _ => false } } match autoref { &AutoUnsize(ref k) => (unsize_kind_is_object(k), false, None), &AutoUnsizeUniq(ref k) => (unsize_kind_is_object(k), true, None), &AutoPtr(adj_r, _, Some(box ref autoref)) => { let (b, u, r) = autoref_object_region(autoref); if r.is_some() || u { (b, u, r) } else { (b, u, Some(adj_r)) } } &AutoUnsafe(_, Some(box ref autoref)) => autoref_object_region(autoref), _ => (false, false, None) } } // If the adjustment introduces a borrowed reference to a trait object, then // returns the region of the borrowed reference. pub fn adjusted_object_region(adj: &AutoAdjustment) -> Option<Region> { match adj { &AdjustDerefRef(AutoDerefRef{autoref: Some(ref autoref), ..}) => { let (b, _, r) = autoref_object_region(autoref); if b { r } else { None } } _ => None } } // Returns true if there is a trait cast at the bottom of the adjustment. pub fn adjust_is_object(adj: &AutoAdjustment) -> bool { match adj { &AdjustDerefRef(AutoDerefRef{autoref: Some(ref autoref), ..}) => { let (b, _, _) = autoref_object_region(autoref); b } _ => false } } // If possible, returns the type expected from the given adjustment. This is not // possible if the adjustment depends on the type of the adjusted expression. 
/// Returns the type that an auto-adjustment converts *to*, when that type
/// can be computed from the adjustment alone. Only autoref chains ending in
/// an unsizing-to-trait-object (`UnsizeVtable`) determine a result by
/// themselves; every other adjustment yields `None` because the resulting
/// type depends on the type being adjusted.
pub fn type_of_adjust(cx: &ctxt, adj: &AutoAdjustment) -> Option<t> {
    // Recursive helper: computes the target type of a (possibly nested)
    // autoref, if the autoref alone determines it.
    fn type_of_autoref(cx: &ctxt, autoref: &AutoRef) -> Option<t> {
        match autoref {
            &AutoUnsize(ref k) => match k {
                &UnsizeVtable(TyTrait { ref principal, bounds }, _) => {
                    Some(mk_trait(cx, (*principal).clone(), bounds))
                }
                _ => None
            },
            &AutoUnsizeUniq(ref k) => match k {
                &UnsizeVtable(TyTrait { ref principal, bounds }, _) => {
                    Some(mk_uniq(cx, mk_trait(cx, (*principal).clone(), bounds)))
                }
                _ => None
            },
            &AutoPtr(r, m, Some(box ref autoref)) => {
                // Wrap the inner autoref's result in a region pointer.
                match type_of_autoref(cx, autoref) {
                    Some(t) => Some(mk_rptr(cx, r, mt {mutbl: m, ty: t})),
                    None => None
                }
            }
            &AutoUnsafe(m, Some(box ref autoref)) => {
                // Wrap the inner autoref's result in a raw pointer.
                match type_of_autoref(cx, autoref) {
                    Some(t) => Some(mk_ptr(cx, mt {mutbl: m, ty: t})),
                    None => None
                }
            }
            _ => None
        }
    }

    match adj {
        &AdjustDerefRef(AutoDerefRef{autoref: Some(ref autoref), ..}) => {
            type_of_autoref(cx, autoref)
        }
        _ => None
    }
}

/// A restriction that certain types must be the same size. The use of
/// `transmute` gives rise to these restrictions.
pub struct TransmuteRestriction {
    /// The span from whence the restriction comes.
    pub span: Span,
    /// The type being transmuted from.
    pub from: t,
    /// The type being transmuted to.
    pub to: t,
    /// NodeId of the transmute intrinsic.
    pub id: ast::NodeId,
}

/// The data structure to keep track of all the information that typechecker
/// generates so that it can be reused and doesn't have to be redone
/// later on.
pub struct ctxt<'tcx> {
    /// The arena that types are allocated from.
    type_arena: &'tcx TypedArena<t_box_>,

    /// Specifically use a speedy hash algorithm for this hash map, it's used
    /// quite often.
    interner: RefCell<FnvHashMap<intern_key, &'tcx t_box_>>,
    pub sess: Session,
    pub def_map: resolve::DefMap,

    pub named_region_map: resolve_lifetime::NamedRegionMap,

    pub region_maps: middle::region::RegionMaps,

    /// Stores the types for various nodes in the AST. Note that this table
    /// is not guaranteed to be populated until after typeck. See
    /// typeck::check::fn_ctxt for details.
    pub node_types: RefCell<NodeMap<t>>,

    /// Stores the type parameters which were substituted to obtain the type
    /// of this node. This only applies to nodes that refer to entities
    /// parameterized by type parameters, such as generic fns, types, or
    /// other items.
    pub item_substs: RefCell<NodeMap<ItemSubsts>>,

    /// Maps from a trait item to the trait item "descriptor"
    pub impl_or_trait_items: RefCell<DefIdMap<ImplOrTraitItem>>,

    /// Maps from a trait def-id to a list of the def-ids of its trait items
    pub trait_item_def_ids: RefCell<DefIdMap<Rc<Vec<ImplOrTraitItemId>>>>,

    /// A cache for the trait_items() routine
    pub trait_items_cache: RefCell<DefIdMap<Rc<Vec<ImplOrTraitItem>>>>,

    pub impl_trait_cache: RefCell<DefIdMap<Option<Rc<ty::TraitRef>>>>,

    pub trait_refs: RefCell<NodeMap<Rc<TraitRef>>>,
    pub trait_defs: RefCell<DefIdMap<Rc<TraitDef>>>,

    /// Maps from node-id of a trait object cast (like `foo as
    /// Box<Trait>`) to the trait reference.
    pub object_cast_map: typeck::ObjectCastMap,

    pub map: ast_map::Map<'tcx>,
    pub intrinsic_defs: RefCell<DefIdMap<t>>,
    pub freevars: RefCell<FreevarMap>,
    pub tcache: RefCell<DefIdMap<Polytype>>,
    pub rcache: RefCell<FnvHashMap<creader_cache_key, t>>,
    pub short_names_cache: RefCell<FnvHashMap<t, String>>,
    pub needs_unwind_cleanup_cache: RefCell<FnvHashMap<t, bool>>,
    pub tc_cache: RefCell<FnvHashMap<t, TypeContents>>,
    pub ast_ty_to_ty_cache: RefCell<NodeMap<ast_ty_to_ty_cache_entry>>,
    pub enum_var_cache: RefCell<DefIdMap<Rc<Vec<Rc<VariantInfo>>>>>,
    pub ty_param_defs: RefCell<NodeMap<TypeParameterDef>>,
    pub adjustments: RefCell<NodeMap<AutoAdjustment>>,
    pub normalized_cache: RefCell<FnvHashMap<t, t>>,
    pub lang_items: middle::lang_items::LanguageItems,

    /// A mapping of fake provided method def_ids to the default implementation
    pub provided_method_sources: RefCell<DefIdMap<ast::DefId>>,
    pub struct_fields: RefCell<DefIdMap<Rc<Vec<field_ty>>>>,

    /// Maps from def-id of a type or region parameter to its
    /// (inferred) variance.
    pub item_variance_map: RefCell<DefIdMap<Rc<ItemVariances>>>,

    /// True if the variance has been computed yet; false otherwise.
    pub variance_computed: Cell<bool>,

    /// A mapping from the def ID of an enum or struct type to the def ID
    /// of the method that implements its destructor. If the type is not
    /// present in this map, it does not have a destructor. This map is
    /// populated during the coherence phase of typechecking.
    pub destructor_for_type: RefCell<DefIdMap<ast::DefId>>,

    /// A method will be in this list if and only if it is a destructor.
    pub destructors: RefCell<DefIdSet>,

    /// Maps a trait onto a list of impls of that trait.
    pub trait_impls: RefCell<DefIdMap<Rc<RefCell<Vec<ast::DefId>>>>>,

    /// Maps a DefId of a type to a list of its inherent impls.
    /// Contains implementations of methods that are inherent to a type.
    /// Methods in these implementations don't need to be exported.
    pub inherent_impls: RefCell<DefIdMap<Rc<Vec<ast::DefId>>>>,

    /// Maps a DefId of an impl to a list of its items.
    /// Note that this contains all of the impls that we know about,
    /// including ones in other crates. It's not clear that this is the best
    /// way to do it.
    pub impl_items: RefCell<DefIdMap<Vec<ImplOrTraitItemId>>>,

    /// Set of used unsafe nodes (functions or blocks). Unsafe nodes not
    /// present in this set can be warned about.
    pub used_unsafe: RefCell<NodeSet>,

    /// Set of nodes which mark locals as mutable which end up getting used at
    /// some point. Local variable definitions not in this set can be warned
    /// about.
    pub used_mut_nodes: RefCell<NodeSet>,

    /// The set of external nominal types whose implementations have been read.
    /// This is used for lazy resolution of methods.
    pub populated_external_types: RefCell<DefIdSet>,

    /// The set of external traits whose implementations have been read. This
    /// is used for lazy resolution of traits.
    pub populated_external_traits: RefCell<DefIdSet>,

    /// Borrows
    pub upvar_borrow_map: RefCell<UpvarBorrowMap>,

    /// These two caches are used by const_eval when decoding external statics
    /// and variants that are found.
    pub extern_const_statics: RefCell<DefIdMap<ast::NodeId>>,
    pub extern_const_variants: RefCell<DefIdMap<ast::NodeId>>,

    pub method_map: typeck::MethodMap,

    pub dependency_formats: RefCell<dependency_format::Dependencies>,

    /// Records the type of each unboxed closure. The def ID is the ID of the
    /// expression defining the unboxed closure.
    pub unboxed_closures: RefCell<DefIdMap<UnboxedClosure>>,

    pub node_lint_levels: RefCell<FnvHashMap<(ast::NodeId, lint::LintId),
                                             lint::LevelSource>>,

    /// The types that must be asserted to be the same size for `transmute`
    /// to be valid. We gather up these restrictions in the intrinsicck pass
    /// and check them in trans.
    pub transmute_restrictions: RefCell<Vec<TransmuteRestriction>>,

    /// Maps any item's def-id to its stability index.
    pub stability: RefCell<stability::Index>,

    /// Maps closures to their capture clauses.
    pub capture_modes: RefCell<CaptureModeMap>,

    /// Maps def IDs to true if and only if they're associated types.
    pub associated_types: RefCell<DefIdMap<bool>>,

    /// Caches the results of trait selection. This cache is used
    /// for things that do not have to do with the parameters in scope.
    pub selection_cache: traits::SelectionCache,

    /// Caches the representation hints for struct definitions.
    pub repr_hint_cache: RefCell<DefIdMap<Rc<Vec<attr::ReprAttr>>>>,
}

// Flags that we track on types. These flags are propagated upwards
// through the type during type construction, so that we can quickly
// check whether the type has various kinds of types in it without
// recursing over the type itself.
bitflags!
{
    flags TypeFlags: u32 {
        const NO_TYPE_FLAGS = 0b0,
        const HAS_PARAMS    = 0b1,
        const HAS_SELF      = 0b10,
        const HAS_TY_INFER  = 0b100,
        const HAS_RE_INFER  = 0b1000,
        const HAS_REGIONS   = 0b10000,
        const HAS_TY_ERR    = 0b100000,
        // A type "needs subst" if it mentions anything that substitution
        // could replace: type parameters, `Self`, or regions.
        const NEEDS_SUBST   = HAS_PARAMS.bits | HAS_SELF.bits | HAS_REGIONS.bits,
    }
}

pub type t_box = &'static t_box_;

/// The interned payload behind a `t` handle: the structural type (`sty`)
/// plus the `TypeFlags` propagated up from its components at construction.
#[deriving(Show)]
pub struct t_box_ {
    pub sty: sty,
    pub flags: TypeFlags,
}

impl fmt::Show for TypeFlags {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.bits)
    }
}

// To reduce refcounting cost, we're representing types as unsafe pointers
// throughout the compiler. These are simply casted t_box values. Use ty::get
// to cast them back to a box. (Without the cast, compiler performance suffers
// ~15%.) This does mean that a t value relies on the ctxt to keep its box
// alive, and using ty::get is unsafe when the ctxt is no longer alive.
enum t_opaque {}

/// An interned type handle; see the comment on `t_opaque` above.
#[allow(raw_pointer_deriving)]
#[deriving(Clone, PartialEq, Eq, Hash)]
pub struct t { inner: *const t_opaque }

impl fmt::Show for t {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", get(*self))
    }
}

/// Casts an interned `t` handle back to its underlying `t_box_`.
/// Only valid while the owning `ctxt`'s arena is alive.
pub fn get(t: t) -> t_box {
    unsafe {
        let t2: t_box = mem::transmute(t);
        t2
    }
}

fn tbox_has_flag(tb: t_box, flag: TypeFlags) -> bool {
    tb.flags.intersects(flag)
}
pub fn type_has_params(t: t) -> bool { tbox_has_flag(get(t), HAS_PARAMS) }
pub fn type_has_self(t: t) -> bool { tbox_has_flag(get(t), HAS_SELF) }
pub fn type_has_ty_infer(t: t) -> bool { tbox_has_flag(get(t), HAS_TY_INFER) }
pub fn type_needs_infer(t: t) -> bool {
    tbox_has_flag(get(t), HAS_TY_INFER | HAS_RE_INFER)
}

/// The type of a bare (non-closure) function: fn style, ABI, signature.
#[deriving(Clone, PartialEq, Eq, Hash, Show)]
pub struct BareFnTy {
    pub fn_style: ast::FnStyle,
    pub abi: abi::Abi,
    pub sig: FnSig,
}

/// The type of a boxed closure: fn style, once-ness, storage kind,
/// existential bounds, signature, and ABI.
#[deriving(Clone, PartialEq, Eq, Hash, Show)]
pub struct ClosureTy {
    pub fn_style: ast::FnStyle,
    pub onceness: ast::Onceness,
    pub store: TraitStore,
    pub bounds: ExistentialBounds,
    pub sig: FnSig,
    pub abi: abi::Abi,
}

#[deriving(Clone, PartialEq, Eq,
           Hash)]
pub enum FnOutput {
    // The function returns normally, yielding a value of the given type.
    FnConverging(ty::t),
    // The function never returns.
    FnDiverging
}

impl FnOutput {
    /// Returns the converging return type; hits `unreachable!()` if the
    /// function diverges, so callers must know the output converges.
    pub fn unwrap(&self) -> ty::t {
        match *self {
            ty::FnConverging(ref t) => *t,
            ty::FnDiverging => unreachable!()
        }
    }
}

/**
 * Signature of a function type, which I have arbitrarily
 * decided to use to refer to the input/output types.
 *
 * - `binder_id` is the node id where this fn type appeared;
 *   it is used to identify all the bound regions appearing
 *   in the input/output types that are bound by this fn type
 *   (vs some enclosing or enclosed fn type)
 * - `inputs` is the list of arguments and their modes.
 * - `output` is the return type.
 * - `variadic` indicates whether this is a varidic function. (only true for foreign fns)
 */
#[deriving(Clone, PartialEq, Eq, Hash)]
pub struct FnSig {
    pub binder_id: ast::NodeId,
    pub inputs: Vec<t>,
    pub output: FnOutput,
    pub variadic: bool
}

/// A reference to a type parameter: its space/index position plus the
/// def-id of the parameter's declaration.
#[deriving(Clone, PartialEq, Eq, Hash, Show)]
pub struct ParamTy {
    pub space: subst::ParamSpace,
    pub idx: uint,
    pub def_id: DefId
}

/// Representation of regions:
#[deriving(Clone, PartialEq, Eq, Hash, Encodable, Decodable, Show)]
pub enum Region {
    // Region bound in a type or fn declaration which will be
    // substituted 'early' -- that is, at the same time when type
    // parameters are substituted.
    ReEarlyBound(/* param id */ ast::NodeId,
                 subst::ParamSpace,
                 /*index*/ uint,
                 ast::Name),

    // Region bound in a function scope, which will be substituted when the
    // function is called. The first argument must be the `binder_id` of
    // some enclosing function signature.
    ReLateBound(/* binder_id */ ast::NodeId, BoundRegion),

    /// When checking a function body, the types of all arguments and so forth
    /// that refer to bound region parameters are modified to refer to free
    /// region parameters.
    ReFree(FreeRegion),

    /// A concrete region naming some expression within the current function.
    ReScope(NodeId),

    /// Static data that has an "infinite" lifetime. Top in the region lattice.
    ReStatic,

    /// A region variable. Should not exist after typeck.
    ReInfer(InferRegion),

    /// Empty lifetime is for data that is never accessed.
    /// Bottom in the region lattice. We treat ReEmpty somewhat
    /// specially; at least right now, we do not generate instances of
    /// it during the GLB computations, but rather
    /// generate an error instead. This is to improve error messages.
    /// The only way to get an instance of ReEmpty is to have a region
    /// variable with no constraints.
    ReEmpty,
}

/**
 * Upvars do not get their own node-id. Instead, we use the pair of
 * the original var id (that is, the root variable that is referenced
 * by the upvar) and the id of the closure expression.
 */
#[deriving(Clone, PartialEq, Eq, Hash, Show)]
pub struct UpvarId {
    pub var_id: ast::NodeId,
    pub closure_expr_id: ast::NodeId,
}

#[deriving(Clone, PartialEq, Eq, Hash, Show, Encodable, Decodable)]
pub enum BorrowKind {
    /// Data must be immutable and is aliasable.
    ImmBorrow,

    /// Data must be immutable but not aliasable. This kind of borrow
    /// cannot currently be expressed by the user and is used only in
    /// implicit closure bindings. It is needed when you the closure
    /// is borrowing or mutating a mutable referent, e.g.:
    ///
    ///    let x: &mut int = ...;
    ///    let y = || *x += 5;
    ///
    /// If we were to try to translate this closure into a more explicit
    /// form, we'd encounter an error with the code as written:
    ///
    ///    struct Env { x: & &mut int }
    ///    let x: &mut int = ...;
    ///    let y = (&mut Env { &x }, fn_ptr);  // Closure is pair of env and fn
    ///    fn fn_ptr(env: &mut Env) { **env.x += 5; }
    ///
    /// This is then illegal because you cannot mutate a `&mut` found
    /// in an aliasable location. To solve, you'd have to translate with
    /// an `&mut` borrow:
    ///
    ///    struct Env { x: & &mut int }
    ///    let x: &mut int = ...;
    ///    let y = (&mut Env { &mut x }, fn_ptr); // changed from &x to &mut x
    ///    fn fn_ptr(env: &mut Env) { **env.x += 5; }
    ///
    /// Now the assignment to `**env.x` is legal, but creating a
    /// mutable pointer to `x` is not because `x` is not mutable. We
    /// could fix this by declaring `x` as `let mut x`. This is ok in
    /// user code, if awkward, but extra weird for closures, since the
    /// borrow is hidden.
    ///
    /// So we introduce a "unique imm" borrow -- the referent is
    /// immutable, but not aliasable. This solves the problem. For
    /// simplicity, we don't give users the way to express this
    /// borrow, it's just used when translating closures.
    UniqueImmBorrow,

    /// Data is mutable and not aliasable.
    MutBorrow
}

/**
 * Information describing the borrowing of an upvar. This is computed
 * during `typeck`, specifically by `regionck`. The general idea is
 * that the compiler analyses treat closures like:
 *
 *     let closure: &'e fn() = || {
 *        x = 1;   // upvar x is assigned to
 *        use(y);  // upvar y is read
 *        foo(&z); // upvar z is borrowed immutably
 *     };
 *
 * as if they were "desugared" to something loosely like:
 *
 *     struct Vars<'x,'y,'z> { x: &'x mut int,
 *                             y: &'y const int,
 *                             z: &'z int }
 *     let closure: &'e fn() = {
 *         fn f(env: &Vars) {
 *             *env.x = 1;
 *             use(*env.y);
 *             foo(env.z);
 *         }
 *         let env: &'e mut Vars<'x,'y,'z> = &mut Vars { x: &'x mut x,
 *                                                       y: &'y const y,
 *                                                       z: &'z z };
 *         (env, f)
 *     };
 *
 * This is basically what happens at runtime. The closure is basically
 * an existentially quantified version of the `(env, f)` pair.
 *
 * This data structure indicates the region and mutability of a single
 * one of the `x...z` borrows.
 *
 * It may not be obvious why each borrowed variable gets its own
 * lifetime (in the desugared version of the example, these are indicated
 * by the lifetime parameters `'x`, `'y`, and `'z` in the `Vars` definition).
 * Each such lifetime must encompass the lifetime `'e` of the closure itself,
 * but need not be identical to it. The reason that this makes sense:
 *
 * - Callers are only permitted to invoke the closure, and hence to
 *   use the pointers, within the lifetime `'e`, so clearly `'e` must
 *   be a sublifetime of `'x...'z`.
 * - The closure creator knows which upvars were borrowed by the closure
 *   and thus `x...z` will be reserved for `'x...'z` respectively.
 * - Through mutation, the borrowed upvars can actually escape
 *   the closure, so sometimes it is necessary for them to be larger
 *   than the closure lifetime itself.
 */
#[deriving(PartialEq, Clone, Encodable, Decodable, Show)]
pub struct UpvarBorrow {
    pub kind: BorrowKind,
    pub region: ty::Region,
}

pub type UpvarBorrowMap = FnvHashMap<UpvarId, UpvarBorrow>;

impl Region {
    /// True for regions still bound in a declaration (early- or
    /// late-bound), as opposed to free/concrete/inference regions.
    pub fn is_bound(&self) -> bool {
        match self {
            &ty::ReEarlyBound(..) => true,
            &ty::ReLateBound(..) => true,
            _ => false
        }
    }
}

/// A bound region that has been "freed" for checking a function body,
/// scoped to the node `scope_id`.
#[deriving(Clone, PartialEq, PartialOrd, Eq, Ord,
           Hash, Encodable, Decodable, Show)]
pub struct FreeRegion {
    pub scope_id: NodeId,
    pub bound_region: BoundRegion
}

#[deriving(Clone, PartialEq, PartialOrd, Eq, Ord,
           Hash, Encodable, Decodable, Show)]
pub enum BoundRegion {
    /// An anonymous region parameter for a given fn (&T)
    BrAnon(uint),

    /// Named region parameters for functions (a in &'a T)
    ///
    /// The def-id is needed to distinguish free regions in
    /// the event of shadowing.
    BrNamed(ast::DefId, ast::Name),

    /// Fresh bound identifiers created during GLB computations.
    BrFresh(uint),

    // Anonymous region for the implicit env pointer parameter
    // to a closure
    BrEnv
}

// Statically-allocated `t_box_` values for the primitive types, so that
// constructing a primitive type never has to touch the interner.
mod primitives {
    use super::t_box_;

    use syntax::ast;

    macro_rules!
    def_prim_ty(
        ($name:ident, $sty:expr) => (
            pub static $name: t_box_ = t_box_ {
                sty: $sty,
                flags: super::NO_TYPE_FLAGS,
            };
        )
    )

    def_prim_ty!(TY_NIL,    super::ty_nil)
    def_prim_ty!(TY_BOOL,   super::ty_bool)
    def_prim_ty!(TY_CHAR,   super::ty_char)
    def_prim_ty!(TY_INT,    super::ty_int(ast::TyI))
    def_prim_ty!(TY_I8,     super::ty_int(ast::TyI8))
    def_prim_ty!(TY_I16,    super::ty_int(ast::TyI16))
    def_prim_ty!(TY_I32,    super::ty_int(ast::TyI32))
    def_prim_ty!(TY_I64,    super::ty_int(ast::TyI64))
    def_prim_ty!(TY_UINT,   super::ty_uint(ast::TyU))
    def_prim_ty!(TY_U8,     super::ty_uint(ast::TyU8))
    def_prim_ty!(TY_U16,    super::ty_uint(ast::TyU16))
    def_prim_ty!(TY_U32,    super::ty_uint(ast::TyU32))
    def_prim_ty!(TY_U64,    super::ty_uint(ast::TyU64))
    def_prim_ty!(TY_F32,    super::ty_float(ast::TyF32))
    def_prim_ty!(TY_F64,    super::ty_float(ast::TyF64))

    // `ty_err` carries HAS_TY_ERR rather than NO_TYPE_FLAGS, so it cannot
    // be generated by the macro above.
    pub static TY_ERR: t_box_ = t_box_ {
        sty: super::ty_err,
        flags: super::HAS_TY_ERR,
    };
}

// NB: If you change this, you'll probably want to change the corresponding
// AST structure in libsyntax/ast.rs as well.
#[deriving(Clone, PartialEq, Eq, Hash, Show)]
pub enum sty {
    ty_nil,
    ty_bool,
    ty_char,
    ty_int(ast::IntTy),
    ty_uint(ast::UintTy),
    ty_float(ast::FloatTy),
    /// Substs here, possibly against intuition, *may* contain `ty_param`s.
    /// That is, even after substitution it is possible that there are type
    /// variables. This happens when the `ty_enum` corresponds to an enum
    /// definition and not a concrete use of it. To get the correct `ty_enum`
    /// from the tcx, use the `NodeId` from the `ast::Ty` and look it up in
    /// the `ast_ty_to_ty_cache`. This is probably true for `ty_struct` as
    /// well.
    ty_enum(DefId, Substs),
    ty_uniq(t),
    ty_str,
    ty_vec(t, Option<uint>), // Second field is length.
    ty_ptr(mt),
    ty_rptr(Region, mt),
    ty_bare_fn(BareFnTy),
    ty_closure(Box<ClosureTy>),
    ty_trait(Box<TyTrait>),
    ty_struct(DefId, Substs),
    ty_unboxed_closure(DefId, Region, Substs),
    ty_tup(Vec<t>),

    ty_param(ParamTy), // type parameter
    ty_open(t),  // A deref'ed fat pointer, i.e., a dynamically sized value
                 // and its size. Only ever used in trans. It is not necessary
                 // earlier since we don't need to distinguish a DST with its
                 // size (e.g., in a deref) vs a DST with the size elsewhere (
                 // e.g., in a field).

    ty_infer(InferTy), // something used only during inference/typeck
    ty_err, // Also only used during inference/typeck, to represent
            // the type of an erroneous expression (helps cut down
            // on non-useful type error messages)
}

/// An existential ("trait object") type: the principal trait reference
/// plus region/builtin bounds.
#[deriving(Clone, PartialEq, Eq, Hash, Show)]
pub struct TyTrait {
    // Principal trait reference.
    pub principal: TraitRef, // would use Rc<TraitRef>, but it runs afoul of some static rules
    pub bounds: ExistentialBounds
}

/// A reference to a trait together with the substitutions that apply to
/// it (including `Self`).
#[deriving(Clone, PartialEq, Eq, Hash, Show)]
pub struct TraitRef {
    pub def_id: DefId,
    pub substs: Substs,
}

/// The resolved value of an integral type variable.
#[deriving(Clone, PartialEq)]
pub enum IntVarValue {
    IntType(ast::IntTy),
    UintType(ast::UintTy),
}

#[deriving(Clone, Show)]
pub enum terr_vstore_kind {
    terr_vec,
    terr_str,
    terr_fn,
    terr_trait
}

#[deriving(Clone, Show)]
pub struct expected_found<T> {
    pub expected: T,
    pub found: T
}

// Data structures used in type unification
#[deriving(Clone, Show)]
pub enum type_err {
    terr_mismatch,
    terr_fn_style_mismatch(expected_found<FnStyle>),
    terr_onceness_mismatch(expected_found<Onceness>),
    terr_abi_mismatch(expected_found<abi::Abi>),
    terr_mutability,
    terr_sigil_mismatch(expected_found<TraitStore>),
    terr_box_mutability,
    terr_ptr_mutability,
    terr_ref_mutability,
    terr_vec_mutability,
    terr_tuple_size(expected_found<uint>),
    terr_fixed_array_size(expected_found<uint>),
    terr_ty_param_size(expected_found<uint>),
    terr_arg_count,
    terr_regions_does_not_outlive(Region, Region),
    terr_regions_not_same(Region, Region),
    terr_regions_no_overlap(Region, Region),
    terr_regions_insufficiently_polymorphic(BoundRegion, Region),
    terr_regions_overly_polymorphic(BoundRegion, Region),
    terr_trait_stores_differ(terr_vstore_kind, expected_found<TraitStore>),
    terr_sorts(expected_found<t>),
    terr_integer_as_char,
    terr_int_mismatch(expected_found<IntVarValue>),
    terr_float_mismatch(expected_found<ast::FloatTy>),
    terr_traits(expected_found<ast::DefId>),
    terr_builtin_bounds(expected_found<BuiltinBounds>),
    terr_variadic_mismatch(expected_found<bool>),
    terr_cyclic_ty,
    terr_convergence_mismatch(expected_found<bool>)
}

/// Bounds suitable for a named type parameter like `A` in `fn foo<A>`
/// as well as the existential type parameter in an object type.
#[deriving(PartialEq, Eq, Hash, Clone, Show)]
pub struct ParamBounds {
    pub region_bounds: Vec<ty::Region>,
    pub builtin_bounds: BuiltinBounds,
    pub trait_bounds: Vec<Rc<TraitRef>>
}

/// Bounds suitable for an existentially quantified type parameter
/// such as those that appear in object types or closure types. The
/// major difference between this case and `ParamBounds` is that
/// general purpose trait bounds are omitted and there must be
/// *exactly one* region.
#[deriving(PartialEq, Eq, Hash, Clone, Show)]
pub struct ExistentialBounds {
    pub region_bound: ty::Region,
    pub builtin_bounds: BuiltinBounds
}

pub type BuiltinBounds = EnumSet<BuiltinBound>;

#[deriving(Clone, Encodable, PartialEq, Eq, Decodable, Hash, Show)]
#[repr(uint)]
pub enum BuiltinBound {
    BoundSend,
    BoundSized,
    BoundCopy,
    BoundSync,
}

pub fn empty_builtin_bounds() -> BuiltinBounds {
    EnumSet::new()
}

pub fn all_builtin_bounds() -> BuiltinBounds {
    let mut set = EnumSet::new();
    set.insert(BoundSend);
    set.insert(BoundSized);
    // NOTE(review): `BoundCopy` is not inserted here despite the name
    // "all" -- presumably deliberate, but worth confirming at call sites.
    set.insert(BoundSync);
    set
}

pub fn region_existential_bound(r: ty::Region) -> ExistentialBounds {
    /*!
     * An existential bound that does not implement any traits.
     */

    ty::ExistentialBounds { region_bound: r,
                            builtin_bounds: empty_builtin_bounds() }
}

impl CLike for BuiltinBound {
    fn to_uint(&self) -> uint {
        *self as uint
    }
    fn from_uint(v: uint) -> BuiltinBound {
        // SAFETY relies on `v` being a valid discriminant of the
        // #[repr(uint)] enum above; callers must not pass arbitrary values.
        unsafe { mem::transmute(v) }
    }
}

#[deriving(Clone, PartialEq, Eq, Hash)]
pub struct TyVid {
    pub index: uint
}

#[deriving(Clone, PartialEq, Eq, Hash)]
pub struct IntVid {
    pub index: uint
}

#[deriving(Clone, PartialEq, Eq, Hash)]
pub struct FloatVid {
    pub index: uint
}

#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash)]
pub struct RegionVid {
    pub index: uint
}

#[deriving(Clone, PartialEq, Eq, Hash)]
pub enum InferTy {
    TyVar(TyVid),
    IntVar(IntVid),
    FloatVar(FloatVid),
    SkolemizedTy(uint),

    // FIXME -- once integral fallback is impl'd, we should remove
    // this type. It's only needed to prevent spurious errors for
    // integers whose type winds up never being constrained.
    SkolemizedIntTy(uint),
}

#[deriving(Clone, Encodable, Decodable, Eq, Hash, Show)]
pub enum InferRegion {
    ReVar(RegionVid),
    ReSkolemized(uint, BoundRegion)
}

impl cmp::PartialEq for InferRegion {
    // Note: equality deliberately ignores the `BoundRegion` payload of
    // `ReSkolemized`; two skolemized regions are equal iff their
    // counters match.
    fn eq(&self, other: &InferRegion) -> bool {
        match ((*self), *other) {
            (ReVar(rva), ReVar(rvb)) => {
                rva == rvb
            }
            (ReSkolemized(rva, _), ReSkolemized(rvb, _)) => {
                rva == rvb
            }
            _ => false
        }
    }
    fn ne(&self, other: &InferRegion) -> bool {
        !((*self) == (*other))
    }
}

impl fmt::Show for TyVid {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result{
        write!(f, "_#{}t", self.index)
    }
}

impl fmt::Show for IntVid {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "_#{}i", self.index)
    }
}

impl fmt::Show for FloatVid {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "_#{}f", self.index)
    }
}

impl fmt::Show for RegionVid {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "'_#{}r", self.index)
    }
}

impl fmt::Show for FnSig {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // grr, without tcx not much we can do.
        write!(f, "(...)")
    }
}

impl fmt::Show for InferTy {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            TyVar(ref v) => v.fmt(f),
            IntVar(ref v) => v.fmt(f),
            FloatVar(ref v) => v.fmt(f),
            SkolemizedTy(v) => write!(f, "SkolemizedTy({})", v),
            SkolemizedIntTy(v) => write!(f, "SkolemizedIntTy({})", v),
        }
    }
}

impl fmt::Show for IntVarValue {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            IntType(ref v) => v.fmt(f),
            UintType(ref v) => v.fmt(f),
        }
    }
}

/// Definition of a type parameter: name, declaration site, position
/// (space/index), bounds, and an optional default type.
#[deriving(Clone, Show)]
pub struct TypeParameterDef {
    pub name: ast::Name,
    pub def_id: ast::DefId,
    pub space: subst::ParamSpace,
    pub index: uint,
    pub associated_with: Option<ast::DefId>,
    pub bounds: ParamBounds,
    pub default: Option<ty::t>,
}

/// Definition of a lifetime parameter, analogous to `TypeParameterDef`.
#[deriving(Encodable, Decodable, Clone, Show)]
pub struct RegionParameterDef {
    pub name: ast::Name,
    pub def_id: ast::DefId,
    pub space: subst::ParamSpace,
    pub index: uint,
    pub bounds: Vec<ty::Region>,
}

/// Information about the type/lifetime parameters associated with an
/// item or method. Analogous to ast::Generics.
#[deriving(Clone, Show)]
pub struct Generics {
    pub types: VecPerParamSpace<TypeParameterDef>,
    pub regions: VecPerParamSpace<RegionParameterDef>,
}

impl Generics {
    /// Generics with no type and no region parameters.
    pub fn empty() -> Generics {
        Generics { types: VecPerParamSpace::empty(),
                   regions: VecPerParamSpace::empty() }
    }

    pub fn has_type_params(&self, space: subst::ParamSpace) -> bool {
        !self.types.is_empty_in(space)
    }

    pub fn has_region_params(&self, space: subst::ParamSpace) -> bool {
        !self.regions.is_empty_in(space)
    }
}

impl TraitRef {
    pub fn new(def_id: ast::DefId, substs: Substs) -> TraitRef {
        TraitRef { def_id: def_id, substs: substs }
    }

    /// The `Self` type of this trait reference; panics if no self type
    /// was recorded in the substitutions.
    pub fn self_ty(&self) -> ty::t {
        self.substs.self_ty().unwrap()
    }

    pub fn input_types(&self) -> &[ty::t] {
        // Select only the "input types" from a trait-reference. For
        // now this is all the types that appear in the
        // trait-reference, but it should eventually exclude
        // associated types.
        self.substs.types.as_slice()
    }
}

/// When type checking, we use the `ParameterEnvironment` to track
/// details about the type/lifetime parameters that are in scope.
/// It primarily stores the bounds information.
///
/// Note: This information might seem to be redundant with the data in
/// `tcx.ty_param_defs`, but it is not. That table contains the
/// parameter definitions from an "outside" perspective, but this
/// struct will contain the bounds for a parameter as seen from inside
/// the function body. Currently the only real distinction is that
/// bound lifetime parameters are replaced with free ones, but in the
/// future I hope to refine the representation of types so as to make
/// more distinctions clearer.
pub struct ParameterEnvironment {
    /// A substitution that can be applied to move from
    /// the "outer" view of a type or method to the "inner" view.
    /// In general, this means converting from bound parameters to
    /// free parameters. Since we currently represent bound/free type
    /// parameters in the same way, this only has an effect on regions.
    pub free_substs: Substs,

    /// Bounds on the various type parameters
    pub bounds: VecPerParamSpace<ParamBounds>,

    /// Each type parameter has an implicit region bound that
    /// indicates it must outlive at least the function body (the user
    /// may specify stronger requirements). This field indicates the
    /// region of the callee.
    pub implicit_region_bound: ty::Region,

    /// Obligations that the caller must satisfy. This is basically
    /// the set of bounds on the in-scope type parameters, translated
    /// into Obligations.
    ///
    /// Note: This effectively *duplicates* the `bounds` array for
    /// now.
    pub caller_obligations: VecPerParamSpace<traits::Obligation>,

    /// Caches the results of trait selection. This cache is used
    /// for things that have to do with the parameters in scope.
    pub selection_cache: traits::SelectionCache,
}

impl ParameterEnvironment {
    /// Builds the parameter environment used to type-check the *body* of
    /// the item/method with node-id `id`. Bugs out for node kinds that
    /// have no body (required trait methods, type items, non-items).
    pub fn for_item(cx: &ctxt, id: NodeId) -> ParameterEnvironment {
        match cx.map.find(id) {
            Some(ast_map::NodeImplItem(ref impl_item)) => {
                match **impl_item {
                    ast::MethodImplItem(ref method) => {
                        let method_def_id = ast_util::local_def(id);
                        match ty::impl_or_trait_item(cx, method_def_id) {
                            MethodTraitItem(ref method_ty) => {
                                let method_generics = &method_ty.generics;
                                construct_parameter_environment(
                                    cx,
                                    method.span,
                                    method_generics,
                                    method.pe_body().id)
                            }
                            TypeTraitItem(_) => {
                                cx.sess
                                  .bug("ParameterEnvironment::from_item(): \
                                        can't create a parameter environment \
                                        for type trait items")
                            }
                        }
                    }
                    ast::TypeImplItem(_) => {
                        cx.sess.bug("ParameterEnvironment::from_item(): \
                                     can't create a parameter environment \
                                     for type impl items")
                    }
                }
            }
            Some(ast_map::NodeTraitItem(trait_method)) => {
                match *trait_method {
                    ast::RequiredMethod(ref required) => {
                        cx.sess.span_bug(required.span,
                                         "ParameterEnvironment::from_item(): can't create a parameter \
                                          environment for required trait \
                                          methods")
                    }
                    ast::ProvidedMethod(ref method) => {
                        let method_def_id = ast_util::local_def(id);
                        match ty::impl_or_trait_item(cx, method_def_id) {
                            MethodTraitItem(ref method_ty) => {
                                let method_generics = &method_ty.generics;
                                construct_parameter_environment(
                                    cx,
                                    method.span,
                                    method_generics,
                                    method.pe_body().id)
                            }
                            TypeTraitItem(_) => {
                                cx.sess
                                  .bug("ParameterEnvironment::from_item(): \
                                        can't create a parameter environment \
                                        for type trait items")
                            }
                        }
                    }
                    ast::TypeTraitItem(_) => {
                        cx.sess.bug("ParameterEnvironment::from_item(): \
                                     can't create a parameter environment \
                                     for type trait items")
                    }
                }
            }
            Some(ast_map::NodeItem(item)) => {
                match item.node {
                    ast::ItemFn(_, _, _, _, ref body) => {
                        // We assume this is a function.
                        let fn_def_id = ast_util::local_def(id);
                        let fn_pty = ty::lookup_item_type(cx, fn_def_id);

                        construct_parameter_environment(cx,
                                                        item.span,
                                                        &fn_pty.generics,
                                                        body.id)
                    }
                    ast::ItemEnum(..) |
                    ast::ItemStruct(..) |
                    ast::ItemImpl(..) |
                    ast::ItemConst(..) |
                    ast::ItemStatic(..) => {
                        let def_id = ast_util::local_def(id);
                        let pty = ty::lookup_item_type(cx, def_id);
                        construct_parameter_environment(cx, item.span,
                                                        &pty.generics, id)
                    }
                    _ => {
                        cx.sess.span_bug(item.span,
                                         "ParameterEnvironment::from_item(): can't create a parameter \
                                          environment for this kind of item")
                    }
                }
            }
            _ => {
                cx.sess.bug(format!("ParameterEnvironment::from_item(): \
                                     `{}` is not an item",
                                    cx.map.node_to_string(id)).as_slice())
            }
        }
    }
}

/// A polytype.
///
/// - `generics`: the set of type parameters and their bounds
/// - `ty`: the base types, which may reference the parameters defined
///   in `generics`
#[deriving(Clone, Show)]
pub struct Polytype {
    pub generics: Generics,
    pub ty: t
}

/// As `Polytype` but for a trait ref.
pub struct TraitDef {
    /// Generic type definitions. Note that `Self` is listed in here
    /// as having a single bound, the trait itself (e.g., in the trait
    /// `Eq`, there is a single bound `Self : Eq`). This is so that
    /// default methods get to assume that the `Self` parameters
    /// implements the trait.
    pub generics: Generics,

    /// The "supertrait" bounds.
    pub bounds: ParamBounds,
    pub trait_ref: Rc<ty::TraitRef>,
}

/// Records the substitutions used to translate the polytype for an
/// item into the monotype of an item reference.
#[deriving(Clone)]
pub struct ItemSubsts {
    pub substs: Substs,
}

/// Records information about each unboxed closure.
#[deriving(Clone)]
pub struct UnboxedClosure {
    /// The type of the unboxed closure.
    pub closure_type: ClosureTy,
    /// The kind of unboxed closure this is.
    pub kind: UnboxedClosureKind,
}

#[deriving(Clone, PartialEq, Eq, Show)]
pub enum UnboxedClosureKind {
    FnUnboxedClosureKind,
    FnMutUnboxedClosureKind,
    FnOnceUnboxedClosureKind,
}

impl UnboxedClosureKind {
    /// Returns the def-id of the `Fn`/`FnMut`/`FnOnce` lang-item trait
    /// corresponding to this closure kind; raises a fatal session error
    /// if the lang item is not defined.
    pub fn trait_did(&self, cx: &ctxt) -> ast::DefId {
        let result = match *self {
            FnUnboxedClosureKind => cx.lang_items.require(FnTraitLangItem),
            FnMutUnboxedClosureKind => {
                cx.lang_items.require(FnMutTraitLangItem)
            }
            FnOnceUnboxedClosureKind => {
                cx.lang_items.require(FnOnceTraitLangItem)
            }
        };
        match result {
            Ok(trait_did) => trait_did,
            Err(err) => cx.sess.fatal(err.as_slice()),
        }
    }
}

/// Constructs a fresh typechecking context: the inputs produced by
/// earlier phases are moved in, and every side table / cache starts
/// out empty.
pub fn mk_ctxt<'tcx>(s: Session,
                     type_arena: &'tcx TypedArena<t_box_>,
                     dm: resolve::DefMap,
                     named_region_map: resolve_lifetime::NamedRegionMap,
                     map: ast_map::Map<'tcx>,
                     freevars: RefCell<FreevarMap>,
                     capture_modes: RefCell<CaptureModeMap>,
                     region_maps: middle::region::RegionMaps,
                     lang_items: middle::lang_items::LanguageItems,
                     stability: stability::Index) -> ctxt<'tcx> {
    ctxt {
        type_arena: type_arena,
        interner: RefCell::new(FnvHashMap::new()),
        named_region_map: named_region_map,
        item_variance_map: RefCell::new(DefIdMap::new()),
        variance_computed: Cell::new(false),
        sess: s,
        def_map: dm,
        region_maps: region_maps,
        node_types: RefCell::new(FnvHashMap::new()),
        item_substs: RefCell::new(NodeMap::new()),
        trait_refs: RefCell::new(NodeMap::new()),
        trait_defs: RefCell::new(DefIdMap::new()),
        object_cast_map: RefCell::new(NodeMap::new()),
        map: map,
        intrinsic_defs: RefCell::new(DefIdMap::new()),
        freevars: freevars,
        tcache: RefCell::new(DefIdMap::new()),
        rcache: RefCell::new(FnvHashMap::new()),
        short_names_cache: RefCell::new(FnvHashMap::new()),
        needs_unwind_cleanup_cache: RefCell::new(FnvHashMap::new()),
        tc_cache: RefCell::new(FnvHashMap::new()),
        ast_ty_to_ty_cache: RefCell::new(NodeMap::new()),
        enum_var_cache: RefCell::new(DefIdMap::new()),
        impl_or_trait_items: RefCell::new(DefIdMap::new()),
        trait_item_def_ids: RefCell::new(DefIdMap::new()),
        trait_items_cache: RefCell::new(DefIdMap::new()),
        impl_trait_cache: RefCell::new(DefIdMap::new()),
        ty_param_defs: RefCell::new(NodeMap::new()),
        adjustments: RefCell::new(NodeMap::new()),
        normalized_cache: RefCell::new(FnvHashMap::new()),
        lang_items: lang_items,
        provided_method_sources: RefCell::new(DefIdMap::new()),
        struct_fields: RefCell::new(DefIdMap::new()),
        destructor_for_type: RefCell::new(DefIdMap::new()),
        destructors: RefCell::new(DefIdSet::new()),
        trait_impls: RefCell::new(DefIdMap::new()),
        inherent_impls: RefCell::new(DefIdMap::new()),
        impl_items: RefCell::new(DefIdMap::new()),
        used_unsafe: RefCell::new(NodeSet::new()),
        used_mut_nodes: RefCell::new(NodeSet::new()),
        populated_external_types: RefCell::new(DefIdSet::new()),
        populated_external_traits: RefCell::new(DefIdSet::new()),
        upvar_borrow_map: RefCell::new(FnvHashMap::new()),
        extern_const_statics: RefCell::new(DefIdMap::new()),
        extern_const_variants: RefCell::new(DefIdMap::new()),
        method_map: RefCell::new(FnvHashMap::new()),
        dependency_formats: RefCell::new(FnvHashMap::new()),
        unboxed_closures: RefCell::new(DefIdMap::new()),
        node_lint_levels: RefCell::new(FnvHashMap::new()),
        transmute_restrictions: RefCell::new(Vec::new()),
        stability: RefCell::new(stability),
        capture_modes: capture_modes,
        associated_types: RefCell::new(DefIdMap::new()),
        selection_cache: traits::SelectionCache::new(),
        repr_hint_cache: RefCell::new(DefIdMap::new()),
    }
}

// Type constructors

// Interns a type/name combination, stores the resulting box in cx.interner,
// and returns the box as cast to an unsafe ptr (see comments for t above).
pub fn mk_t(cx: &ctxt, st: sty) -> t {
    // Check for primitive types.
    match st {
        ty_nil => return mk_nil(),
        ty_err => return mk_err(),
        ty_bool => return mk_bool(),
        ty_int(i) => return mk_mach_int(i),
        ty_uint(u) => return mk_mach_uint(u),
        ty_float(f) => return mk_mach_float(f),
        ty_char => return mk_char(),
        _ => {}
    };

    let key = intern_key { sty: &st };

    // Fast path: this sty was interned before; hand back the existing box.
    match cx.interner.borrow().get(&key) {
        Some(t) => unsafe { return mem::transmute(&t.sty); },
        _ => ()
    }

    let mut flags = NO_TYPE_FLAGS;

    // Flags contributed by a single region.
    fn rflags(r: Region) -> TypeFlags {
        HAS_REGIONS | {
            match r {
              ty::ReInfer(_) => HAS_RE_INFER,
              _ => NO_TYPE_FLAGS,
            }
        }
    }

    // Flags contributed by a substitution: union of the flags of all its
    // component types plus those of any non-erased regions.
    fn sflags(substs: &Substs) -> TypeFlags {
        let mut f = NO_TYPE_FLAGS;
        let mut i = substs.types.iter();
        for tt in i { f = f | get(*tt).flags; }
        match substs.regions {
            subst::ErasedRegions => {}
            subst::NonerasedRegions(ref regions) => {
                for r in regions.iter() {
                    f = f | rflags(*r)
                }
            }
        }
        return f;
    }

    // Existential bounds carry exactly one region bound.
    fn flags_for_bounds(bounds: &ExistentialBounds) -> TypeFlags {
        rflags(bounds.region_bound)
    }

    match &st {
      &ty_nil | &ty_bool | &ty_char | &ty_int(_) | &ty_float(_) | &ty_uint(_) |
      &ty_str => {}
      // You might think that we could just return ty_err for
      // any type containing ty_err as a component, and get
      // rid of the HAS_TY_ERR flag -- likewise for ty_bot (with
      // the exception of function types that return bot).
      // But doing so caused sporadic memory corruption, and
      // neither I (tjc) nor nmatsakis could figure out why,
      // so we're doing it this way.
      &ty_err => flags = flags | HAS_TY_ERR,
      &ty_param(ref p) => {
          if p.space == subst::SelfSpace {
              flags = flags | HAS_SELF;
          } else {
              flags = flags | HAS_PARAMS;
          }
      }
      &ty_unboxed_closure(_, ref region, ref substs) => {
          flags = flags | rflags(*region);
          flags = flags | sflags(substs);
      }
      &ty_infer(_) => flags = flags | HAS_TY_INFER,
      &ty_enum(_, ref substs) | &ty_struct(_, ref substs) => {
          flags = flags | sflags(substs);
      }
      &ty_trait(box TyTrait { ref principal, ref bounds }) => {
          flags = flags | sflags(&principal.substs);
          flags = flags | flags_for_bounds(bounds);
      }
      &ty_uniq(tt) | &ty_vec(tt, _) | &ty_open(tt) => {
        flags = flags | get(tt).flags
      }
      &ty_ptr(ref m) => {
        flags = flags | get(m.ty).flags;
      }
      &ty_rptr(r, ref m) => {
        flags = flags | rflags(r);
        flags = flags | get(m.ty).flags;
      }
      &ty_tup(ref ts) => for tt in ts.iter() { flags = flags | get(*tt).flags; },
      &ty_bare_fn(ref f) => {
        for a in f.sig.inputs.iter() { flags = flags | get(*a).flags; }
        if let ty::FnConverging(output) = f.sig.output {
            flags = flags | get(output).flags;
        }
      }
      &ty_closure(ref f) => {
        match f.store {
            RegionTraitStore(r, _) => {
                flags = flags | rflags(r);
            }
            _ => {}
        }
        for a in f.sig.inputs.iter() { flags = flags | get(*a).flags; }
        if let ty::FnConverging(output) = f.sig.output {
            flags = flags | get(output).flags;
        }
        flags = flags | flags_for_bounds(&f.bounds);
      }
    }

    // Allocate the box in the arena, then record it in the interner keyed
    // by a pointer to its own sty so future lookups hit the fast path.
    let t = cx.type_arena.alloc(t_box_ {
        sty: st,
        flags: flags,
    });

    let sty_ptr = &t.sty as *const sty;

    let key = intern_key {
        sty: sty_ptr,
    };

    cx.interner.borrow_mut().insert(key, t);

    unsafe {
        mem::transmute::<*const sty, t>(sty_ptr)
    }
}

#[inline]
pub fn mk_prim_t(primitive: &'static t_box_) -> t {
    unsafe {
        mem::transmute::<&'static t_box_, t>(primitive)
    }
}

#[inline] pub fn mk_nil() -> t { mk_prim_t(&primitives::TY_NIL) }

#[inline] pub fn mk_err() -> t { mk_prim_t(&primitives::TY_ERR) }

#[inline] pub fn mk_bool() -> t { mk_prim_t(&primitives::TY_BOOL) }

#[inline] pub fn mk_int() -> t { mk_prim_t(&primitives::TY_INT) }

#[inline]
pub fn mk_i8() -> t {
mk_prim_t(&primitives::TY_I8) } #[inline] pub fn mk_i16() -> t { mk_prim_t(&primitives::TY_I16) } #[inline] pub fn mk_i32() -> t { mk_prim_t(&primitives::TY_I32) } #[inline] pub fn mk_i64() -> t { mk_prim_t(&primitives::TY_I64) } #[inline] pub fn mk_f32() -> t { mk_prim_t(&primitives::TY_F32) } #[inline] pub fn mk_f64() -> t { mk_prim_t(&primitives::TY_F64) } #[inline] pub fn mk_uint() -> t { mk_prim_t(&primitives::TY_UINT) } #[inline] pub fn mk_u8() -> t { mk_prim_t(&primitives::TY_U8) } #[inline] pub fn mk_u16() -> t { mk_prim_t(&primitives::TY_U16) } #[inline] pub fn mk_u32() -> t { mk_prim_t(&primitives::TY_U32) } #[inline] pub fn mk_u64() -> t { mk_prim_t(&primitives::TY_U64) } pub fn mk_mach_int(tm: ast::IntTy) -> t { match tm { ast::TyI => mk_int(), ast::TyI8 => mk_i8(), ast::TyI16 => mk_i16(), ast::TyI32 => mk_i32(), ast::TyI64 => mk_i64(), } } pub fn mk_mach_uint(tm: ast::UintTy) -> t { match tm { ast::TyU => mk_uint(), ast::TyU8 => mk_u8(), ast::TyU16 => mk_u16(), ast::TyU32 => mk_u32(), ast::TyU64 => mk_u64(), } } pub fn mk_mach_float(tm: ast::FloatTy) -> t { match tm { ast::TyF32 => mk_f32(), ast::TyF64 => mk_f64(), } } #[inline] pub fn mk_char() -> t { mk_prim_t(&primitives::TY_CHAR) } pub fn mk_str(cx: &ctxt) -> t { mk_t(cx, ty_str) } pub fn mk_str_slice(cx: &ctxt, r: Region, m: ast::Mutability) -> t { mk_rptr(cx, r, mt { ty: mk_t(cx, ty_str), mutbl: m }) } pub fn mk_enum(cx: &ctxt, did: ast::DefId, substs: Substs) -> t { // take a copy of substs so that we own the vectors inside mk_t(cx, ty_enum(did, substs)) } pub fn mk_uniq(cx: &ctxt, ty: t) -> t { mk_t(cx, ty_uniq(ty)) } pub fn mk_ptr(cx: &ctxt, tm: mt) -> t { mk_t(cx, ty_ptr(tm)) } pub fn mk_rptr(cx: &ctxt, r: Region, tm: mt) -> t { mk_t(cx, ty_rptr(r, tm)) } pub fn mk_mut_rptr(cx: &ctxt, r: Region, ty: t) -> t { mk_rptr(cx, r, mt {ty: ty, mutbl: ast::MutMutable}) } pub fn mk_imm_rptr(cx: &ctxt, r: Region, ty: t) -> t { mk_rptr(cx, r, mt {ty: ty, mutbl: ast::MutImmutable}) } pub fn mk_mut_ptr(cx: 
&ctxt, ty: t) -> t { mk_ptr(cx, mt {ty: ty, mutbl: ast::MutMutable}) } pub fn mk_imm_ptr(cx: &ctxt, ty: t) -> t { mk_ptr(cx, mt {ty: ty, mutbl: ast::MutImmutable}) } pub fn mk_nil_ptr(cx: &ctxt) -> t { mk_ptr(cx, mt {ty: mk_nil(), mutbl: ast::MutImmutable}) } pub fn mk_vec(cx: &ctxt, t: t, sz: Option<uint>) -> t { mk_t(cx, ty_vec(t, sz)) } pub fn mk_slice(cx: &ctxt, r: Region, tm: mt) -> t { mk_rptr(cx, r, mt { ty: mk_vec(cx, tm.ty, None), mutbl: tm.mutbl }) } pub fn mk_tup(cx: &ctxt, ts: Vec<t>) -> t { mk_t(cx, ty_tup(ts)) } pub fn mk_tup_or_nil(cx: &ctxt, ts: Vec<t>) -> t { if ts.len() == 0 { ty::mk_nil() } else { mk_t(cx, ty_tup(ts)) } } pub fn mk_closure(cx: &ctxt, fty: ClosureTy) -> t { mk_t(cx, ty_closure(box fty)) } pub fn mk_bare_fn(cx: &ctxt, fty: BareFnTy) -> t { mk_t(cx, ty_bare_fn(fty)) } pub fn mk_ctor_fn(cx: &ctxt, binder_id: ast::NodeId, input_tys: &[ty::t], output: ty::t) -> t { let input_args = input_tys.iter().map(|t| *t).collect(); mk_bare_fn(cx, BareFnTy { fn_style: ast::NormalFn, abi: abi::Rust, sig: FnSig { binder_id: binder_id, inputs: input_args, output: ty::FnConverging(output), variadic: false } }) } pub fn mk_trait(cx: &ctxt, principal: ty::TraitRef, bounds: ExistentialBounds) -> t { // take a copy of substs so that we own the vectors inside let inner = box TyTrait { principal: principal, bounds: bounds }; mk_t(cx, ty_trait(inner)) } pub fn mk_struct(cx: &ctxt, struct_id: ast::DefId, substs: Substs) -> t { // take a copy of substs so that we own the vectors inside mk_t(cx, ty_struct(struct_id, substs)) } pub fn mk_unboxed_closure(cx: &ctxt, closure_id: ast::DefId, region: Region, substs: Substs) -> t { mk_t(cx, ty_unboxed_closure(closure_id, region, substs)) } pub fn mk_var(cx: &ctxt, v: TyVid) -> t { mk_infer(cx, TyVar(v)) } pub fn mk_int_var(cx: &ctxt, v: IntVid) -> t { mk_infer(cx, IntVar(v)) } pub fn mk_float_var(cx: &ctxt, v: FloatVid) -> t { mk_infer(cx, FloatVar(v)) } pub fn mk_infer(cx: &ctxt, it: InferTy) -> t { mk_t(cx, 
ty_infer(it)) } pub fn mk_param(cx: &ctxt, space: subst::ParamSpace, n: uint, k: DefId) -> t { mk_t(cx, ty_param(ParamTy { space: space, idx: n, def_id: k })) } pub fn mk_self_type(cx: &ctxt, did: ast::DefId) -> t { mk_param(cx, subst::SelfSpace, 0, did) } pub fn mk_param_from_def(cx: &ctxt, def: &TypeParameterDef) -> t { mk_param(cx, def.space, def.index, def.def_id) } pub fn mk_open(cx: &ctxt, t: t) -> t { mk_t(cx, ty_open(t)) } pub fn walk_ty(ty: t, f: |t|) { maybe_walk_ty(ty, |t| { f(t); true }); } pub fn maybe_walk_ty(ty: t, f: |t| -> bool) { if !f(ty) { return; } match get(ty).sty { ty_nil | ty_bool | ty_char | ty_int(_) | ty_uint(_) | ty_float(_) | ty_str | ty_infer(_) | ty_param(_) | ty_err => {} ty_uniq(ty) | ty_vec(ty, _) | ty_open(ty) => maybe_walk_ty(ty, f), ty_ptr(ref tm) | ty_rptr(_, ref tm) => { maybe_walk_ty(tm.ty, f); } ty_trait(box TyTrait { ref principal, .. }) => { for subty in principal.substs.types.iter() { maybe_walk_ty(*subty, |x| f(x)); } } ty_enum(_, ref substs) | ty_struct(_, ref substs) | ty_unboxed_closure(_, _, ref substs) => { for subty in substs.types.iter() { maybe_walk_ty(*subty, |x| f(x)); } } ty_tup(ref ts) => { for tt in ts.iter() { maybe_walk_ty(*tt, |x| f(x)); } } ty_bare_fn(ref ft) => { for a in ft.sig.inputs.iter() { maybe_walk_ty(*a, |x| f(x)); } if let ty::FnConverging(output) = ft.sig.output { maybe_walk_ty(output, f); } } ty_closure(ref ft) => { for a in ft.sig.inputs.iter() { maybe_walk_ty(*a, |x| f(x)); } if let ty::FnConverging(output) = ft.sig.output { maybe_walk_ty(output, f); } } } } // Folds types from the bottom up. 
pub fn fold_ty(cx: &ctxt, t0: t, fldop: |t| -> t) -> t {
    let mut f = ty_fold::BottomUpFolder {tcx: cx, fldop: fldop};
    f.fold_ty(t0)
}

impl ParamTy {
    pub fn new(space: subst::ParamSpace,
               index: uint,
               def_id: ast::DefId)
               -> ParamTy {
        ParamTy { space: space, idx: index, def_id: def_id }
    }

    // The `Self` parameter of a trait: parameter 0 in the self space.
    pub fn for_self(trait_def_id: ast::DefId) -> ParamTy {
        ParamTy::new(subst::SelfSpace, 0, trait_def_id)
    }

    pub fn for_def(def: &TypeParameterDef) -> ParamTy {
        ParamTy::new(def.space, def.index, def.def_id)
    }

    // Re-intern this parameter description as a `ty_param` type.
    pub fn to_ty(self, tcx: &ty::ctxt) -> ty::t {
        ty::mk_param(tcx, self.space, self.idx, self.def_id)
    }

    pub fn is_self(&self) -> bool {
        self.space == subst::SelfSpace && self.idx == 0
    }
}

impl ItemSubsts {
    pub fn empty() -> ItemSubsts {
        ItemSubsts { substs: Substs::empty() }
    }

    pub fn is_noop(&self) -> bool {
        self.substs.is_noop()
    }
}

impl ParamBounds {
    pub fn empty() -> ParamBounds {
        ParamBounds {
            builtin_bounds: empty_builtin_bounds(),
            trait_bounds: Vec::new(),
            region_bounds: Vec::new(),
        }
    }
}

// Type utilities

pub fn type_is_nil(ty: t) -> bool { get(ty).sty == ty_nil }

// Checks the cached type flags rather than walking the type.
pub fn type_is_error(ty: t) -> bool {
    get(ty).flags.intersects(HAS_TY_ERR)
}

pub fn type_needs_subst(ty: t) -> bool {
    tbox_has_flag(get(ty), NEEDS_SUBST)
}

pub fn trait_ref_contains_error(tref: &ty::TraitRef) -> bool {
    tref.substs.types.any(|&t| type_is_error(t))
}

pub fn type_is_ty_var(ty: t) -> bool {
    match get(ty).sty {
        ty_infer(TyVar(_)) => true,
        _ => false
    }
}

pub fn type_is_bool(ty: t) -> bool { get(ty).sty == ty_bool }

pub fn type_is_self(ty: t) -> bool {
    match get(ty).sty {
        ty_param(ref p) => p.space == subst::SelfSpace,
        _ => false
    }
}

// A pointer (raw or borrowed) to an unsized vector or str.
fn type_is_slice(ty: t) -> bool {
    match get(ty).sty {
        ty_ptr(mt) | ty_rptr(_, mt) => match get(mt.ty).sty {
            ty_vec(_, None) | ty_str => true,
            _ => false,
        },
        _ => false
    }
}

// True for vectors themselves, and for pointers to *unsized* vectors.
pub fn type_is_vec(ty: t) -> bool {
    match get(ty).sty {
        ty_vec(..) => true,
        ty_ptr(mt{ty: t, ..}) | ty_rptr(_, mt{ty: t, ..}) |
        ty_uniq(t) => match get(t).sty {
            ty_vec(_, None) => true,
            _ => false
        },
        _ => false
    }
}

pub fn type_is_structural(ty: t) -> bool {
    match get(ty).sty {
        ty_struct(..) | ty_tup(_) | ty_enum(..) | ty_closure(_) |
        ty_vec(_, Some(_)) | ty_unboxed_closure(..) => true,
        // NB: non-short-circuiting `|` on bools; both calls are cheap.
        _ => type_is_slice(ty) | type_is_trait(ty)
    }
}

pub fn type_is_simd(cx: &ctxt, ty: t) -> bool {
    match get(ty).sty {
        ty_struct(did, _) => lookup_simd(cx, did),
        _ => false
    }
}

// Element type of a vector/str sequence; `ty_str` elements are `u8`.
// ICEs (via `sess.bug`) on non-sequence types.
pub fn sequence_element_type(cx: &ctxt, ty: t) -> t {
    match get(ty).sty {
        ty_vec(ty, _) => ty,
        ty_str => mk_mach_uint(ast::TyU8),
        ty_open(ty) => sequence_element_type(cx, ty),
        _ => cx.sess.bug(format!("sequence_element_type called on non-sequence value: {}",
                                 ty_to_string(cx, ty)).as_slice()),
    }
}

// Element type of a SIMD struct: the type of its first field.
pub fn simd_type(cx: &ctxt, ty: t) -> t {
    match get(ty).sty {
        ty_struct(did, ref substs) => {
            let fields = lookup_struct_fields(cx, did);
            lookup_field_type(cx, did, fields[0].id, substs)
        }
        _ => panic!("simd_type called on invalid type")
    }
}

// Lane count of a SIMD struct: its number of fields.
pub fn simd_size(cx: &ctxt, ty: t) -> uint {
    match get(ty).sty {
        ty_struct(did, _) => {
            let fields = lookup_struct_fields(cx, did);
            fields.len()
        }
        _ => panic!("simd_size called on invalid type")
    }
}

pub fn type_is_region_ptr(ty: t) -> bool {
    match get(ty).sty {
        ty_rptr(..) => true,
        _ => false
    }
}

pub fn type_is_unsafe_ptr(ty: t) -> bool {
    match get(ty).sty {
      ty_ptr(_) => return true,
      _ => return false
    }
}

pub fn type_is_unique(ty: t) -> bool {
    match get(ty).sty {
        // NOTE(review): the inner match re-examines the *same* `ty` (already
        // known to be `ty_uniq`), so the `ty_trait` arm can never fire and
        // this returns true for every `ty_uniq`.  Possibly the inner match
        // was meant to inspect the pointee — confirm against callers.
        ty_uniq(_) => match get(ty).sty {
            ty_trait(..) => false,
            _ => true
        },
        _ => false
    }
}

// A "fat" pointer carries a pointer plus extra data (length or vtable):
// any pointer-like type whose pointee is unsized.
pub fn type_is_fat_ptr(cx: &ctxt, ty: t) -> bool {
    match get(ty).sty {
        ty_ptr(mt{ty, ..}) | ty_rptr(_, mt{ty, ..})
        | ty_uniq(ty) if !type_is_sized(cx, ty) => true,
        _ => false,
    }
}

/*
 A scalar type is one that denotes an atomic datum, with no sub-components.
 (A ty_ptr is scalar because it represents a non-managed pointer, so its
 contents are abstract to rustc.)
*/
pub fn type_is_scalar(ty: t) -> bool {
    match get(ty).sty {
      ty_nil | ty_bool | ty_char | ty_int(_) | ty_float(_) | ty_uint(_) |
      ty_infer(IntVar(_)) | ty_infer(FloatVar(_)) |
      ty_bare_fn(..) | ty_ptr(_) => true,
      _ => false
    }
}

/// Returns true if this type is a floating point type and false otherwise.
pub fn type_is_floating_point(ty: t) -> bool {
    match get(ty).sty {
        ty_float(_) => true,
        _ => false,
    }
}

pub fn type_needs_drop(cx: &ctxt, ty: t) -> bool {
    type_contents(cx, ty).needs_drop(cx)
}

// Some things don't need cleanups during unwinding because the
// task can free them all at once later. Currently only things
// that only contain scalars and shared boxes can avoid unwind
// cleanups.
pub fn type_needs_unwind_cleanup(cx: &ctxt, ty: t) -> bool {
    // Memoized per-tcx; the inner helper carries a per-query visited set.
    return memoized(&cx.needs_unwind_cleanup_cache, ty, |ty| {
        type_needs_unwind_cleanup_(cx, ty, &mut FnvHashSet::new())
    });

    fn type_needs_unwind_cleanup_(cx: &ctxt,
                                  ty: t,
                                  tycache: &mut FnvHashSet<t>) -> bool {
        // Prevent infinite recursion
        if !tycache.insert(ty) {
            return false;
        }

        let mut needs_unwind_cleanup = false;
        maybe_walk_ty(ty, |ty| {
            needs_unwind_cleanup |= match get(ty).sty {
                ty_nil | ty_bool | ty_int(_) | ty_uint(_) |
                ty_float(_) | ty_tup(_) | ty_ptr(_) => false,
                ty_enum(did, ref substs) =>
                    enum_variants(cx, did).iter().any(|v|
                        v.args.iter().any(|aty| {
                            let t = aty.subst(cx, substs);
                            type_needs_unwind_cleanup_(cx, t, tycache)
                        })
                    ),
                _ => true
            };
            // Stop walking as soon as cleanup is known to be needed.
            !needs_unwind_cleanup
        });
        needs_unwind_cleanup
    }
}

/**
 * Type contents is how the type checker reasons about kinds.
 * They track what kinds of things are found within a type. You can
 * think of them as kind of an "anti-kind". They track the kinds of values
 * and things that are contained in types. Having a larger contents for
 * a type tends to rule that type *out* from various kinds. For example,
 * a type that contains a reference is not sendable.
 *
 * The reason we compute type contents and not kinds is that it is
 * easier for me (nmatsakis) to think about what is contained within
 * a type than to think about what is *not* contained within a type.
 */
#[deriving(Clone)]
pub struct TypeContents {
    pub bits: u64
}

// Defines a module of `TypeContents` bit-set constants from
// `Name = <bit pattern>` pairs.
macro_rules! def_type_content_sets(
    (mod $mname:ident { $($name:ident = $bits:expr),+ }) => {
        #[allow(non_snake_case)]
        mod $mname {
            use middle::ty::TypeContents;
            $(
                #[allow(non_upper_case_globals)]
                pub const $name: TypeContents = TypeContents { bits: $bits };
             )+
        }
    }
)

def_type_content_sets!(
    mod TC {
        None                                = 0b0000_0000__0000_0000__0000,

        // Things that are interior to the value (first nibble):
        InteriorUnsized                     = 0b0000_0000__0000_0000__0001,
        InteriorUnsafe                      = 0b0000_0000__0000_0000__0010,
        // InteriorAll                         = 0b00000000__00000000__1111,

        // Things that are owned by the value (second and third nibbles):
        OwnsOwned                           = 0b0000_0000__0000_0001__0000,
        OwnsDtor                            = 0b0000_0000__0000_0010__0000,
        OwnsManaged /* see [1] below */     = 0b0000_0000__0000_0100__0000,
        OwnsAffine                          = 0b0000_0000__0000_1000__0000,
        OwnsAll                             = 0b0000_0000__1111_1111__0000,

        // Things that are reachable by the value in any way (fourth nibble):
        ReachesBorrowed                     = 0b0000_0010__0000_0000__0000,
        // ReachesManaged /* see [1] below */  = 0b0000_0100__0000_0000__0000,
        ReachesMutable                      = 0b0000_1000__0000_0000__0000,
        ReachesFfiUnsafe                    = 0b0010_0000__0000_0000__0000,
        ReachesAll                          = 0b0011_1111__0000_0000__0000,

        // Things that cause values to *move* rather than *copy*. This
        // is almost the same as the `Copy` trait, but for managed
        // data -- atm, we consider managed data to copy, not move,
        // but it does not impl Copy as a pure memcpy is not good
        // enough. Yuck.
        Moves                               = 0b0000_0000__0000_1011__0000,

        // Things that mean drop glue is necessary
        NeedsDrop                           = 0b0000_0000__0000_0111__0000,

        // Things that prevent values from being considered sized
        Nonsized                            = 0b0000_0000__0000_0000__0001,

        // Things that make values considered not POD (would be same
        // as `Moves`, but for the fact that managed data `@` is
        // not considered POD)
        Noncopy                             = 0b0000_0000__0000_1111__0000,

        // Bits to set when a managed value is encountered
        //
        // [1] Do not set the bits TC::OwnsManaged or
        //     TC::ReachesManaged directly, instead reference
        //     TC::Managed to set them both at once.
        Managed                             = 0b0000_0100__0000_0100__0000,

        // All bits
        All                                 = 0b1111_1111__1111_1111__1111
    }
)

impl TypeContents {
    // `*self` if `cond` holds, otherwise the empty set.
    pub fn when(&self, cond: bool) -> TypeContents {
        if cond {*self} else {TC::None}
    }

    pub fn intersects(&self, tc: TypeContents) -> bool {
        (self.bits & tc.bits) != 0
    }

    pub fn owns_managed(&self) -> bool {
        self.intersects(TC::OwnsManaged)
    }

    pub fn owns_owned(&self) -> bool {
        self.intersects(TC::OwnsOwned)
    }

    pub fn is_sized(&self, _: &ctxt) -> bool {
        !self.intersects(TC::Nonsized)
    }

    pub fn interior_unsafe(&self) -> bool {
        self.intersects(TC::InteriorUnsafe)
    }

    pub fn interior_unsized(&self) -> bool {
        self.intersects(TC::InteriorUnsized)
    }

    pub fn moves_by_default(&self, _: &ctxt) -> bool {
        self.intersects(TC::Moves)
    }

    pub fn needs_drop(&self, _: &ctxt) -> bool {
        self.intersects(TC::NeedsDrop)
    }

    pub fn owned_pointer(&self) -> TypeContents {
        /*!
         * Includes only those bits that still apply
         * when indirected through a `Box` pointer
         */
        TC::OwnsOwned | (
            *self & (TC::OwnsAll | TC::ReachesAll))
    }

    pub fn reference(&self, bits: TypeContents) -> TypeContents {
        /*!
         * Includes only those bits that still apply
         * when indirected through a reference (`&`)
         */
        bits | (
            *self & TC::ReachesAll)
    }

    pub fn managed_pointer(&self) -> TypeContents {
        /*!
         * Includes only those bits that still apply
         * when indirected through a managed pointer (`@`)
         */
        TC::Managed | (
            *self & TC::ReachesAll)
    }

    pub fn unsafe_pointer(&self) -> TypeContents {
        /*!
         * Includes only those bits that still apply
         * when indirected through an unsafe pointer (`*`)
         */
        *self & TC::ReachesAll
    }

    // Fold `f` over `v`, unioning all resulting bit sets.
    pub fn union<T>(v: &[T], f: |&T| -> TypeContents) -> TypeContents {
        v.iter().fold(TC::None, |tc, t| tc | f(t))
    }

    pub fn has_dtor(&self) -> bool {
        self.intersects(TC::OwnsDtor)
    }
}

impl ops::BitOr<TypeContents,TypeContents> for TypeContents {
    fn bitor(&self, other: &TypeContents) -> TypeContents {
        TypeContents {bits: self.bits | other.bits}
    }
}

impl ops::BitAnd<TypeContents,TypeContents> for TypeContents {
    fn bitand(&self, other: &TypeContents) -> TypeContents {
        TypeContents {bits: self.bits & other.bits}
    }
}

impl ops::Sub<TypeContents,TypeContents> for TypeContents {
    // Set difference: keep the bits of `self` not present in `other`.
    fn sub(&self, other: &TypeContents) -> TypeContents {
        TypeContents {bits: self.bits & !other.bits}
    }
}

impl fmt::Show for TypeContents {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "TypeContents({:t})", self.bits)
    }
}

pub fn type_interior_is_unsafe(cx: &ctxt, t: ty::t) -> bool {
    type_contents(cx, t).interior_unsafe()
}

// Computes (and memoizes in `cx.tc_cache`) the `TypeContents` of `ty`.
pub fn type_contents(cx: &ctxt, ty: t) -> TypeContents {
    return memoized(&cx.tc_cache, ty, |ty| {
        tc_ty(cx, ty, &mut FnvHashMap::new())
    });

    fn tc_ty(cx: &ctxt,
             ty: t,
             cache: &mut FnvHashMap<t, TypeContents>) -> TypeContents {
        // Subtle: Note that we are *not* using cx.tc_cache here but rather a
        // private cache for this walk. This is needed in the case of cyclic
        // types like:
        //
        //     struct List { next: Box<Option<List>>, ... }
        //
        // When computing the type contents of such a type, we wind up deeply
        // recursing as we go. So when we encounter the recursive reference
        // to List, we temporarily use TC::None as its contents. Later we'll
        // patch up the cache with the correct value, once we've computed it
        // (this is basically a co-inductive process, if that helps). So in
        // the end we'll compute TC::OwnsOwned, in this case.
        //
        // The problem is, as we are doing the computation, we will also
        // compute an *intermediate* contents for, e.g., Option<List> of
        // TC::None. This is ok during the computation of List itself, but if
        // we stored this intermediate value into cx.tc_cache, then later
        // requests for the contents of Option<List> would also yield TC::None
        // which is incorrect. This value was computed based on the crutch
        // value for the type contents of list. The correct value is
        // TC::OwnsOwned. This manifested as issue #4821.
        match cache.get(&ty) {
            Some(tc) => { return *tc; }
            None => {}
        }
        match cx.tc_cache.borrow().get(&ty) {    // Must check both caches!
            Some(tc) => { return *tc; }
            None => {}
        }
        // Provisional entry breaking cycles (see comment above).
        cache.insert(ty, TC::None);

        let result = match get(ty).sty {
            // uint and int are ffi-unsafe
            ty_uint(ast::TyU) | ty_int(ast::TyI) => {
                TC::ReachesFfiUnsafe
            }

            // Scalar and unique types are sendable, and durable
            ty_infer(ty::SkolemizedIntTy(_)) |
            ty_nil | ty_bool | ty_int(_) | ty_uint(_) | ty_float(_) |
            ty_bare_fn(_) | ty::ty_char => {
                TC::None
            }

            ty_closure(ref c) => {
                closure_contents(cx, &**c) | TC::ReachesFfiUnsafe
            }

            ty_uniq(typ) => {
                TC::ReachesFfiUnsafe | match get(typ).sty {
                    ty_str => TC::OwnsOwned,
                    _ => tc_ty(cx, typ, cache).owned_pointer(),
                }
            }

            ty_trait(box TyTrait { bounds, .. }) => {
                object_contents(cx, bounds) | TC::ReachesFfiUnsafe | TC::Nonsized
            }

            ty_ptr(ref mt) => {
                tc_ty(cx, mt.ty, cache).unsafe_pointer()
            }

            ty_rptr(r, ref mt) => {
                // NOTE(review): the `ty_vec(..)` and `_` arms below are
                // identical; the distinction looks vestigial.
                TC::ReachesFfiUnsafe | match get(mt.ty).sty {
                    ty_str => borrowed_contents(r, ast::MutImmutable),
                    ty_vec(..) => tc_ty(cx, mt.ty, cache).reference(borrowed_contents(r, mt.mutbl)),
                    _ => tc_ty(cx, mt.ty, cache).reference(borrowed_contents(r, mt.mutbl)),
                }
            }

            ty_vec(t, Some(_)) => {
                tc_ty(cx, t, cache)
            }

            ty_vec(t, None) => {
                tc_ty(cx, t, cache) | TC::Nonsized
            }
            ty_str => TC::Nonsized,

            ty_struct(did, ref substs) => {
                let flds = struct_fields(cx, did, substs);
                let mut res =
                    TypeContents::union(flds.as_slice(),
                                        |f| tc_mt(cx, f.mt, cache));

                if !lookup_repr_hints(cx, did).contains(&attr::ReprExtern) {
                    res = res | TC::ReachesFfiUnsafe;
                }

                if ty::has_dtor(cx, did) {
                    res = res | TC::OwnsDtor;
                }
                apply_lang_items(cx, did, res)
            }

            ty_unboxed_closure(did, r, ref substs) => {
                // FIXME(#14449): `borrowed_contents` below assumes `&mut`
                // unboxed closure.
                let upvars = unboxed_closure_upvars(cx, did, substs);
                TypeContents::union(upvars.as_slice(),
                                    |f| tc_ty(cx, f.ty, cache)) |
                    borrowed_contents(r, MutMutable)
            }

            ty_tup(ref tys) => {
                TypeContents::union(tys.as_slice(),
                                    |ty| tc_ty(cx, *ty, cache))
            }

            ty_enum(did, ref substs) => {
                let variants = substd_enum_variants(cx, did, substs);
                let mut res =
                    TypeContents::union(variants.as_slice(), |variant| {
                        TypeContents::union(variant.args.as_slice(),
                                            |arg_ty| {
                            tc_ty(cx, *arg_ty, cache)
                        })
                    });

                if ty::has_dtor(cx, did) {
                    res = res | TC::OwnsDtor;
                }

                if variants.len() != 0 {
                    let repr_hints = lookup_repr_hints(cx, did);
                    if repr_hints.len() > 1 {
                        // this is an error later on, but this type isn't safe
                        res = res | TC::ReachesFfiUnsafe;
                    }

                    match repr_hints.as_slice().get(0) {
                        Some(h) => if !h.is_ffi_safe() {
                            res = res | TC::ReachesFfiUnsafe;
                        },
                        // ReprAny
                        None => {
                            res = res | TC::ReachesFfiUnsafe;

                            // We allow ReprAny enums if they are eligible for
                            // the nullable pointer optimization and the
                            // contained type is an `extern fn`

                            if variants.len() == 2 {
                                let mut data_idx = 0;

                                if variants[0].args.len() == 0 {
                                    data_idx = 1;
                                }

                                if variants[data_idx].args.len() == 1 {
                                    match get(variants[data_idx].args[0]).sty {
                                        ty_bare_fn(..) => { res = res - TC::ReachesFfiUnsafe; }
                                        _ => { }
                                    }
                                }
                            }
                        }
                    }
                }

                apply_lang_items(cx, did, res)
            }

            ty_param(p) => {
                // We only ever ask for the kind of types that are defined in
                // the current crate; therefore, the only type parameters that
                // could be in scope are those defined in the current crate.
                // If this assertion fails, it is likely because of a
                // failure of the cross-crate inlining code to translate a
                // def-id.
                assert_eq!(p.def_id.krate, ast::LOCAL_CRATE);

                let ty_param_defs = cx.ty_param_defs.borrow();
                let tp_def = &(*ty_param_defs)[p.def_id.node];
                kind_bounds_to_contents(
                    cx,
                    tp_def.bounds.builtin_bounds,
                    tp_def.bounds.trait_bounds.as_slice())
            }

            ty_infer(_) => {
                // This occurs during coherence, but shouldn't occur at other
                // times.
                TC::All
            }

            ty_open(t) => {
                let result = tc_ty(cx, t, cache);
                assert!(!result.is_sized(cx))
                result.unsafe_pointer() | TC::Nonsized
            }

            ty_err => {
                cx.sess.bug("asked to compute contents of error type");
            }
        };

        // Patch up the provisional entry with the real answer.
        cache.insert(ty, result);
        result
    }

    fn tc_mt(cx: &ctxt,
             mt: mt,
             cache: &mut FnvHashMap<t, TypeContents>) -> TypeContents {
        let mc = TC::ReachesMutable.when(mt.mutbl == MutMutable);
        mc | tc_ty(cx, mt.ty, cache)
    }

    // Lang-item marker types (Gc bound, NoCopy, UnsafeCell) inject bits.
    fn apply_lang_items(cx: &ctxt,
                        did: ast::DefId,
                        tc: TypeContents)
                        -> TypeContents {
        if Some(did) == cx.lang_items.managed_bound() {
            tc | TC::Managed
        } else if Some(did) == cx.lang_items.no_copy_bound() {
            tc | TC::OwnsAffine
        } else if Some(did) == cx.lang_items.unsafe_type() {
            tc | TC::InteriorUnsafe
        } else {
            tc
        }
    }

    fn borrowed_contents(region: ty::Region,
                         mutbl: ast::Mutability)
                         -> TypeContents {
        /*!
         * Type contents due to containing a reference
         * with the region `region` and borrow kind `bk`
         */
        let b = match mutbl {
            ast::MutMutable => TC::ReachesMutable | TC::OwnsAffine,
            ast::MutImmutable => TC::None,
        };
        b | (TC::ReachesBorrowed).when(region != ty::ReStatic)
    }

    fn closure_contents(cx: &ctxt, cty: &ClosureTy) -> TypeContents {
        // Closure contents are just like trait contents, but with potentially
        // even more stuff.
        let st = object_contents(cx, cty.bounds);

        let st = match cty.store {
            UniqTraitStore => {
                st.owned_pointer()
            }
            RegionTraitStore(r, mutbl) => {
                st.reference(borrowed_contents(r, mutbl))
            }
        };

        // This also prohibits "@once fn" from being copied, which allows it to
        // be called. Neither way really makes much sense.
        let ot = match cty.onceness {
            ast::Once => TC::OwnsAffine,
            ast::Many => TC::None,
        };

        st | ot
    }

    fn object_contents(cx: &ctxt,
                       bounds: ExistentialBounds)
                       -> TypeContents {
        // These are the type contents of the (opaque) interior
        kind_bounds_to_contents(cx, bounds.builtin_bounds, [])
    }

    // Start from TC::All and *subtract* what each builtin bound guarantees.
    fn kind_bounds_to_contents(cx: &ctxt,
                               bounds: BuiltinBounds,
                               traits: &[Rc<TraitRef>])
                               -> TypeContents {
        let _i = indenter();
        let mut tc = TC::All;
        each_inherited_builtin_bound(cx, bounds, traits, |bound| {
            tc = tc - match bound {
                BoundSync | BoundSend => TC::None,
                BoundSized => TC::Nonsized,
                BoundCopy => TC::Noncopy,
            };
        });
        return tc;

        // Iterates over all builtin bounds on the type parameter def, including
        // those inherited from traits with builtin-kind-supertraits.
        fn each_inherited_builtin_bound(cx: &ctxt,
                                        bounds: BuiltinBounds,
                                        traits: &[Rc<TraitRef>],
                                        f: |BuiltinBound|) {
            for bound in bounds.iter() {
                f(bound);
            }

            each_bound_trait_and_supertraits(cx, traits, |trait_ref| {
                let trait_def = lookup_trait_def(cx, trait_ref.def_id);
                for bound in trait_def.bounds.builtin_bounds.iter() {
                    f(bound);
                }
                true
            });
        }
    }
}

pub fn type_moves_by_default(cx: &ctxt, ty: t) -> bool {
    type_contents(cx, ty).moves_by_default(cx)
}

pub fn is_ffi_safe(cx: &ctxt, ty: t) -> bool {
    !type_contents(cx, ty).intersects(TC::ReachesFfiUnsafe)
}

// True if instantiating an instance of `r_ty` requires an instance of `r_ty`.
// Returns true unless every way of constructing a value of `r_ty` would
// require an already-existing value of `r_ty` (i.e. the type would be
// uninhabitable without infinite regress, e.g. `struct S { s: Box<S> }`).
pub fn is_instantiable(cx: &ctxt, r_ty: t) -> bool {
    // Does building a `ty` require an instance of `r_ty`?  True when the
    // two types are structurally equal, or recursively via `subtypes_require`.
    fn type_requires(cx: &ctxt, seen: &mut Vec<DefId>,
                     r_ty: t, ty: t) -> bool {
        debug!("type_requires({}, {})?",
               ::util::ppaux::ty_to_string(cx, r_ty),
               ::util::ppaux::ty_to_string(cx, ty));

        let r = {
            get(r_ty).sty == get(ty).sty ||
                subtypes_require(cx, seen, r_ty, ty)
        };

        debug!("type_requires({}, {})? {}",
               ::util::ppaux::ty_to_string(cx, r_ty),
               ::util::ppaux::ty_to_string(cx, ty),
               r);
        return r;
    }

    // Does some *component* of `ty` require an instance of `r_ty`?
    // `seen` holds the struct/enum def-ids currently being expanded,
    // so recursive references through them terminate.
    fn subtypes_require(cx: &ctxt, seen: &mut Vec<DefId>,
                        r_ty: t, ty: t) -> bool {
        debug!("subtypes_require({}, {})?",
               ::util::ppaux::ty_to_string(cx, r_ty),
               ::util::ppaux::ty_to_string(cx, ty));

        let r = match get(ty).sty {
            // fixed length vectors need special treatment compared to
            // normal vectors, since they don't necessarily have the
            // possibility to have length zero.
            ty_vec(_, Some(0)) => false, // don't need no contents
            ty_vec(ty, Some(_)) => type_requires(cx, seen, r_ty, ty),

            ty_nil |
            ty_bool |
            ty_char |
            ty_int(_) |
            ty_uint(_) |
            ty_float(_) |
            ty_str |
            ty_bare_fn(_) |
            ty_closure(_) |
            ty_infer(_) |
            ty_err |
            ty_param(_) |
            ty_vec(_, None) => {
                false
            }
            ty_uniq(typ) | ty_open(typ) => {
                type_requires(cx, seen, r_ty, typ)
            }
            ty_rptr(_, ref mt) => {
                type_requires(cx, seen, r_ty, mt.ty)
            }

            ty_ptr(..) => {
                false           // unsafe ptrs can always be NULL
            }

            ty_trait(..) => {
                false
            }

            // Already expanding this struct: cut off the cycle.
            ty_struct(ref did, _) if seen.contains(did) => {
                false
            }

            ty_struct(did, ref substs) => {
                seen.push(did);
                let fields = struct_fields(cx, did, substs);
                let r = fields.iter().any(|f| type_requires(cx, seen, r_ty, f.mt.ty));
                seen.pop().unwrap();
                r
            }

            ty_unboxed_closure(did, _, ref substs) => {
                let upvars = unboxed_closure_upvars(cx, did, substs);
                upvars.iter().any(|f| type_requires(cx, seen, r_ty, f.ty))
            }

            ty_tup(ref ts) => {
                ts.iter().any(|t| type_requires(cx, seen, r_ty, *t))
            }

            // Already expanding this enum: cut off the cycle.
            ty_enum(ref did, _) if seen.contains(did) => {
                false
            }

            ty_enum(did, ref substs) => {
                seen.push(did);
                let vs = enum_variants(cx, did);
                // An enum requires `r_ty` only if *every* variant does
                // (any single constructible variant suffices to build it);
                // an empty enum requires nothing.
                let r = !vs.is_empty() && vs.iter().all(|variant| {
                    variant.args.iter().any(|aty| {
                        let sty = aty.subst(cx, substs);
                        type_requires(cx, seen, r_ty, sty)
                    })
                });
                seen.pop().unwrap();
                r
            }
        };

        debug!("subtypes_require({}, {})? {}",
               ::util::ppaux::ty_to_string(cx, r_ty),
               ::util::ppaux::ty_to_string(cx, ty),
               r);

        return r;
    }

    let mut seen = Vec::new();
    !subtypes_require(cx, &mut seen, r_ty, r_ty)
}

/// Describes whether a type is representable. For types that are not
/// representable, 'SelfRecursive' and 'ContainsRecursive' are used to
/// distinguish between types that are recursive with themselves and types that
/// contain a different recursive type. These cases can therefore be treated
/// differently when reporting errors.
///
/// The ordering of the cases is significant. They are sorted so that cmp::max
/// will keep the "more erroneous" of two values.
#[deriving(PartialOrd, Ord, Eq, PartialEq, Show)]
pub enum Representability {
    Representable,
    ContainsRecursive,
    SelfRecursive,
}

/// Check whether a type is representable. This means it cannot contain unboxed
/// structural recursion. This check is needed for structs and enums.
pub fn is_type_representable(cx: &ctxt, sp: Span, ty: t) -> Representability {

    // Iterate until something non-representable is found
    fn find_nonrepresentable<It: Iterator<t>>(cx: &ctxt, sp: Span, seen: &mut Vec<t>,
                                              mut iter: It) -> Representability {
        // `cmp::max` keeps the "most erroneous" verdict (see enum ordering).
        iter.fold(Representable,
                  |r, ty| cmp::max(r, is_type_structurally_recursive(cx, sp, seen, ty)))
    }

    // Recurse into the components of `ty` that are stored inline
    // (tuples, fixed-length vectors, struct/enum fields, upvars).
    fn are_inner_types_recursive(cx: &ctxt, sp: Span,
                                 seen: &mut Vec<t>, ty: t) -> Representability {
        match get(ty).sty {
            ty_tup(ref ts) => {
                find_nonrepresentable(cx, sp, seen, ts.iter().map(|t| *t))
            }
            // Fixed-length vectors.
            // FIXME(#11924) Behavior undecided for zero-length vectors.
            ty_vec(ty, Some(_)) => {
                is_type_structurally_recursive(cx, sp, seen, ty)
            }
            ty_struct(did, ref substs) => {
                let fields = struct_fields(cx, did, substs);
                find_nonrepresentable(cx, sp, seen, fields.iter().map(|f| f.mt.ty))
            }
            ty_enum(did, ref substs) => {
                let vs = enum_variants(cx, did);
                let iter = vs.iter()
                    .flat_map(|variant| { variant.args.iter() })
                    .map(|aty| { aty.subst_spanned(cx, substs, Some(sp)) });

                find_nonrepresentable(cx, sp, seen, iter)
            }
            ty_unboxed_closure(did, _, ref substs) => {
                let upvars = unboxed_closure_upvars(cx, did, substs);
                find_nonrepresentable(cx, sp, seen, upvars.iter().map(|f| f.ty))
            }
            // Pointer-like types introduce indirection and so stop the walk.
            _ => Representable,
        }
    }

    // Same nominal type, ignoring type arguments.
    fn same_struct_or_enum_def_id(ty: t, did: DefId) -> bool {
        match get(ty).sty {
            ty_struct(ty_did, _) | ty_enum(ty_did, _) => {
                 ty_did == did
            }
            _ => false
        }
    }

    // Structural equality that compares nominal types by def-id plus
    // recursively-equal type arguments (in the type space only).
    fn same_type(a: t, b: t) -> bool {
        match (&get(a).sty, &get(b).sty) {
            (&ty_struct(did_a, ref substs_a), &ty_struct(did_b, ref substs_b)) |
            (&ty_enum(did_a, ref substs_a), &ty_enum(did_b, ref substs_b)) => {
                if did_a != did_b {
                    return false;
                }

                let types_a = substs_a.types.get_slice(subst::TypeSpace);
                let types_b = substs_b.types.get_slice(subst::TypeSpace);

                let mut pairs = types_a.iter().zip(types_b.iter());

                pairs.all(|(&a, &b)| same_type(a, b))
            }
            _ => {
                a == b
            }
        }
    }

    // Does the type `ty` directly (without indirection through a pointer)
    // contain any types on stack `seen`?
    fn is_type_structurally_recursive(cx: &ctxt, sp: Span, seen: &mut Vec<t>,
                                      ty: t) -> Representability {
        debug!("is_type_structurally_recursive: {}",
               ::util::ppaux::ty_to_string(cx, ty));

        match get(ty).sty {
            ty_struct(did, _) | ty_enum(did, _) => {
                {
                    // Iterate through stack of previously seen types.
                    let mut iter = seen.iter();

                    // The first item in `seen` is the type we are actually curious about.
                    // We want to return SelfRecursive if this type contains itself.
                    // It is important that we DON'T take generic parameters into account
                    // for this check, so that Bar<T> in this example counts as SelfRecursive:
                    //
                    // struct Foo;
                    // struct Bar<T> { x: Bar<Foo> }

                    match iter.next() {
                        Some(&seen_type) => {
                            if same_struct_or_enum_def_id(seen_type, did) {
                                debug!("SelfRecursive: {} contains {}",
                                       ::util::ppaux::ty_to_string(cx, seen_type),
                                       ::util::ppaux::ty_to_string(cx, ty));
                                return SelfRecursive;
                            }
                        }
                        None => {}
                    }

                    // We also need to know whether the first item contains other types that
                    // are structurally recursive. If we don't catch this case, we will recurse
                    // infinitely for some inputs.
                    //
                    // It is important that we DO take generic parameters into account here,
                    // so that code like this is considered SelfRecursive, not ContainsRecursive:
                    //
                    // struct Foo { Option<Option<Foo>> }

                    for &seen_type in iter {
                        if same_type(ty, seen_type) {
                            debug!("ContainsRecursive: {} contains {}",
                                   ::util::ppaux::ty_to_string(cx, seen_type),
                                   ::util::ppaux::ty_to_string(cx, ty));
                            return ContainsRecursive;
                        }
                    }
                }

                // For structs and enums, track all previously seen types by pushing them
                // onto the 'seen' stack.
                seen.push(ty);
                let out = are_inner_types_recursive(cx, sp, seen, ty);
                seen.pop();
                out
            }
            _ => {
                // No need to push in other cases.
                are_inner_types_recursive(cx, sp, seen, ty)
            }
        }
    }

    debug!("is_type_representable: {}",
           ::util::ppaux::ty_to_string(cx, ty));

    // To avoid a stack overflow when checking an enum variant or struct that
    // contains a different, structurally recursive type, maintain a stack
    // of seen types and check recursion for each of them (issues #3008, #3779).
    let mut seen: Vec<t> = Vec::new();
    let r = is_type_structurally_recursive(cx, sp, &mut seen, ty);
    debug!("is_type_representable: {} is {}",
           ::util::ppaux::ty_to_string(cx, ty), r);
    r
}

pub fn type_is_trait(ty: t) -> bool {
    type_trait_info(ty).is_some()
}

// If `ty` is a trait object, or a pointer (raw, `&`, or `Box`) to one,
// return the `TyTrait` payload.
pub fn type_trait_info(ty: t) -> Option<&'static TyTrait> {
    match get(ty).sty {
        ty_uniq(ty) | ty_rptr(_, mt { ty, ..}) | ty_ptr(mt { ty, ..}) => match get(ty).sty {
            ty_trait(ref t) => Some(&**t),
            _ => None
        },
        ty_trait(ref t) => Some(&**t),
        _ => None
    }
}

pub fn type_is_integral(ty: t) -> bool {
    match get(ty).sty {
      ty_infer(IntVar(_)) | ty_int(_) | ty_uint(_) => true,
      _ => false
    }
}

pub fn type_is_skolemized(ty: t) -> bool {
    match get(ty).sty {
      ty_infer(SkolemizedTy(_)) => true,
      ty_infer(SkolemizedIntTy(_)) => true,
      _ => false
    }
}

pub fn type_is_uint(ty: t) -> bool {
    match get(ty).sty {
      ty_infer(IntVar(_)) | ty_uint(ast::TyU) => true,
      _ => false
    }
}

pub fn type_is_char(ty: t) -> bool {
    match get(ty).sty {
        ty_char => true,
        _ => false
    }
}

pub fn type_is_bare_fn(ty: t) -> bool {
    match get(ty).sty {
        ty_bare_fn(..) => true,
        _ => false
    }
}

pub fn type_is_fp(ty: t) -> bool {
    match get(ty).sty {
      ty_infer(FloatVar(_)) | ty_float(_) => true,
      _ => false
    }
}

pub fn type_is_numeric(ty: t) -> bool {
    return type_is_integral(ty) || type_is_fp(ty);
}

pub fn type_is_signed(ty: t) -> bool {
    match get(ty).sty {
      ty_int(_) => true,
      _ => false
    }
}

// "Machine" types have an explicit bit width (no pointer-sized int/uint).
pub fn type_is_machine(ty: t) -> bool {
    match get(ty).sty {
        ty_int(ast::TyI) | ty_uint(ast::TyU) => false,
        ty_int(..) | ty_uint(..) | ty_float(..) => true,
        _ => false
    }
}

// Is the type's representation size known at compile time?
/// Is the type's representation size known at compile time (via the
/// cached type-contents analysis)?
pub fn type_is_sized(cx: &ctxt, ty: t) -> bool {
    type_contents(cx, ty).is_sized(cx)
}

/// Like `type_is_sized`, but "open" DST types are considered sized,
/// matching how trans lays them out.
pub fn lltype_is_sized(cx: &ctxt, ty: t) -> bool {
    match get(ty).sty {
        ty_open(_) => true,
        _ => type_contents(cx, ty).is_sized(cx)
    }
}

// Return the smallest part of t which is unsized. Fails if t is sized.
// 'Smallest' here means component of the static representation of the type; not
// the size of an object at runtime.
pub fn unsized_part_of_type(cx: &ctxt, ty: t) -> t {
    match get(ty).sty {
        // These are themselves the smallest unsized component.
        ty_str | ty_trait(..) | ty_vec(..) => ty,
        ty_struct(def_id, ref substs) => {
            // An unsized struct gets its unsizedness from exactly one
            // (trailing) field; recurse into it.
            let unsized_fields: Vec<_> = struct_fields(cx, def_id, substs).iter()
                .map(|f| f.mt.ty).filter(|ty| !type_is_sized(cx, *ty)).collect();
            // Exactly one of the fields must be unsized.
            assert!(unsized_fields.len() == 1)
            unsized_part_of_type(cx, unsized_fields[0])
        }
        _ => {
            assert!(type_is_sized(cx, ty),
                    "unsized_part_of_type failed even though ty is unsized");
            panic!("called unsized_part_of_type with sized ty");
        }
    }
}

// Whether a type is enum like, that is an enum type with only nullary
// constructors
pub fn type_is_c_like_enum(cx: &ctxt, ty: t) -> bool {
    match get(ty).sty {
        ty_enum(did, _) => {
            let variants = enum_variants(cx, did);
            if variants.len() == 0 {
                // A zero-variant enum is not C-like.
                false
            } else {
                variants.iter().all(|v| v.args.len() == 0)
            }
        }
        _ => false
    }
}

// Returns the type and mutability of *t.
//
// The parameter `explicit` indicates if this is an *explicit* dereference.
// Some types---notably unsafe ptrs---can only be dereferenced explicitly.
pub fn deref(t: t, explicit: bool) -> Option<mt> { match get(t).sty { ty_uniq(ty) => { Some(mt { ty: ty, mutbl: ast::MutImmutable, }) }, ty_rptr(_, mt) => Some(mt), ty_ptr(mt) if explicit => Some(mt), _ => None } } pub fn deref_or_dont(t: t) -> t { match get(t).sty { ty_uniq(ty) => ty, ty_rptr(_, mt) | ty_ptr(mt) => mt.ty, _ => t } } pub fn close_type(cx: &ctxt, t: t) -> t { match get(t).sty { ty_open(t) => mk_rptr(cx, ReStatic, mt {ty: t, mutbl:ast::MutImmutable}), _ => cx.sess.bug(format!("Trying to close a non-open type {}", ty_to_string(cx, t)).as_slice()) } } pub fn type_content(t: t) -> t { match get(t).sty { ty_uniq(ty) => ty, ty_rptr(_, mt) |ty_ptr(mt) => mt.ty, _ => t } } // Extract the unsized type in an open type (or just return t if it is not open). pub fn unopen_type(t: t) -> t { match get(t).sty { ty_open(t) => t, _ => t } } // Returns the type of t[i] pub fn index(ty: t) -> Option<t> { match get(ty).sty { ty_vec(t, _) => Some(t), _ => None } } // Returns the type of elements contained within an 'array-like' type. // This is exactly the same as the above, except it supports strings, // which can't actually be indexed. 
/// Returns the element type of an "array-like" type: the element type of
/// vectors/slices, and `u8` for `str` (which cannot actually be indexed).
pub fn array_element_ty(t: t) -> Option<t> {
    match get(t).sty {
        ty_vec(t, _) => Some(t),
        ty_str => Some(mk_u8()),
        _ => None
    }
}

/// Looks up the trait reference recorded for node `id`; it is a bug for
/// the entry to be missing.
pub fn node_id_to_trait_ref(cx: &ctxt, id: ast::NodeId) -> Rc<ty::TraitRef> {
    match cx.trait_refs.borrow().get(&id) {
        Some(t) => t.clone(),
        None => cx.sess.bug(
            format!("node_id_to_trait_ref: no trait ref for node `{}`",
                    cx.map.node_to_string(id)).as_slice())
    }
}

/// Returns the type recorded for node `id`, or `None` if none was
/// recorded.
pub fn try_node_id_to_type(cx: &ctxt, id: ast::NodeId) -> Option<t> {
    cx.node_types.borrow().find_copy(&id)
}

/// Returns the type recorded for node `id`; it is a bug for the entry to
/// be missing.
pub fn node_id_to_type(cx: &ctxt, id: ast::NodeId) -> t {
    match try_node_id_to_type(cx, id) {
       Some(t) => t,
       None => cx.sess.bug(
           format!("node_id_to_type: no type for node `{}`",
                   cx.map.node_to_string(id)).as_slice())
    }
}

// NOTE(review): behaves the same as `try_node_id_to_type` above;
// candidates for consolidation.
pub fn node_id_to_type_opt(cx: &ctxt, id: ast::NodeId) -> Option<t> {
    match cx.node_types.borrow().get(&id) {
       Some(&t) => Some(t),
       None => None
    }
}

/// Returns the item substitutions recorded for node `id`, or an empty
/// set if none were recorded.
pub fn node_id_item_substs(cx: &ctxt, id: ast::NodeId) -> ItemSubsts {
    match cx.item_substs.borrow().get(&id) {
      None => ItemSubsts::empty(),
      Some(ts) => ts.clone(),
    }
}

/// True if the function type `fty` is variadic (C-style `...`).
/// Panics on non-function types.
pub fn fn_is_variadic(fty: t) -> bool {
    match get(fty).sty {
        ty_bare_fn(ref f) => f.sig.variadic,
        ty_closure(ref f) => f.sig.variadic,
        ref s => {
            panic!("fn_is_variadic() called on non-fn type: {}", s)
        }
    }
}

/// Returns a clone of the signature of the function type `fty`.
/// Panics on non-function types.
pub fn ty_fn_sig(fty: t) -> FnSig {
    match get(fty).sty {
        ty_bare_fn(ref f) => f.sig.clone(),
        ty_closure(ref f) => f.sig.clone(),
        ref s => {
            panic!("ty_fn_sig() called on non-fn type: {}", s)
        }
    }
}

/// Returns the ABI of the given function.
pub fn ty_fn_abi(fty: t) -> abi::Abi { match get(fty).sty { ty_bare_fn(ref f) => f.abi, ty_closure(ref f) => f.abi, _ => panic!("ty_fn_abi() called on non-fn type"), } } // Type accessors for substructures of types pub fn ty_fn_args(fty: t) -> Vec<t> { match get(fty).sty { ty_bare_fn(ref f) => f.sig.inputs.clone(), ty_closure(ref f) => f.sig.inputs.clone(), ref s => { panic!("ty_fn_args() called on non-fn type: {}", s) } } } pub fn ty_closure_store(fty: t) -> TraitStore { match get(fty).sty { ty_closure(ref f) => f.store, ty_unboxed_closure(..) => { // Close enough for the purposes of all the callers of this // function (which is soon to be deprecated anyhow). UniqTraitStore } ref s => { panic!("ty_closure_store() called on non-closure type: {}", s) } } } pub fn ty_fn_ret(fty: t) -> FnOutput { match get(fty).sty { ty_bare_fn(ref f) => f.sig.output, ty_closure(ref f) => f.sig.output, ref s => { panic!("ty_fn_ret() called on non-fn type: {}", s) } } } pub fn is_fn_ty(fty: t) -> bool { match get(fty).sty { ty_bare_fn(_) => true, ty_closure(_) => true, _ => false } } pub fn ty_region(tcx: &ctxt, span: Span, ty: t) -> Region { match get(ty).sty { ty_rptr(r, _) => r, ref s => { tcx.sess.span_bug( span, format!("ty_region() invoked on an inappropriate ty: {}", s).as_slice()); } } } pub fn free_region_from_def(free_id: ast::NodeId, def: &RegionParameterDef) -> ty::Region { ty::ReFree(ty::FreeRegion { scope_id: free_id, bound_region: ty::BrNamed(def.def_id, def.name) }) } // Returns the type of a pattern as a monotype. Like @expr_ty, this function // doesn't provide type parameter substitutions. pub fn pat_ty(cx: &ctxt, pat: &ast::Pat) -> t { return node_id_to_type(cx, pat.id); } // Returns the type of an expression as a monotype. // // NB (1): This is the PRE-ADJUSTMENT TYPE for the expression. That is, in // some cases, we insert `AutoAdjustment` annotations such as auto-deref or // auto-ref. The type returned by this function does not consider such // adjustments. 
See `expr_ty_adjusted()` instead. // // NB (2): This type doesn't provide type parameter substitutions; e.g. if you // ask for the type of "id" in "id(3)", it will return "fn(&int) -> int" // instead of "fn(t) -> T with T = int". pub fn expr_ty(cx: &ctxt, expr: &ast::Expr) -> t { return node_id_to_type(cx, expr.id); } pub fn expr_ty_opt(cx: &ctxt, expr: &ast::Expr) -> Option<t> { return node_id_to_type_opt(cx, expr.id); } pub fn expr_ty_adjusted(cx: &ctxt, expr: &ast::Expr) -> t { /*! * * Returns the type of `expr`, considering any `AutoAdjustment` * entry recorded for that expression. * * It would almost certainly be better to store the adjusted ty in with * the `AutoAdjustment`, but I opted not to do this because it would * require serializing and deserializing the type and, although that's not * hard to do, I just hate that code so much I didn't want to touch it * unless it was to fix it properly, which seemed a distraction from the * task at hand! -nmatsakis */ adjust_ty(cx, expr.span, expr.id, expr_ty(cx, expr), cx.adjustments.borrow().get(&expr.id), |method_call| cx.method_map.borrow().get(&method_call).map(|method| method.ty)) } pub fn expr_span(cx: &ctxt, id: NodeId) -> Span { match cx.map.find(id) { Some(ast_map::NodeExpr(e)) => { e.span } Some(f) => { cx.sess.bug(format!("Node id {} is not an expr: {}", id, f).as_slice()); } None => { cx.sess.bug(format!("Node id {} is not present \ in the node map", id).as_slice()); } } } pub fn local_var_name_str(cx: &ctxt, id: NodeId) -> InternedString { match cx.map.find(id) { Some(ast_map::NodeLocal(pat)) => { match pat.node { ast::PatIdent(_, ref path1, _) => { token::get_ident(path1.node) } _ => { cx.sess.bug( format!("Variable id {} maps to {}, not local", id, pat).as_slice()); } } } r => { cx.sess.bug(format!("Variable id {} maps to {}, not local", id, r).as_slice()); } } } pub fn adjust_ty(cx: &ctxt, span: Span, expr_id: ast::NodeId, unadjusted_ty: ty::t, adjustment: Option<&AutoAdjustment>, method_type: 
|typeck::MethodCall| -> Option<ty::t>) -> ty::t { /*! See `expr_ty_adjusted` */ match get(unadjusted_ty).sty { ty_err => return unadjusted_ty, _ => {} } return match adjustment { Some(adjustment) => { match *adjustment { AdjustAddEnv(store) => { match ty::get(unadjusted_ty).sty { ty::ty_bare_fn(ref b) => { let bounds = ty::ExistentialBounds { region_bound: ReStatic, builtin_bounds: all_builtin_bounds(), }; ty::mk_closure( cx, ty::ClosureTy {fn_style: b.fn_style, onceness: ast::Many, store: store, bounds: bounds, sig: b.sig.clone(), abi: b.abi}) } ref b => { cx.sess.bug( format!("add_env adjustment on non-bare-fn: \ {}", b).as_slice()); } } } AdjustDerefRef(ref adj) => { let mut adjusted_ty = unadjusted_ty; if !ty::type_is_error(adjusted_ty) { for i in range(0, adj.autoderefs) { let method_call = typeck::MethodCall::autoderef(expr_id, i); match method_type(method_call) { Some(method_ty) => { if let ty::FnConverging(result_type) = ty_fn_ret(method_ty) { adjusted_ty = result_type; } } None => {} } match deref(adjusted_ty, true) { Some(mt) => { adjusted_ty = mt.ty; } None => { cx.sess.span_bug( span, format!("the {}th autoderef failed: \ {}", i, ty_to_string(cx, adjusted_ty)) .as_slice()); } } } } match adj.autoref { None => adjusted_ty, Some(ref autoref) => adjust_for_autoref(cx, span, adjusted_ty, autoref) } } } } None => unadjusted_ty }; fn adjust_for_autoref(cx: &ctxt, span: Span, ty: ty::t, autoref: &AutoRef) -> ty::t{ match *autoref { AutoPtr(r, m, ref a) => { let adjusted_ty = match a { &Some(box ref a) => adjust_for_autoref(cx, span, ty, a), &None => ty }; mk_rptr(cx, r, mt { ty: adjusted_ty, mutbl: m }) } AutoUnsafe(m, ref a) => { let adjusted_ty = match a { &Some(box ref a) => adjust_for_autoref(cx, span, ty, a), &None => ty }; mk_ptr(cx, mt {ty: adjusted_ty, mutbl: m}) } AutoUnsize(ref k) => unsize_ty(cx, ty, k, span), AutoUnsizeUniq(ref k) => ty::mk_uniq(cx, unsize_ty(cx, ty, k, span)), } } } // Take a sized type and a sizing adjustment and produce an 
unsized version of // the type. pub fn unsize_ty(cx: &ctxt, ty: ty::t, kind: &UnsizeKind, span: Span) -> ty::t { match kind { &UnsizeLength(len) => match get(ty).sty { ty_vec(t, Some(n)) => { assert!(len == n); mk_vec(cx, t, None) } _ => cx.sess.span_bug(span, format!("UnsizeLength with bad sty: {}", ty_to_string(cx, ty)).as_slice()) }, &UnsizeStruct(box ref k, tp_index) => match get(ty).sty { ty_struct(did, ref substs) => { let ty_substs = substs.types.get_slice(subst::TypeSpace); let new_ty = unsize_ty(cx, ty_substs[tp_index], k, span); let mut unsized_substs = substs.clone(); unsized_substs.types.get_mut_slice(subst::TypeSpace)[tp_index] = new_ty; mk_struct(cx, did, unsized_substs) } _ => cx.sess.span_bug(span, format!("UnsizeStruct with bad sty: {}", ty_to_string(cx, ty)).as_slice()) }, &UnsizeVtable(TyTrait { ref principal, bounds }, _) => { mk_trait(cx, (*principal).clone(), bounds) } } } pub fn resolve_expr(tcx: &ctxt, expr: &ast::Expr) -> def::Def { match tcx.def_map.borrow().get(&expr.id) { Some(&def) => def, None => { tcx.sess.span_bug(expr.span, format!( "no def-map entry for expr {}", expr.id).as_slice()); } } } pub fn expr_is_lval(tcx: &ctxt, e: &ast::Expr) -> bool { match expr_kind(tcx, e) { LvalueExpr => true, RvalueDpsExpr | RvalueDatumExpr | RvalueStmtExpr => false } } /// We categorize expressions into three kinds. The distinction between /// lvalue/rvalue is fundamental to the language. The distinction between the /// two kinds of rvalues is an artifact of trans which reflects how we will /// generate code for that kind of expression. See trans/expr.rs for more /// information. 
pub enum ExprKind { LvalueExpr, RvalueDpsExpr, RvalueDatumExpr, RvalueStmtExpr } pub fn expr_kind(tcx: &ctxt, expr: &ast::Expr) -> ExprKind { if tcx.method_map.borrow().contains_key(&typeck::MethodCall::expr(expr.id)) { // Overloaded operations are generally calls, and hence they are // generated via DPS, but there are a few exceptions: return match expr.node { // `a += b` has a unit result. ast::ExprAssignOp(..) => RvalueStmtExpr, // the deref method invoked for `*a` always yields an `&T` ast::ExprUnary(ast::UnDeref, _) => LvalueExpr, // the index method invoked for `a[i]` always yields an `&T` ast::ExprIndex(..) => LvalueExpr, // the slice method invoked for `a[..]` always yields an `&T` ast::ExprSlice(..) => LvalueExpr, // `for` loops are statements ast::ExprForLoop(..) => RvalueStmtExpr, // in the general case, result could be any type, use DPS _ => RvalueDpsExpr }; } match expr.node { ast::ExprPath(..) => { match resolve_expr(tcx, expr) { def::DefVariant(tid, vid, _) => { let variant_info = enum_variant_with_id(tcx, tid, vid); if variant_info.args.len() > 0u { // N-ary variant. RvalueDatumExpr } else { // Nullary variant. RvalueDpsExpr } } def::DefStruct(_) => { match get(expr_ty(tcx, expr)).sty { ty_bare_fn(..) => RvalueDatumExpr, _ => RvalueDpsExpr } } // Special case: A unit like struct's constructor must be called without () at the // end (like `UnitStruct`) which means this is an ExprPath to a DefFn. But in case // of unit structs this is should not be interpreted as function pointer but as // call to the constructor. def::DefFn(_, true) => RvalueDpsExpr, // Fn pointers are just scalar values. def::DefFn(..) | def::DefStaticMethod(..) | def::DefMethod(..) => RvalueDatumExpr, // Note: there is actually a good case to be made that // DefArg's, particularly those of immediate type, ought to // considered rvalues. def::DefStatic(..) | def::DefUpvar(..) | def::DefLocal(..) => LvalueExpr, def::DefConst(..) 
=> RvalueDatumExpr, def => { tcx.sess.span_bug( expr.span, format!("uncategorized def for expr {}: {}", expr.id, def).as_slice()); } } } ast::ExprUnary(ast::UnDeref, _) | ast::ExprField(..) | ast::ExprTupField(..) | ast::ExprIndex(..) | ast::ExprSlice(..) => { LvalueExpr } ast::ExprCall(..) | ast::ExprMethodCall(..) | ast::ExprStruct(..) | ast::ExprTup(..) | ast::ExprIf(..) | ast::ExprMatch(..) | ast::ExprFnBlock(..) | ast::ExprProc(..) | ast::ExprUnboxedFn(..) | ast::ExprBlock(..) | ast::ExprRepeat(..) | ast::ExprVec(..) => { RvalueDpsExpr } ast::ExprIfLet(..) => { tcx.sess.span_bug(expr.span, "non-desugared ExprIfLet"); } ast::ExprWhileLet(..) => { tcx.sess.span_bug(expr.span, "non-desugared ExprWhileLet"); } ast::ExprLit(ref lit) if lit_is_str(&**lit) => { RvalueDpsExpr } ast::ExprCast(..) => { match tcx.node_types.borrow().get(&expr.id) { Some(&t) => { if type_is_trait(t) { RvalueDpsExpr } else { RvalueDatumExpr } } None => { // Technically, it should not happen that the expr is not // present within the table. However, it DOES happen // during type check, because the final types from the // expressions are not yet recorded in the tcx. At that // time, though, we are only interested in knowing lvalue // vs rvalue. It would be better to base this decision on // the AST type in cast node---but (at the time of this // writing) it's not easy to distinguish casts to traits // from other casts based on the AST. This should be // easier in the future, when casts to traits // would like @Foo, Box<Foo>, or &Foo. RvalueDatumExpr } } } ast::ExprBreak(..) | ast::ExprAgain(..) | ast::ExprRet(..) | ast::ExprWhile(..) | ast::ExprLoop(..) | ast::ExprAssign(..) | ast::ExprInlineAsm(..) | ast::ExprAssignOp(..) | ast::ExprForLoop(..) => { RvalueStmtExpr } ast::ExprLit(_) | // Note: LitStr is carved out above ast::ExprUnary(..) | ast::ExprAddrOf(..) | ast::ExprBinary(..) 
=> { RvalueDatumExpr } ast::ExprBox(ref place, _) => { // Special case `Box<T>` for now: let definition = match tcx.def_map.borrow().get(&place.id) { Some(&def) => def, None => panic!("no def for place"), }; let def_id = definition.def_id(); if tcx.lang_items.exchange_heap() == Some(def_id) { RvalueDatumExpr } else { RvalueDpsExpr } } ast::ExprParen(ref e) => expr_kind(tcx, &**e), ast::ExprMac(..) => { tcx.sess.span_bug( expr.span, "macro expression remains after expansion"); } } } pub fn stmt_node_id(s: &ast::Stmt) -> ast::NodeId { match s.node { ast::StmtDecl(_, id) | StmtExpr(_, id) | StmtSemi(_, id) => { return id; } ast::StmtMac(..) => panic!("unexpanded macro in trans") } } pub fn field_idx_strict(tcx: &ctxt, name: ast::Name, fields: &[field]) -> uint { let mut i = 0u; for f in fields.iter() { if f.name == name { return i; } i += 1u; } tcx.sess.bug(format!( "no field named `{}` found in the list of fields `{}`", token::get_name(name), fields.iter() .map(|f| token::get_name(f.name).get().to_string()) .collect::<Vec<String>>()).as_slice()); } pub fn impl_or_trait_item_idx(id: ast::Name, trait_items: &[ImplOrTraitItem]) -> Option<uint> { trait_items.iter().position(|m| m.name() == id) } pub fn ty_sort_string(cx: &ctxt, t: t) -> String { match get(t).sty { ty_nil | ty_bool | ty_char | ty_int(_) | ty_uint(_) | ty_float(_) | ty_str => { ::util::ppaux::ty_to_string(cx, t) } ty_enum(id, _) => format!("enum {}", item_path_str(cx, id)), ty_uniq(_) => "box".to_string(), ty_vec(_, Some(n)) => format!("array of {} elements", n), ty_vec(_, None) => "slice".to_string(), ty_ptr(_) => "*-ptr".to_string(), ty_rptr(_, _) => "&-ptr".to_string(), ty_bare_fn(_) => "extern fn".to_string(), ty_closure(_) => "fn".to_string(), ty_trait(ref inner) => { format!("trait {}", item_path_str(cx, inner.principal.def_id)) } ty_struct(id, _) => { format!("struct {}", item_path_str(cx, id)) } ty_unboxed_closure(..) 
=> "closure".to_string(), ty_tup(_) => "tuple".to_string(), ty_infer(TyVar(_)) => "inferred type".to_string(), ty_infer(IntVar(_)) => "integral variable".to_string(), ty_infer(FloatVar(_)) => "floating-point variable".to_string(), ty_infer(SkolemizedTy(_)) => "skolemized type".to_string(), ty_infer(SkolemizedIntTy(_)) => "skolemized integral type".to_string(), ty_param(ref p) => { if p.space == subst::SelfSpace { "Self".to_string() } else { "type parameter".to_string() } } ty_err => "type error".to_string(), ty_open(_) => "opened DST".to_string(), } } pub fn type_err_to_str(cx: &ctxt, err: &type_err) -> String { /*! * * Explains the source of a type err in a short, * human readable way. This is meant to be placed in * parentheses after some larger message. You should * also invoke `note_and_explain_type_err()` afterwards * to present additional details, particularly when * it comes to lifetime-related errors. */ fn tstore_to_closure(s: &TraitStore) -> String { match s { &UniqTraitStore => "proc".to_string(), &RegionTraitStore(..) 
=> "closure".to_string() } } match *err { terr_cyclic_ty => "cyclic type of infinite size".to_string(), terr_mismatch => "types differ".to_string(), terr_fn_style_mismatch(values) => { format!("expected {} fn, found {} fn", values.expected.to_string(), values.found.to_string()) } terr_abi_mismatch(values) => { format!("expected {} fn, found {} fn", values.expected.to_string(), values.found.to_string()) } terr_onceness_mismatch(values) => { format!("expected {} fn, found {} fn", values.expected.to_string(), values.found.to_string()) } terr_sigil_mismatch(values) => { format!("expected {}, found {}", tstore_to_closure(&values.expected), tstore_to_closure(&values.found)) } terr_mutability => "values differ in mutability".to_string(), terr_box_mutability => { "boxed values differ in mutability".to_string() } terr_vec_mutability => "vectors differ in mutability".to_string(), terr_ptr_mutability => "pointers differ in mutability".to_string(), terr_ref_mutability => "references differ in mutability".to_string(), terr_ty_param_size(values) => { format!("expected a type with {} type params, \ found one with {} type params", values.expected, values.found) } terr_fixed_array_size(values) => { format!("expected an array with a fixed size of {} elements, \ found one with {} elements", values.expected, values.found) } terr_tuple_size(values) => { format!("expected a tuple with {} elements, \ found one with {} elements", values.expected, values.found) } terr_arg_count => { "incorrect number of function parameters".to_string() } terr_regions_does_not_outlive(..) => { "lifetime mismatch".to_string() } terr_regions_not_same(..) => { "lifetimes are not the same".to_string() } terr_regions_no_overlap(..) 
=> { "lifetimes do not intersect".to_string() } terr_regions_insufficiently_polymorphic(br, _) => { format!("expected bound lifetime parameter {}, \ found concrete lifetime", bound_region_ptr_to_string(cx, br)) } terr_regions_overly_polymorphic(br, _) => { format!("expected concrete lifetime, \ found bound lifetime parameter {}", bound_region_ptr_to_string(cx, br)) } terr_trait_stores_differ(_, ref values) => { format!("trait storage differs: expected `{}`, found `{}`", trait_store_to_string(cx, (*values).expected), trait_store_to_string(cx, (*values).found)) } terr_sorts(values) => { // A naive approach to making sure that we're not reporting silly errors such as: // (expected closure, found closure). let expected_str = ty_sort_string(cx, values.expected); let found_str = ty_sort_string(cx, values.found); if expected_str == found_str { format!("expected {}, found a different {}", expected_str, found_str) } else { format!("expected {}, found {}", expected_str, found_str) } } terr_traits(values) => { format!("expected trait `{}`, found trait `{}`", item_path_str(cx, values.expected), item_path_str(cx, values.found)) } terr_builtin_bounds(values) => { if values.expected.is_empty() { format!("expected no bounds, found `{}`", values.found.user_string(cx)) } else if values.found.is_empty() { format!("expected bounds `{}`, found no bounds", values.expected.user_string(cx)) } else { format!("expected bounds `{}`, found bounds `{}`", values.expected.user_string(cx), values.found.user_string(cx)) } } terr_integer_as_char => { "expected an integral type, found `char`".to_string() } terr_int_mismatch(ref values) => { format!("expected `{}`, found `{}`", values.expected.to_string(), values.found.to_string()) } terr_float_mismatch(ref values) => { format!("expected `{}`, found `{}`", values.expected.to_string(), values.found.to_string()) } terr_variadic_mismatch(ref values) => { format!("expected {} fn, found {} function", if values.expected { "variadic" } else { "non-variadic" 
}, if values.found { "variadic" } else { "non-variadic" }) } terr_convergence_mismatch(ref values) => { format!("expected {} fn, found {} function", if values.expected { "converging" } else { "diverging" }, if values.found { "converging" } else { "diverging" }) } } } pub fn note_and_explain_type_err(cx: &ctxt, err: &type_err) { match *err { terr_regions_does_not_outlive(subregion, superregion) => { note_and_explain_region(cx, "", subregion, "..."); note_and_explain_region(cx, "...does not necessarily outlive ", superregion, ""); } terr_regions_not_same(region1, region2) => { note_and_explain_region(cx, "", region1, "..."); note_and_explain_region(cx, "...is not the same lifetime as ", region2, ""); } terr_regions_no_overlap(region1, region2) => { note_and_explain_region(cx, "", region1, "..."); note_and_explain_region(cx, "...does not overlap ", region2, ""); } terr_regions_insufficiently_polymorphic(_, conc_region) => { note_and_explain_region(cx, "concrete lifetime that was found is ", conc_region, ""); } terr_regions_overly_polymorphic(_, conc_region) => { note_and_explain_region(cx, "expected concrete lifetime is ", conc_region, ""); } _ => {} } } pub fn provided_source(cx: &ctxt, id: ast::DefId) -> Option<ast::DefId> { cx.provided_method_sources.borrow().get(&id).map(|x| *x) } pub fn provided_trait_methods(cx: &ctxt, id: ast::DefId) -> Vec<Rc<Method>> { if is_local(id) { match cx.map.find(id.node) { Some(ast_map::NodeItem(item)) => { match item.node { ItemTrait(_, _, _, ref ms) => { let (_, p) = ast_util::split_trait_methods(ms.as_slice()); p.iter() .map(|m| { match impl_or_trait_item( cx, ast_util::local_def(m.id)) { MethodTraitItem(m) => m, TypeTraitItem(_) => { cx.sess.bug("provided_trait_methods(): \ split_trait_methods() put \ associated types in the \ provided method bucket?!") } } }).collect() } _ => { cx.sess.bug(format!("provided_trait_methods: `{}` is \ not a trait", id).as_slice()) } } } _ => { cx.sess.bug(format!("provided_trait_methods: `{}` is 
not a \ trait", id).as_slice()) } } } else { csearch::get_provided_trait_methods(cx, id) } } fn lookup_locally_or_in_crate_store<V:Clone>( descr: &str, def_id: ast::DefId, map: &mut DefIdMap<V>, load_external: || -> V) -> V { /*! * Helper for looking things up in the various maps * that are populated during typeck::collect (e.g., * `cx.impl_or_trait_items`, `cx.tcache`, etc). All of these share * the pattern that if the id is local, it should have * been loaded into the map by the `typeck::collect` phase. * If the def-id is external, then we have to go consult * the crate loading code (and cache the result for the future). */ match map.find_copy(&def_id) { Some(v) => { return v; } None => { } } if def_id.krate == ast::LOCAL_CRATE { panic!("No def'n found for {} in tcx.{}", def_id, descr); } let v = load_external(); map.insert(def_id, v.clone()); v } pub fn trait_item(cx: &ctxt, trait_did: ast::DefId, idx: uint) -> ImplOrTraitItem { let method_def_id = (*ty::trait_item_def_ids(cx, trait_did))[idx].def_id(); impl_or_trait_item(cx, method_def_id) } pub fn trait_items(cx: &ctxt, trait_did: ast::DefId) -> Rc<Vec<ImplOrTraitItem>> { let mut trait_items = cx.trait_items_cache.borrow_mut(); match trait_items.find_copy(&trait_did) { Some(trait_items) => trait_items, None => { let def_ids = ty::trait_item_def_ids(cx, trait_did); let items: Rc<Vec<ImplOrTraitItem>> = Rc::new(def_ids.iter() .map(|d| impl_or_trait_item(cx, d.def_id())) .collect()); trait_items.insert(trait_did, items.clone()); items } } } pub fn impl_or_trait_item(cx: &ctxt, id: ast::DefId) -> ImplOrTraitItem { lookup_locally_or_in_crate_store("impl_or_trait_items", id, &mut *cx.impl_or_trait_items .borrow_mut(), || { csearch::get_impl_or_trait_item(cx, id) }) } /// Returns true if the given ID refers to an associated type and false if it /// refers to anything else. 
pub fn is_associated_type(cx: &ctxt, id: ast::DefId) -> bool {
    memoized(&cx.associated_types, id, |id: ast::DefId| {
        if id.krate == ast::LOCAL_CRATE {
            match cx.impl_or_trait_items.borrow().get(&id) {
                Some(ref item) => {
                    match **item {
                        TypeTraitItem(_) => true,
                        MethodTraitItem(_) => false,
                    }
                }
                None => false,
            }
        } else {
            // Non-local defs are answered by the crate metadata.
            csearch::is_associated_type(&cx.sess.cstore, id)
        }
    })
}

/// Returns the parameter index that the given associated type corresponds to.
pub fn associated_type_parameter_index(cx: &ctxt,
                                       trait_def: &TraitDef,
                                       associated_type_id: ast::DefId)
                                       -> uint {
    // BUGFIX: the original text contained a fill-in-the-middle residue token
    // in place of the brace closing this `if`, leaving the loop unclosed.
    for type_parameter_def in trait_def.generics.types.iter() {
        if type_parameter_def.def_id == associated_type_id {
            return type_parameter_def.index
        }
    }
    // Reaching here means the caller passed an id that is not an associated
    // type of this trait, which is a compiler bug.
    cx.sess.bug("couldn't find associated type parameter index")
}

/// An associated type together with its position and name within the
/// declaring trait.
#[deriving(PartialEq, Eq)]
pub struct AssociatedTypeInfo {
    pub def_id: ast::DefId,
    pub index: uint,
    pub name: ast::Name,
}

impl PartialOrd for AssociatedTypeInfo {
    fn partial_cmp(&self, other: &AssociatedTypeInfo) -> Option<Ordering> {
        // Ordered by declaration index within the trait.
        Some(self.index.cmp(&other.index))
    }
}

impl Ord for AssociatedTypeInfo {
    fn cmp(&self, other: &AssociatedTypeInfo) -> Ordering {
        self.index.cmp(&other.index)
    }
}

/// Returns the def-ids of the items (methods and associated types) of
/// trait `id`, loading them from the crate store if necessary.
pub fn trait_item_def_ids(cx: &ctxt, id: ast::DefId)
                          -> Rc<Vec<ImplOrTraitItemId>> {
    lookup_locally_or_in_crate_store("trait_item_def_ids",
                                     id,
                                     &mut *cx.trait_item_def_ids.borrow_mut(),
                                     || {
        Rc::new(csearch::get_trait_item_def_ids(&cx.sess.cstore, id))
    })
}

/// If `id` names an impl of some trait, returns the trait reference it
/// implements; returns `None` for inherent impls.  Memoized.
pub fn impl_trait_ref(cx: &ctxt, id: ast::DefId) -> Option<Rc<TraitRef>> {
    memoized(&cx.impl_trait_cache, id, |id: ast::DefId| {
        if id.krate == ast::LOCAL_CRATE {
            debug!("(impl_trait_ref) searching for trait impl {}", id);
            match cx.map.find(id.node) {
                Some(ast_map::NodeItem(item)) => {
                    match item.node {
                        ast::ItemImpl(_, ref opt_trait, _, _) => {
                            match opt_trait {
                                &Some(ref t) => {
                                    Some(ty::node_id_to_trait_ref(cx, t.ref_id))
                                }
                                &None => None
                            }
                        }
                        _ => None
                    }
                }
                _ => None
            }
        } else {
            csearch::get_impl_trait(cx, id)
        }
    })
}

/// Resolves the AST trait reference `tr` to the def-id of the trait it
/// names; it is a bug for the def-map entry to be missing.
pub fn trait_ref_to_def_id(tcx: &ctxt, tr: &ast::TraitRef) -> ast::DefId {
    let def = *tcx.def_map.borrow()
                     .get(&tr.ref_id)
                     .expect("no def-map entry for trait");
    def.def_id()
}

pub fn try_add_builtin_trait(
    tcx: &ctxt,
    trait_def_id: ast::DefId,
    builtin_bounds: &mut EnumSet<BuiltinBound>)
    -> bool
{
    //! Checks whether `trait_ref` refers to one of the builtin
    //! traits, like `Send`, and adds the corresponding
    //! bound to the set `builtin_bounds` if so. Returns true if `trait_ref`
    //! is a builtin trait.

    match tcx.lang_items.to_builtin_kind(trait_def_id) {
        Some(bound) => { builtin_bounds.insert(bound); true }
        None => false
    }
}

/// Returns the def-id of the nominal type (trait object principal,
/// struct, enum or unboxed closure) underlying `ty`, if any.
pub fn ty_to_def_id(ty: t) -> Option<ast::DefId> {
    match get(ty).sty {
        ty_trait(ref tt) => Some(tt.principal.def_id),
        ty_struct(id, _) |
        ty_enum(id, _) |
        ty_unboxed_closure(id, _, _) => Some(id),
        _ => None
    }
}

// Enum information
#[deriving(Clone)]
pub struct VariantInfo {
    pub args: Vec<t>,                        // types of the variant's fields
    pub arg_names: Option<Vec<ast::Ident> >, // field names, for struct variants only
    pub ctor_ty: Option<t>,                  // constructor fn type, for tuple variants only
    pub name: ast::Name,
    pub id: ast::DefId,
    pub disr_val: Disr,
    pub vis: Visibility
}

impl VariantInfo {

    /// Creates a new VariantInfo from the corresponding ast representation.
    ///
    /// Does not do any caching of the value in the type context.
pub fn from_ast_variant(cx: &ctxt, ast_variant: &ast::Variant, discriminant: Disr) -> VariantInfo { let ctor_ty = node_id_to_type(cx, ast_variant.node.id); match ast_variant.node.kind { ast::TupleVariantKind(ref args) => { let arg_tys = if args.len() > 0 { ty_fn_args(ctor_ty).iter().map(|a| *a).collect() } else { Vec::new() }; return VariantInfo { args: arg_tys, arg_names: None, ctor_ty: Some(ctor_ty), name: ast_variant.node.name.name, id: ast_util::local_def(ast_variant.node.id), disr_val: discriminant, vis: ast_variant.node.vis }; }, ast::StructVariantKind(ref struct_def) => { let fields: &[StructField] = struct_def.fields.as_slice(); assert!(fields.len() > 0); let arg_tys = struct_def.fields.iter() .map(|field| node_id_to_type(cx, field.node.id)).collect(); let arg_names = fields.iter().map(|field| { match field.node.kind { NamedField(ident, _) => ident, UnnamedField(..) => cx.sess.bug( "enum_variants: all fields in struct must have a name") } }).collect(); return VariantInfo { args: arg_tys, arg_names: Some(arg_names), ctor_ty: None, name: ast_variant.node.name.name, id: ast_util::local_def(ast_variant.node.id), disr_val: discriminant, vis: ast_variant.node.vis }; } } } } pub fn substd_enum_variants(cx: &ctxt, id: ast::DefId, substs: &Substs) -> Vec<Rc<VariantInfo>> { enum_variants(cx, id).iter().map(|variant_info| { let substd_args = variant_info.args.iter() .map(|aty| aty.subst(cx, substs)).collect::<Vec<_>>(); let substd_ctor_ty = variant_info.ctor_ty.subst(cx, substs); Rc::new(VariantInfo { args: substd_args, ctor_ty: substd_ctor_ty, ..(**variant_info).clone() }) }).collect() } pub fn item_path_str(cx: &ctxt, id: ast::DefId) -> String { with_path(cx, id, |path| ast_map::path_to_string(path)).to_string() } pub enum DtorKind { NoDtor, TraitDtor(DefId, bool) } impl DtorKind { pub fn is_present(&self) -> bool { match *self { TraitDtor(..) 
=> true, _ => false } } pub fn has_drop_flag(&self) -> bool { match self { &NoDtor => false, &TraitDtor(_, flag) => flag } } } /* If struct_id names a struct with a dtor, return Some(the dtor's id). Otherwise return none. */ pub fn ty_dtor(cx: &ctxt, struct_id: DefId) -> DtorKind { match cx.destructor_for_type.borrow().get(&struct_id) { Some(&method_def_id) => { let flag = !has_attr(cx, struct_id, "unsafe_no_drop_flag"); TraitDtor(method_def_id, flag) } None => NoDtor, } } pub fn has_dtor(cx: &ctxt, struct_id: DefId) -> bool { cx.destructor_for_type.borrow().contains_key(&struct_id) } pub fn with_path<T>(cx: &ctxt, id: ast::DefId, f: |ast_map::PathElems| -> T) -> T { if id.krate == ast::LOCAL_CRATE { cx.map.with_path(id.node, f) } else { f(ast_map::Values(csearch::get_item_path(cx, id).iter()).chain(None)) } } pub fn enum_is_univariant(cx: &ctxt, id: ast::DefId) -> bool { enum_variants(cx, id).len() == 1 } pub fn type_is_empty(cx: &ctxt, t: t) -> bool { match ty::get(t).sty { ty_enum(did, _) => (*enum_variants(cx, did)).is_empty(), _ => false } } pub fn enum_variants(cx: &ctxt, id: ast::DefId) -> Rc<Vec<Rc<VariantInfo>>> { memoized(&cx.enum_var_cache, id, |id: ast::DefId| { if ast::LOCAL_CRATE != id.krate { Rc::new(csearch::get_enum_variants(cx, id)) } else { /* Although both this code and check_enum_variants in typeck/check call eval_const_expr, it should never get called twice for the same expr, since check_enum_variants also updates the enum_var_cache */ match cx.map.get(id.node) { ast_map::NodeItem(ref item) => { match item.node { ast::ItemEnum(ref enum_definition, _) => { let mut last_discriminant: Option<Disr> = None; Rc::new(enum_definition.variants.iter().map(|variant| { let mut discriminant = match last_discriminant { Some(val) => val + 1, None => INITIAL_DISCRIMINANT_VALUE }; match variant.node.disr_expr { Some(ref e) => match const_eval::eval_const_expr_partial(cx, &**e) { Ok(const_eval::const_int(val)) => { discriminant = val as Disr } 
Ok(const_eval::const_uint(val)) => { discriminant = val as Disr } Ok(_) => { cx.sess .span_err(e.span, "expected signed integer constant"); } Err(ref err) => { cx.sess .span_err(e.span, format!("expected constant: {}", *err).as_slice()); } }, None => {} }; last_discriminant = Some(discriminant); Rc::new(VariantInfo::from_ast_variant(cx, &**variant, discriminant)) }).collect()) } _ => { cx.sess.bug("enum_variants: id not bound to an enum") } } } _ => cx.sess.bug("enum_variants: id not bound to an enum") } } }) } // Returns information about the enum variant with the given ID: pub fn enum_variant_with_id(cx: &ctxt, enum_id: ast::DefId, variant_id: ast::DefId) -> Rc<VariantInfo> { enum_variants(cx, enum_id).iter() .find(|variant| variant.id == variant_id) .expect("enum_variant_with_id(): no variant exists with that ID") .clone() } // If the given item is in an external crate, looks up its type and adds it to // the type cache. Returns the type parameters and type. pub fn lookup_item_type(cx: &ctxt, did: ast::DefId) -> Polytype { lookup_locally_or_in_crate_store( "tcache", did, &mut *cx.tcache.borrow_mut(), || csearch::get_type(cx, did)) } /// Given the did of a trait, returns its canonical trait ref. pub fn lookup_trait_def(cx: &ctxt, did: DefId) -> Rc<ty::TraitDef> { memoized(&cx.trait_defs, did, |did: DefId| { assert!(did.krate != ast::LOCAL_CRATE); Rc::new(csearch::get_trait_def(cx, did)) }) } /// Given a reference to a trait, returns the bounds declared on the /// trait, with appropriate substitutions applied. pub fn bounds_for_trait_ref(tcx: &ctxt, trait_ref: &TraitRef) -> ty::ParamBounds { let trait_def = lookup_trait_def(tcx, trait_ref.def_id); debug!("bounds_for_trait_ref(trait_def={}, trait_ref={})", trait_def.repr(tcx), trait_ref.repr(tcx)); trait_def.bounds.subst(tcx, &trait_ref.substs) } /// Iterate over attributes of a definition. 
// (This should really be an iterator, but that would require csearch and // decoder to use iterators instead of higher-order functions.) pub fn each_attr(tcx: &ctxt, did: DefId, f: |&ast::Attribute| -> bool) -> bool { if is_local(did) { let item = tcx.map.expect_item(did.node); item.attrs.iter().all(|attr| f(attr)) } else { info!("getting foreign attrs"); let mut cont = true; csearch::get_item_attrs(&tcx.sess.cstore, did, |attrs| { if cont { cont = attrs.iter().all(|attr| f(attr)); } }); info!("done"); cont } } /// Determine whether an item is annotated with an attribute pub fn has_attr(tcx: &ctxt, did: DefId, attr: &str) -> bool { let mut found = false; each_attr(tcx, did, |item| { if item.check_name(attr) { found = true; false } else { true } }); found } /// Determine whether an item is annotated with `#[repr(packed)]` pub fn lookup_packed(tcx: &ctxt, did: DefId) -> bool { lookup_repr_hints(tcx, did).contains(&attr::ReprPacked) } /// Determine whether an item is annotated with `#[simd]` pub fn lookup_simd(tcx: &ctxt, did: DefId) -> bool { has_attr(tcx, did, "simd") } /// Obtain the representation annotation for a struct definition. 
pub fn lookup_repr_hints(tcx: &ctxt, did: DefId) -> Rc<Vec<attr::ReprAttr>> { memoized(&tcx.repr_hint_cache, did, |did: DefId| { Rc::new(if did.krate == LOCAL_CRATE { let mut acc = Vec::new(); ty::each_attr(tcx, did, |meta| { acc.extend(attr::find_repr_attrs(tcx.sess.diagnostic(), meta).into_iter()); true }); acc } else { csearch::get_repr_attrs(&tcx.sess.cstore, did) }) }) } // Look up a field ID, whether or not it's local // Takes a list of type substs in case the struct is generic pub fn lookup_field_type(tcx: &ctxt, struct_id: DefId, id: DefId, substs: &Substs) -> ty::t { let t = if id.krate == ast::LOCAL_CRATE { node_id_to_type(tcx, id.node) } else { let mut tcache = tcx.tcache.borrow_mut(); let pty = match tcache.entry(id) { Occupied(entry) => entry.into_mut(), Vacant(entry) => entry.set(csearch::get_field_type(tcx, struct_id, id)), }; pty.ty }; t.subst(tcx, substs) } // Look up the list of field names and IDs for a given struct. // Fails if the id is not bound to a struct. pub fn lookup_struct_fields(cx: &ctxt, did: ast::DefId) -> Vec<field_ty> { if did.krate == ast::LOCAL_CRATE { let struct_fields = cx.struct_fields.borrow(); match struct_fields.get(&did) { Some(fields) => (**fields).clone(), _ => { cx.sess.bug( format!("ID not mapped to struct fields: {}", cx.map.node_to_string(did.node)).as_slice()); } } } else { csearch::get_struct_fields(&cx.sess.cstore, did) } } pub fn is_tuple_struct(cx: &ctxt, did: ast::DefId) -> bool { let fields = lookup_struct_fields(cx, did); !fields.is_empty() && fields.iter().all(|f| f.name == token::special_names::unnamed_field) } // Returns a list of fields corresponding to the struct's items. trans uses // this. Takes a list of substs with which to instantiate field types. 
pub fn struct_fields(cx: &ctxt, did: ast::DefId, substs: &Substs) -> Vec<field> { lookup_struct_fields(cx, did).iter().map(|f| { field { name: f.name, mt: mt { ty: lookup_field_type(cx, did, f.id, substs), mutbl: MutImmutable } } }).collect() } // Returns a list of fields corresponding to the tuple's items. trans uses // this. pub fn tup_fields(v: &[t]) -> Vec<field> { v.iter().enumerate().map(|(i, &f)| { field { name: token::intern(i.to_string().as_slice()), mt: mt { ty: f, mutbl: MutImmutable } } }).collect() } pub struct UnboxedClosureUpvar { pub def: def::Def, pub span: Span, pub ty: t, } // Returns a list of `UnboxedClosureUpvar`s for each upvar. pub fn unboxed_closure_upvars(tcx: &ctxt, closure_id: ast::DefId, substs: &Substs) -> Vec<UnboxedClosureUpvar> { // Presently an unboxed closure type cannot "escape" out of a // function, so we will only encounter ones that originated in the // local crate or were inlined into it along with some function. // This may change if abstract return types of some sort are // implemented. 
assert!(closure_id.krate == ast::LOCAL_CRATE); let capture_mode = tcx.capture_modes.borrow().get_copy(&closure_id.node); match tcx.freevars.borrow().get(&closure_id.node) { None => vec![], Some(ref freevars) => { freevars.iter().map(|freevar| { let freevar_def_id = freevar.def.def_id(); let freevar_ty = node_id_to_type(tcx, freevar_def_id.node); let mut freevar_ty = freevar_ty.subst(tcx, substs); if capture_mode == ast::CaptureByRef { let borrow = tcx.upvar_borrow_map.borrow().get_copy(&ty::UpvarId { var_id: freevar_def_id.node, closure_expr_id: closure_id.node }); freevar_ty = mk_rptr(tcx, borrow.region, ty::mt { ty: freevar_ty, mutbl: borrow.kind.to_mutbl_lossy() }); } UnboxedClosureUpvar { def: freevar.def, span: freevar.span, ty: freevar_ty } }).collect() } } } pub fn is_binopable(cx: &ctxt, ty: t, op: ast::BinOp) -> bool { #![allow(non_upper_case_globals)] static tycat_other: int = 0; static tycat_bool: int = 1; static tycat_char: int = 2; static tycat_int: int = 3; static tycat_float: int = 4; static tycat_raw_ptr: int = 6; static opcat_add: int = 0; static opcat_sub: int = 1; static opcat_mult: int = 2; static opcat_shift: int = 3; static opcat_rel: int = 4; static opcat_eq: int = 5; static opcat_bit: int = 6; static opcat_logic: int = 7; static opcat_mod: int = 8; fn opcat(op: ast::BinOp) -> int { match op { ast::BiAdd => opcat_add, ast::BiSub => opcat_sub, ast::BiMul => opcat_mult, ast::BiDiv => opcat_mult, ast::BiRem => opcat_mod, ast::BiAnd => opcat_logic, ast::BiOr => opcat_logic, ast::BiBitXor => opcat_bit, ast::BiBitAnd => opcat_bit, ast::BiBitOr => opcat_bit, ast::BiShl => opcat_shift, ast::BiShr => opcat_shift, ast::BiEq => opcat_eq, ast::BiNe => opcat_eq, ast::BiLt => opcat_rel, ast::BiLe => opcat_rel, ast::BiGe => opcat_rel, ast::BiGt => opcat_rel } } fn tycat(cx: &ctxt, ty: t) -> int { if type_is_simd(cx, ty) { return tycat(cx, simd_type(cx, ty)) } match get(ty).sty { ty_char => tycat_char, ty_bool => tycat_bool, ty_int(_) | ty_uint(_) | 
ty_infer(IntVar(_)) => tycat_int, ty_float(_) | ty_infer(FloatVar(_)) => tycat_float, ty_ptr(_) => tycat_raw_ptr, _ => tycat_other } } static t: bool = true; static f: bool = false; let tbl = [ // +, -, *, shift, rel, ==, bit, logic, mod /*other*/ [f, f, f, f, f, f, f, f, f], /*bool*/ [f, f, f, f, t, t, t, t, f], /*char*/ [f, f, f, f, t, t, f, f, f], /*int*/ [t, t, t, t, t, t, t, f, t], /*float*/ [t, t, t, f, t, t, f, f, f], /*bot*/ [t, t, t, t, t, t, t, t, t], /*raw ptr*/ [f, f, f, f, t, t, f, f, f]]; return tbl[tycat(cx, ty) as uint ][opcat(op) as uint]; } /// Returns an equivalent type with all the typedefs and self regions removed. pub fn normalize_ty(cx: &ctxt, t: t) -> t { let u = TypeNormalizer(cx).fold_ty(t); return u; struct TypeNormalizer<'a, 'tcx: 'a>(&'a ctxt<'tcx>); impl<'a, 'tcx> TypeFolder<'tcx> for TypeNormalizer<'a, 'tcx> { fn tcx(&self) -> &ctxt<'tcx> { let TypeNormalizer(c) = *self; c } fn fold_ty(&mut self, t: ty::t) -> ty::t { match self.tcx().normalized_cache.borrow().find_copy(&t) { None => {} Some(u) => return u } let t_norm = ty_fold::super_fold_ty(self, t); self.tcx().normalized_cache.borrow_mut().insert(t, t_norm); return t_norm; } fn fold_region(&mut self, _: ty::Region) -> ty::Region { ty::ReStatic } fn fold_substs(&mut self, substs: &subst::Substs) -> subst::Substs { subst::Substs { regions: subst::ErasedRegions, types: substs.types.fold_with(self) } } fn fold_sig(&mut self, sig: &ty::FnSig) -> ty::FnSig { // The binder-id is only relevant to bound regions, which // are erased at trans time. ty::FnSig { binder_id: ast::DUMMY_NODE_ID, inputs: sig.inputs.fold_with(self), output: sig.output.fold_with(self), variadic: sig.variadic, } } } } // Returns the repeat count for a repeating vector expression. 
pub fn eval_repeat_count(tcx: &ctxt, count_expr: &ast::Expr) -> uint { match const_eval::eval_const_expr_partial(tcx, count_expr) { Ok(ref const_val) => match *const_val { const_eval::const_int(count) => if count < 0 { tcx.sess.span_err(count_expr.span, "expected positive integer for \ repeat count, found negative integer"); 0 } else { count as uint }, const_eval::const_uint(count) => count as uint, const_eval::const_float(count) => { tcx.sess.span_err(count_expr.span, "expected positive integer for \ repeat count, found float"); count as uint } const_eval::const_str(_) => { tcx.sess.span_err(count_expr.span, "expected positive integer for \ repeat count, found string"); 0 } const_eval::const_bool(_) => { tcx.sess.span_err(count_expr.span, "expected positive integer for \ repeat count, found boolean"); 0 } const_eval::const_binary(_) => { tcx.sess.span_err(count_expr.span, "expected positive integer for \ repeat count, found binary array"); 0 } const_eval::const_nil => { tcx.sess.span_err(count_expr.span, "expected positive integer for \ repeat count, found ()"); 0 } }, Err(..) => { tcx.sess.span_err(count_expr.span, "expected constant integer for repeat count, \ found variable"); 0 } } } // Iterate over a type parameter's bounded traits and any supertraits // of those traits, ignoring kinds. // Here, the supertraits are the transitive closure of the supertrait // relation on the supertraits from each bounded trait's constraint // list. pub fn each_bound_trait_and_supertraits(tcx: &ctxt, bounds: &[Rc<TraitRef>], f: |Rc<TraitRef>| -> bool) -> bool { for bound_trait_ref in traits::transitive_bounds(tcx, bounds) { if !f(bound_trait_ref) { return false; } } return true; } pub fn required_region_bounds(tcx: &ctxt, region_bounds: &[ty::Region], builtin_bounds: BuiltinBounds, trait_bounds: &[Rc<TraitRef>]) -> Vec<ty::Region> { /*! * Given a type which must meet the builtin bounds and trait * bounds, returns a set of lifetimes which the type must outlive. 
* * Requires that trait definitions have been processed. */ let mut all_bounds = Vec::new(); debug!("required_region_bounds(builtin_bounds={}, trait_bounds={})", builtin_bounds.repr(tcx), trait_bounds.repr(tcx)); all_bounds.push_all(region_bounds); push_region_bounds([], builtin_bounds, &mut all_bounds); debug!("from builtin bounds: all_bounds={}", all_bounds.repr(tcx)); each_bound_trait_and_supertraits( tcx, trait_bounds, |trait_ref| { let bounds = ty::bounds_for_trait_ref(tcx, &*trait_ref); push_region_bounds(bounds.region_bounds.as_slice(), bounds.builtin_bounds, &mut all_bounds); debug!("from {}: bounds={} all_bounds={}", trait_ref.repr(tcx), bounds.repr(tcx), all_bounds.repr(tcx)); true }); return all_bounds; fn push_region_bounds(region_bounds: &[ty::Region], builtin_bounds: ty::BuiltinBounds, all_bounds: &mut Vec<ty::Region>) { all_bounds.push_all(region_bounds.as_slice()); if builtin_bounds.contains(&ty::BoundSend) { all_bounds.push(ty::ReStatic); } } } pub fn get_tydesc_ty(tcx: &ctxt) -> Result<t, String> { tcx.lang_items.require(TyDescStructLangItem).map(|tydesc_lang_item| { tcx.intrinsic_defs.borrow().find_copy(&tydesc_lang_item) .expect("Failed to resolve TyDesc") }) } pub fn item_variances(tcx: &ctxt, item_id: ast::DefId) -> Rc<ItemVariances> { lookup_locally_or_in_crate_store( "item_variance_map", item_id, &mut *tcx.item_variance_map.borrow_mut(), || Rc::new(csearch::get_item_variances(&tcx.sess.cstore, item_id))) } /// Records a trait-to-implementation mapping. pub fn record_trait_implementation(tcx: &ctxt, trait_def_id: DefId, impl_def_id: DefId) { match tcx.trait_impls.borrow().get(&trait_def_id) { Some(impls_for_trait) => { impls_for_trait.borrow_mut().push(impl_def_id); return; } None => {} } tcx.trait_impls.borrow_mut().insert(trait_def_id, Rc::new(RefCell::new(vec!(impl_def_id)))); } /// Populates the type context with all the implementations for the given type /// if necessary. 
pub fn populate_implementations_for_type_if_necessary(tcx: &ctxt, type_id: ast::DefId) { if type_id.krate == LOCAL_CRATE { return } if tcx.populated_external_types.borrow().contains(&type_id) { return } let mut inherent_impls = Vec::new(); csearch::each_implementation_for_type(&tcx.sess.cstore, type_id, |impl_def_id| { let impl_items = csearch::get_impl_items(&tcx.sess.cstore, impl_def_id); // Record the trait->implementation mappings, if applicable. let associated_traits = csearch::get_impl_trait(tcx, impl_def_id); for trait_ref in associated_traits.iter() { record_trait_implementation(tcx, trait_ref.def_id, impl_def_id); } // For any methods that use a default implementation, add them to // the map. This is a bit unfortunate. for impl_item_def_id in impl_items.iter() { let method_def_id = impl_item_def_id.def_id(); match impl_or_trait_item(tcx, method_def_id) { MethodTraitItem(method) => { for &source in method.provided_source.iter() { tcx.provided_method_sources .borrow_mut() .insert(method_def_id, source); } } TypeTraitItem(_) => {} } } // Store the implementation info. tcx.impl_items.borrow_mut().insert(impl_def_id, impl_items); // If this is an inherent implementation, record it. if associated_traits.is_none() { inherent_impls.push(impl_def_id); } }); tcx.inherent_impls.borrow_mut().insert(type_id, Rc::new(inherent_impls)); tcx.populated_external_types.borrow_mut().insert(type_id); } /// Populates the type context with all the implementations for the given /// trait if necessary. pub fn populate_implementations_for_trait_if_necessary( tcx: &ctxt, trait_id: ast::DefId) { if trait_id.krate == LOCAL_CRATE { return } if tcx.populated_external_traits.borrow().contains(&trait_id) { return } csearch::each_implementation_for_trait(&tcx.sess.cstore, trait_id, |implementation_def_id| { let impl_items = csearch::get_impl_items(&tcx.sess.cstore, implementation_def_id); // Record the trait->implementation mapping. 
record_trait_implementation(tcx, trait_id, implementation_def_id); // For any methods that use a default implementation, add them to // the map. This is a bit unfortunate. for impl_item_def_id in impl_items.iter() { let method_def_id = impl_item_def_id.def_id(); match impl_or_trait_item(tcx, method_def_id) { MethodTraitItem(method) => { for &source in method.provided_source.iter() { tcx.provided_method_sources .borrow_mut() .insert(method_def_id, source); } } TypeTraitItem(_) => {} } } // Store the implementation info. tcx.impl_items.borrow_mut().insert(implementation_def_id, impl_items); }); tcx.populated_external_traits.borrow_mut().insert(trait_id); } /// Given the def_id of an impl, return the def_id of the trait it implements. /// If it implements no trait, return `None`. pub fn trait_id_of_impl(tcx: &ctxt, def_id: ast::DefId) -> Option<ast::DefId> { let node = match tcx.map.find(def_id.node) { Some(node) => node, None => return None }; match node { ast_map::NodeItem(item) => { match item.node { ast::ItemImpl(_, Some(ref trait_ref), _, _) => { Some(node_id_to_trait_ref(tcx, trait_ref.ref_id).def_id) } _ => None } } _ => None } } /// If the given def ID describes a method belonging to an impl, return the /// ID of the impl that the method belongs to. Otherwise, return `None`. pub fn impl_of_method(tcx: &ctxt, def_id: ast::DefId) -> Option<ast::DefId> { if def_id.krate != LOCAL_CRATE { return match csearch::get_impl_or_trait_item(tcx, def_id).container() { TraitContainer(_) => None, ImplContainer(def_id) => Some(def_id), }; } match tcx.impl_or_trait_items.borrow().find_copy(&def_id) { Some(trait_item) => { match trait_item.container() { TraitContainer(_) => None, ImplContainer(def_id) => Some(def_id), } } None => None } } /// If the given def ID describes an item belonging to a trait (either a /// default method or an implementation of a trait method), return the ID of /// the trait that the method belongs to. Otherwise, return `None`. 
pub fn trait_of_item(tcx: &ctxt, def_id: ast::DefId) -> Option<ast::DefId> { if def_id.krate != LOCAL_CRATE { return csearch::get_trait_of_item(&tcx.sess.cstore, def_id, tcx); } match tcx.impl_or_trait_items.borrow().find_copy(&def_id) { Some(impl_or_trait_item) => { match impl_or_trait_item.container() { TraitContainer(def_id) => Some(def_id), ImplContainer(def_id) => trait_id_of_impl(tcx, def_id), } } None => None } } /// If the given def ID describes an item belonging to a trait, (either a /// default method or an implementation of a trait method), return the ID of /// the method inside trait definition (this means that if the given def ID /// is already that of the original trait method, then the return value is /// the same). /// Otherwise, return `None`. pub fn trait_item_of_item(tcx: &ctxt, def_id: ast::DefId) -> Option<ImplOrTraitItemId> { let impl_item = match tcx.impl_or_trait_items.borrow().get(&def_id) { Some(m) => m.clone(), None => return None, }; let name = impl_item.name(); match trait_of_item(tcx, def_id) { Some(trait_did) => { let trait_items = ty::trait_items(tcx, trait_did); trait_items.iter() .position(|m| m.name() == name) .map(|idx| ty::trait_item(tcx, trait_did, idx).id()) } None => None } } /// Creates a hash of the type `t` which will be the same no matter what crate /// context it's calculated within. This is used by the `type_id` intrinsic. pub fn hash_crate_independent(tcx: &ctxt, t: t, svh: &Svh) -> u64 { let mut state = sip::SipState::new(); macro_rules! byte( ($b:expr) => { ($b as u8).hash(&mut state) } ); macro_rules! hash( ($e:expr) => { $e.hash(&mut state) } ); let region = |_state: &mut sip::SipState, r: Region| { match r { ReStatic => {} ReEmpty | ReEarlyBound(..) | ReLateBound(..) | ReFree(..) | ReScope(..) | ReInfer(..) 
=> { tcx.sess.bug("non-static region found when hashing a type") } } }; let did = |state: &mut sip::SipState, did: DefId| { let h = if ast_util::is_local(did) { svh.clone() } else { tcx.sess.cstore.get_crate_hash(did.krate) }; h.as_str().hash(state); did.node.hash(state); }; let mt = |state: &mut sip::SipState, mt: mt| { mt.mutbl.hash(state); }; ty::walk_ty(t, |t| { match ty::get(t).sty { ty_nil => byte!(0), ty_bool => byte!(2), ty_char => byte!(3), ty_int(i) => { byte!(4); hash!(i); } ty_uint(u) => { byte!(5); hash!(u); } ty_float(f) => { byte!(6); hash!(f); } ty_str => { byte!(7); } ty_enum(d, _) => { byte!(8); did(&mut state, d); } ty_uniq(_) => { byte!(9); } ty_vec(_, Some(n)) => { byte!(10); n.hash(&mut state); } ty_vec(_, None) => { byte!(11); } ty_ptr(m) => { byte!(12); mt(&mut state, m); } ty_rptr(r, m) => { byte!(13); region(&mut state, r); mt(&mut state, m); } ty_bare_fn(ref b) => { byte!(14); hash!(b.fn_style); hash!(b.abi); } ty_closure(ref c) => { byte!(15); hash!(c.fn_style); hash!(c.onceness); hash!(c.bounds); match c.store { UniqTraitStore => byte!(0), RegionTraitStore(r, m) => { byte!(1) region(&mut state, r); assert_eq!(m, ast::MutMutable); } } } ty_trait(box TyTrait { ref principal, bounds }) => { byte!(17); did(&mut state, principal.def_id); hash!(bounds); } ty_struct(d, _) => { byte!(18); did(&mut state, d); } ty_tup(ref inner) => { byte!(19); hash!(inner.len()); } ty_param(p) => { byte!(20); hash!(p.idx); did(&mut state, p.def_id); } ty_open(_) => byte!(22), ty_infer(_) => unreachable!(), ty_err => byte!(23), ty_unboxed_closure(d, r, _) => { byte!(24); did(&mut state, d); region(&mut state, r); } } }); state.result() } impl Variance { pub fn to_string(self) -> &'static str { match self { Covariant => "+", Contravariant => "-", Invariant => "o", Bivariant => "*", } } } pub fn empty_parameter_environment() -> ParameterEnvironment { /*! 
* Construct a parameter environment suitable for static contexts * or other contexts where there are no free type/lifetime * parameters in scope. */ ty::ParameterEnvironment { free_substs: Substs::empty(), bounds: VecPerParamSpace::empty(), caller_obligations: VecPerParamSpace::empty(), implicit_region_bound: ty::ReEmpty, selection_cache: traits::SelectionCache::new(), } } pub fn construct_parameter_environment( tcx: &ctxt, span: Span, generics: &ty::Generics, free_id: ast::NodeId) -> ParameterEnvironment { /*! See `ParameterEnvironment` struct def'n for details */ // // Construct the free substs. // // map T => T let mut types = VecPerParamSpace::empty(); for &space in subst::ParamSpace::all().iter() { push_types_from_defs(tcx, &mut types, space, generics.types.get_slice(space)); } // map bound 'a => free 'a let mut regions = VecPerParamSpace::empty(); for &space in subst::ParamSpace::all().iter() { push_region_params(&mut regions, space, free_id, generics.regions.get_slice(space)); } let free_substs = Substs { types: types, regions: subst::NonerasedRegions(regions) }; // // Compute the bounds on Self and the type parameters. // let mut bounds = VecPerParamSpace::empty(); for &space in subst::ParamSpace::all().iter() { push_bounds_from_defs(tcx, &mut bounds, space, &free_substs, generics.types.get_slice(space)); } // // Compute region bounds. For now, these relations are stored in a // global table on the tcx, so just enter them there. I'm not // crazy about this scheme, but it's convenient, at least. 
// for &space in subst::ParamSpace::all().iter() { record_region_bounds_from_defs(tcx, space, &free_substs, generics.regions.get_slice(space)); } debug!("construct_parameter_environment: free_id={} \ free_subst={} \ bounds={}", free_id, free_substs.repr(tcx), bounds.repr(tcx)); let obligations = traits::obligations_for_generics(tcx, traits::ObligationCause::misc(span), generics, &free_substs); return ty::ParameterEnvironment { free_substs: free_substs, bounds: bounds, implicit_region_bound: ty::ReScope(free_id), caller_obligations: obligations, selection_cache: traits::SelectionCache::new(), }; fn push_region_params(regions: &mut VecPerParamSpace<ty::Region>, space: subst::ParamSpace, free_id: ast::NodeId, region_params: &[RegionParameterDef]) { for r in region_params.iter() { regions.push(space, ty::free_region_from_def(free_id, r)); } } fn push_types_from_defs(tcx: &ty::ctxt, types: &mut subst::VecPerParamSpace<ty::t>, space: subst::ParamSpace, defs: &[TypeParameterDef]) { for (i, def) in defs.iter().enumerate() { debug!("construct_parameter_environment(): push_types_from_defs: \ space={} def={} index={}", space, def.repr(tcx), i); let ty = ty::mk_param(tcx, space, i, def.def_id); types.push(space, ty); } } fn push_bounds_from_defs(tcx: &ty::ctxt, bounds: &mut subst::VecPerParamSpace<ParamBounds>, space: subst::ParamSpace, free_substs: &subst::Substs, defs: &[TypeParameterDef]) { for def in defs.iter() { let b = def.bounds.subst(tcx, free_substs); bounds.push(space, b); } } fn record_region_bounds_from_defs(tcx: &ty::ctxt, space: subst::ParamSpace, free_substs: &subst::Substs, defs: &[RegionParameterDef]) { for (subst_region, def) in free_substs.regions().get_slice(space).iter().zip( defs.iter()) { // For each region parameter 'subst... let bounds = def.bounds.subst(tcx, free_substs); for bound_region in bounds.iter() { // Which is declared with a bound like 'subst:'bound... 
match (subst_region, bound_region) { (&ty::ReFree(subst_fr), &ty::ReFree(bound_fr)) => { // Record that 'subst outlives 'bound. Or, put // another way, 'bound <= 'subst. tcx.region_maps.relate_free_regions(bound_fr, subst_fr); }, _ => { // All named regions are instantiated with free regions. tcx.sess.bug( format!("push_region_bounds_from_defs: \ non free region: {} / {}", subst_region.repr(tcx), bound_region.repr(tcx)).as_slice()); } } } } } } impl BorrowKind { pub fn from_mutbl(m: ast::Mutability) -> BorrowKind { match m { ast::MutMutable => MutBorrow, ast::MutImmutable => ImmBorrow, } } pub fn to_mutbl_lossy(self) -> ast::Mutability { /*! * Returns a mutability `m` such that an `&m T` pointer could * be used to obtain this borrow kind. Because borrow kinds * are richer than mutabilities, we sometimes have to pick a * mutability that is stronger than necessary so that it at * least *would permit* the borrow in question. */ match self { MutBorrow => ast::MutMutable, ImmBorrow => ast::MutImmutable, // We have no type corresponding to a unique imm borrow, so // use `&mut`. It gives all the capabilities of an `&uniq` // and hence is a safe "over approximation". 
UniqueImmBorrow => ast::MutMutable, } } pub fn to_user_str(&self) -> &'static str { match *self { MutBorrow => "mutable", ImmBorrow => "immutable", UniqueImmBorrow => "uniquely immutable", } } } impl<'tcx> mc::Typer<'tcx> for ty::ctxt<'tcx> { fn tcx<'a>(&'a self) -> &'a ty::ctxt<'tcx> { self } fn node_ty(&self, id: ast::NodeId) -> mc::McResult<ty::t> { Ok(ty::node_id_to_type(self, id)) } fn node_method_ty(&self, method_call: typeck::MethodCall) -> Option<ty::t> { self.method_map.borrow().get(&method_call).map(|method| method.ty) } fn adjustments<'a>(&'a self) -> &'a RefCell<NodeMap<ty::AutoAdjustment>> { &self.adjustments } fn is_method_call(&self, id: ast::NodeId) -> bool { self.method_map.borrow().contains_key(&typeck::MethodCall::expr(id)) } fn temporary_scope(&self, rvalue_id: ast::NodeId) -> Option<ast::NodeId> { self.region_maps.temporary_scope(rvalue_id) } fn upvar_borrow(&self, upvar_id: ty::UpvarId) -> ty::UpvarBorrow { self.upvar_borrow_map.borrow().get_copy(&upvar_id) } fn capture_mode(&self, closure_expr_id: ast::NodeId) -> ast::CaptureClause { self.capture_modes.borrow().get_copy(&closure_expr_id) } fn unboxed_closures<'a>(&'a self) -> &'a RefCell<DefIdMap<UnboxedClosure>> { &self.unboxed_closures } } /// The category of explicit self. #[deriving(Clone, Eq, PartialEq, Show)] pub enum ExplicitSelfCategory { StaticExplicitSelfCategory, ByValueExplicitSelfCategory, ByReferenceExplicitSelfCategory(Region, ast::Mutability), ByBoxExplicitSelfCategory, } /// Pushes all the lifetimes in the given type onto the given list. A /// "lifetime in a type" is a lifetime specified by a reference or a lifetime /// in a list of type substitutions. This does *not* traverse into nominal /// types, nor does it resolve fictitious types. 
pub fn accumulate_lifetimes_in_type(accumulator: &mut Vec<ty::Region>, typ: t) { walk_ty(typ, |typ| { match get(typ).sty { ty_rptr(region, _) => { accumulator.push(region) } ty_trait(ref t) => { accumulator.push_all(t.principal.substs.regions().as_slice()); } ty_enum(_, ref substs) | ty_struct(_, ref substs) => { accum_substs(accumulator, substs); } ty_closure(ref closure_ty) => { match closure_ty.store { RegionTraitStore(region, _) => accumulator.push(region), UniqTraitStore => {} } } ty_unboxed_closure(_, ref region, ref substs) => { accumulator.push(*region); accum_substs(accumulator, substs); } ty_nil | ty_bool | ty_char | ty_int(_) | ty_uint(_) | ty_float(_) | ty_uniq(_) | ty_str | ty_vec(_, _) | ty_ptr(_) | ty_bare_fn(_) | ty_tup(_) | ty_param(_) | ty_infer(_) | ty_open(_) | ty_err => { } } }); fn accum_substs(accumulator: &mut Vec<Region>, substs: &Substs) { match substs.regions { subst::ErasedRegions => {} subst::NonerasedRegions(ref regions) => { for region in regions.iter() { accumulator.push(*region) } } } } } /// A free variable referred to in a function. #[deriving(Encodable, Decodable)] pub struct Freevar { /// The variable being accessed free. pub def: def::Def, // First span where it is accessed (there can be multiple). pub span: Span } pub type FreevarMap = NodeMap<Vec<Freevar>>; pub type CaptureModeMap = NodeMap<ast::CaptureClause>; pub fn with_freevars<T>(tcx: &ty::ctxt, fid: ast::NodeId, f: |&[Freevar]| -> T) -> T { match tcx.freevars.borrow().get(&fid) { None => f(&[]), Some(d) => f(d.as_slice()) } } impl AutoAdjustment { pub fn is_identity(&self) -> bool { match *self { AdjustAddEnv(..) => false, AdjustDerefRef(ref r) => r.is_identity(), } } } impl AutoDerefRef { pub fn is_identity(&self) -> bool { self.autoderefs == 0 && self.autoref.is_none() } }<|fim▁end|>
}
<|file_name|>test_average.py<|end_file_name|><|fim▁begin|>import unittest import numpy import six from chainer import functions from chainer import testing from chainer.testing import attr from chainer import utils<|fim▁hole|>@testing.parameterize(*( testing.product({ 'shape': [(3, 2, 4)], 'axis': [None, 0, 1, 2, -1, (0, 1), (1, -1)], 'dtype': [numpy.float16, numpy.float32, numpy.float64], 'use_weights': [True, False], 'keepdims': [True, False], 'use_variable_method': [True, False], }) + testing.product({ 'shape': [()], 'axis': [None], 'dtype': [numpy.float16, numpy.float32, numpy.float64], 'use_weights': [True, False], 'keepdims': [True, False], 'use_variable_method': [True, False], }))) @testing.fix_random() @testing.inject_backend_tests( None, # CPU tests [ {}, ] # GPU tests + testing.product({ 'use_cuda': [True], 'cuda_device': [0, 1], }) # ChainerX tests + testing.product({ 'use_chainerx': [True], 'chainerx_device': ['native:0', 'cuda:0', 'cuda:1'], })) class TestAverage(testing.FunctionTestCase): def setUp(self): self.skip_double_backward_test = True if self.dtype == numpy.float16: self.check_forward_options.update({'atol': 5e-3, 'rtol': 5e-3}) self.check_backward_options.update({'atol': 1e-2, 'rtol': 1e-1}) else: self.check_backward_options.update({'atol': 1e-2, 'rtol': 1e-2}) def before_test(self, test_name): if self.use_weights and isinstance(self.axis, tuple): # This condition is not supported raise unittest.SkipTest( 'Tuple axis is not supported when weights is given') def generate_inputs(self): x = numpy.random.uniform(-1, 1, self.shape).astype(self.dtype) if self.axis is None: w_shape = self.shape elif isinstance(self.axis, int): axis = self.axis if axis < 0: ndim = len(self.shape) axis += ndim w_shape = self.shape[axis], else: w_shape = tuple(self.shape[a] for a in self.axis) # Sample weights. Weights should not sum to 0. 
while True: w = numpy.random.uniform(-2, 2, w_shape).astype(self.dtype) w_sum_eps = 1.0 if self.dtype == numpy.float16 else 5e-2 if abs(w.sum()) > w_sum_eps: break return x, w def forward(self, inputs, device): x, w = inputs if not self.use_weights: w = None if self.use_variable_method: y = x.mean(axis=self.axis, weights=w, keepdims=self.keepdims) else: y = functions.average( x, axis=self.axis, weights=w, keepdims=self.keepdims) return y, def forward_expected(self, inputs): x, w = inputs if not self.use_weights: w = None y_expect = numpy.average(x, axis=self.axis, weights=w) if self.keepdims: # numpy.average does not support keepdims axis = self.axis if axis is None: axis = list(six.moves.range(x.ndim)) elif isinstance(axis, int): axis = axis, shape = list(x.shape) for i in six.moves.range(len(shape)): if i in axis or i - len(shape) in axis: shape[i] = 1 y_expect = y_expect.reshape(shape) y_expect = utils.force_array(y_expect, dtype=self.dtype) return y_expect, @testing.parameterize(*( testing.product({ 'shape': [(30, 20, 40)], 'axis': [None, 0, 1, 2, -1, (0, 1), (1, -1)], 'dtype': [numpy.float16], 'use_weights': [False], # np.average overflows when `weights` is used 'keepdims': [True, False], }) )) @testing.inject_backend_tests( None, # CPU tests [ {}, ] # GPU tests + testing.product({ 'use_cuda': [True], 'cuda_device': [0, 1], }) # ChainerX tests + testing.product({ 'use_chainerx': [True], 'chainerx_device': ['native:0', 'cuda:0', 'cuda:1'], })) @attr.slow @testing.with_requires('numpy>=1.12') # NumPy #8222 class TestAverageOverflowingSum(testing.FunctionTestCase): def setUp(self): self.check_forward_options.update({'atol': 1e-2, 'rtol': 2e-3}) self.check_backward_options.update({'atol': 1e-2, 'rtol': 1e-2}) self.check_double_backward_options.update({'atol': 1e-2, 'rtol': 1e-2}) def generate_inputs(self): x = numpy.random.uniform(3000, 7000, self.shape).astype(self.dtype) return x, def forward(self, inputs, device): x, = inputs y = functions.average( x, 
self.axis, keepdims=self.keepdims) return y, def forward_expected(self, inputs): x, = inputs y_expect = numpy.mean( x.astype(numpy.float64), self.axis, keepdims=self.keepdims ).astype(self.dtype) return utils.force_array(y_expect), @testing.parameterize(*testing.product({ 'dtype': [numpy.float16, numpy.float32, numpy.float64], })) class TestAverageDuplicateValueInAxis(unittest.TestCase): def test_duplicate_value(self): x = numpy.random.uniform(-1, 1, 24).reshape(2, 3, 4).astype(self.dtype) with self.assertRaises(ValueError): functions.average(x, axis=(0, 0)) def test_duplicate_value_negative(self): x = numpy.random.uniform(-1, 1, 24).reshape(2, 3, 4).astype(self.dtype) with self.assertRaises(ValueError): functions.average(x, axis=(1, -2)) def test_weights_and_axis(self): x = numpy.random.uniform(-1, 1, 24).reshape(2, 3, 4).astype(self.dtype) w = numpy.random.uniform(-1, 1, 6).reshape(2, 3).astype(self.dtype) with self.assertRaises(ValueError): functions.average(x, axis=(0, 1), weights=w) testing.run_module(__name__, __file__)<|fim▁end|>
<|file_name|>listwidget.py<|end_file_name|><|fim▁begin|>import os from functools import partial from PyQt4.QtGui import QWidget from PyQt4.QtCore import Qt from qgis.core import QgsMapLayer from qgis.gui import QgsExpressionBuilderDialog from roam.api.utils import layer_by_name from configmanager.models import QgsLayerModel, QgsFieldModel from configmanager.editorwidgets.core import ConfigWidget from configmanager.editorwidgets.uifiles.ui_listwidget_config import Ui_Form class ListWidgetConfig(Ui_Form, ConfigWidget): description = 'Select an item from a predefined list' def __init__(self, parent=None): super(ListWidgetConfig, self).__init__(parent) self.setupUi(self) self.allownull = False self.orderby = False self.orderbyCheck.hide() self.layerRadio.clicked.connect(partial(self.stackedWidget.setCurrentIndex, 0)) self.listRadio.clicked.connect(partial(self.stackedWidget.setCurrentIndex, 1)) self.layermodel = QgsLayerModel(watchregistry=False) self.layermodel.layerfilter = [QgsMapLayer.VectorLayer] self.fieldmodel = QgsFieldModel() self.blockSignals(True) self.layerCombo.setModel(self.layermodel) self.keyCombo.setModel(self.fieldmodel) self.valueCombo.setModel(self.fieldmodel) self.filterButton.pressed.connect(self.define_filter) self.fieldmodel.setLayerFilter(self.layerCombo.view().selectionModel()) self.reset() self.blockSignals(False) def define_filter(self): layer = self.layerCombo.currentText() if not layer: return layer = layer_by_name(layer) dlg = QgsExpressionBuilderDialog(layer, "List filter", self) text = self.filterText.toPlainText() dlg.setExpressionText(text) if dlg.exec_(): self.filterText.setPlainText(dlg.expressionText()) def reset(self): self.listtype = 'layer' self.listText.setPlainText('') self.orderby = False self.allownull = False self.filterText.setPlainText('') self.layerCombo.setCurrentIndex(-1) self.keyCombo.setCurrentIndex(-1) self.valueCombo.setCurrentIndex(-1) def widgetchanged(self): self.widgetdirty.emit(self.getconfig()) @property def 
allownull(self): return self.allownullCheck.isChecked() @allownull.setter def allownull(self, value): self.allownullCheck.setChecked(value) @property def orderby(self): return self.orderbyCheck.isChecked() @orderby.setter def orderby(self, value): self.orderbyCheck.setChecked(value) @property def list(self): return [item for item in self.listText.toPlainText().split('\n')] @property def filter(self): return self.filterText.toPlainText() @property def layer(self): return self.layerCombo.currentText() @property def key(self): index_key = self.fieldmodel.index(self.keyCombo.currentIndex(), 0) fieldname_key = self.fieldmodel.data(index_key, QgsFieldModel.FieldNameRole) return fieldname_key @property def value(self): index_value = self.fieldmodel.index(self.valueCombo.currentIndex(), 0) return self.fieldmodel.data(index_value, QgsFieldModel.FieldNameRole) def getconfig(self): config = {} config['allownull'] = self.allownull config['orderbyvalue'] = self.orderby if self.layerRadio.isChecked(): subconfig = {} # TODO Grab the data here and not just the text subconfig['layer'] = self.layer subconfig['key'] = self.key subconfig['value'] = self.value subconfig['filter'] = self.filter config['layer'] = subconfig else: config['list'] = {} config['list']['items'] = self.list return config def blockSignals(self, bool): for child in self.findChildren(QWidget): child.blockSignals(bool) super(ListWidgetConfig, self).blockSignals(bool) def setconfig(self, config): self.blockSignals(True) self.allownull = config.get('allownull', True) self.orderby = config.get('orderbyvalue', False) #Clear the widgets self.listText.setPlainText('') self.keyCombo.clear() self.valueCombo.clear() self.filterText.clear() self.layermodel.refresh() # Rebind all the values if 'list' in config: subconfig = config.get('list', {}) self.listRadio.setChecked(True) self.stackedWidget.setCurrentIndex(1) listitems = subconfig.get('items', []) itemtext = '\n'.join(listitems) self.listText.setPlainText(itemtext) else: 
self.layerRadio.setChecked(True) self.stackedWidget.setCurrentIndex(0) subconfig = config.get('layer', {}) layer = subconfig.get('layer', '') or '' key = subconfig.get('key', '') or '' value = subconfig.get('value', '') or '' filter = subconfig.get('filter', None) index = self.layerCombo.findData(layer, Qt.DisplayRole) if index > -1: self.layerCombo.setCurrentIndex(index) index = self.layermodel.index(index, 0) self.fieldmodel.updateLayer(index, None) keyindex = self.keyCombo.findData(key.lower(), QgsFieldModel.FieldNameRole) if keyindex > -1: self.keyCombo.setCurrentIndex(keyindex) <|fim▁hole|> self.valueCombo.setCurrentIndex(valueindex) self.filterText.setPlainText(filter) self.allownullCheck.setChecked(self.allownull) self.orderbyCheck.setChecked(self.orderby) self.blockSignals(False)<|fim▁end|>
valueindex = self.valueCombo.findData(value.lower(), QgsFieldModel.FieldNameRole) if valueindex > -1:
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>pub mod auth;<|fim▁end|>
pub mod hello;
<|file_name|>0051_auto_20190423_0803.py<|end_file_name|><|fim▁begin|># Generated by Django 2.1.7 on 2019-04-23 06:03 import diventi.accounts.models from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('accounts', '0050_auto_20190421_2252'), ]<|fim▁hole|> managers=[ ('objects', diventi.accounts.models.DiventiUserManager()), ], ), ]<|fim▁end|>
operations = [ migrations.AlterModelManagers( name='diventiuser',
<|file_name|>window_delegate_cpptoc.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2016 The Chromium Embedded Framework Authors. All rights // reserved. Use of this source code is governed by a BSD-style license that // can be found in the LICENSE file. // // --------------------------------------------------------------------------- // // This file was generated by the CEF translator tool. If making changes by // hand only do so within the body of existing method and function // implementations. See the translator.README.txt file in the tools directory // for more information. // #include "libcef_dll/cpptoc/views/window_delegate_cpptoc.h" #include "libcef_dll/ctocpp/views/view_ctocpp.h" #include "libcef_dll/ctocpp/views/window_ctocpp.h" namespace { // MEMBER FUNCTIONS - Body may be edited by hand. void CEF_CALLBACK window_delegate_on_window_created( struct _cef_window_delegate_t* self, cef_window_t* window) { // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING DCHECK(self); if (!self) return; // Verify param: window; type: refptr_diff DCHECK(window); if (!window) return; // Execute CefWindowDelegateCppToC::Get(self)->OnWindowCreated( CefWindowCToCpp::Wrap(window)); } void CEF_CALLBACK window_delegate_on_window_destroyed( struct _cef_window_delegate_t* self, cef_window_t* window) { // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING DCHECK(self); if (!self) return; // Verify param: window; type: refptr_diff DCHECK(window); if (!window) return; // Execute CefWindowDelegateCppToC::Get(self)->OnWindowDestroyed( CefWindowCToCpp::Wrap(window)); } int CEF_CALLBACK window_delegate_is_frameless( struct _cef_window_delegate_t* self, cef_window_t* window) { // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING DCHECK(self); if (!self) return 0; // Verify param: window; type: refptr_diff DCHECK(window); if (!window) return 0; // Execute bool _retval = CefWindowDelegateCppToC::Get(self)->IsFrameless( CefWindowCToCpp::Wrap(window)); // 
Return type: bool return _retval; } int CEF_CALLBACK window_delegate_can_resize(struct _cef_window_delegate_t* self, cef_window_t* window) { // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING DCHECK(self); if (!self) return 0; // Verify param: window; type: refptr_diff DCHECK(window); if (!window) return 0; // Execute bool _retval = CefWindowDelegateCppToC::Get(self)->CanResize( CefWindowCToCpp::Wrap(window)); // Return type: bool return _retval; } int CEF_CALLBACK window_delegate_can_maximize( struct _cef_window_delegate_t* self, cef_window_t* window) { // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING DCHECK(self); if (!self) return 0; // Verify param: window; type: refptr_diff DCHECK(window); if (!window) return 0; // Execute bool _retval = CefWindowDelegateCppToC::Get(self)->CanMaximize( CefWindowCToCpp::Wrap(window)); // Return type: bool return _retval; } int CEF_CALLBACK window_delegate_can_minimize( struct _cef_window_delegate_t* self, cef_window_t* window) { // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING DCHECK(self); if (!self) return 0; // Verify param: window; type: refptr_diff DCHECK(window); if (!window) return 0; // Execute bool _retval = CefWindowDelegateCppToC::Get(self)->CanMinimize( CefWindowCToCpp::Wrap(window)); // Return type: bool return _retval; } int CEF_CALLBACK window_delegate_can_close(struct _cef_window_delegate_t* self, cef_window_t* window) { // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING DCHECK(self); if (!self) return 0; // Verify param: window; type: refptr_diff DCHECK(window); if (!window) return 0; // Execute bool _retval = CefWindowDelegateCppToC::Get(self)->CanClose(<|fim▁hole|> // Return type: bool return _retval; } cef_size_t CEF_CALLBACK window_delegate_get_preferred_size( struct _cef_view_delegate_t* self, cef_view_t* view) { // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING DCHECK(self); if (!self) return CefSize(); // Verify param: view; type: 
refptr_diff DCHECK(view); if (!view) return CefSize(); // Execute cef_size_t _retval = CefWindowDelegateCppToC::Get( reinterpret_cast<cef_window_delegate_t*>(self))->GetPreferredSize( CefViewCToCpp::Wrap(view)); // Return type: simple return _retval; } cef_size_t CEF_CALLBACK window_delegate_get_minimum_size( struct _cef_view_delegate_t* self, cef_view_t* view) { // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING DCHECK(self); if (!self) return CefSize(); // Verify param: view; type: refptr_diff DCHECK(view); if (!view) return CefSize(); // Execute cef_size_t _retval = CefWindowDelegateCppToC::Get( reinterpret_cast<cef_window_delegate_t*>(self))->GetMinimumSize( CefViewCToCpp::Wrap(view)); // Return type: simple return _retval; } cef_size_t CEF_CALLBACK window_delegate_get_maximum_size( struct _cef_view_delegate_t* self, cef_view_t* view) { // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING DCHECK(self); if (!self) return CefSize(); // Verify param: view; type: refptr_diff DCHECK(view); if (!view) return CefSize(); // Execute cef_size_t _retval = CefWindowDelegateCppToC::Get( reinterpret_cast<cef_window_delegate_t*>(self))->GetMaximumSize( CefViewCToCpp::Wrap(view)); // Return type: simple return _retval; } int CEF_CALLBACK window_delegate_get_height_for_width( struct _cef_view_delegate_t* self, cef_view_t* view, int width) { // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING DCHECK(self); if (!self) return 0; // Verify param: view; type: refptr_diff DCHECK(view); if (!view) return 0; // Execute int _retval = CefWindowDelegateCppToC::Get( reinterpret_cast<cef_window_delegate_t*>(self))->GetHeightForWidth( CefViewCToCpp::Wrap(view), width); // Return type: simple return _retval; } void CEF_CALLBACK window_delegate_on_parent_view_changed( struct _cef_view_delegate_t* self, cef_view_t* view, int added, cef_view_t* parent) { // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING DCHECK(self); if (!self) return; // 
Verify param: view; type: refptr_diff DCHECK(view); if (!view) return; // Verify param: parent; type: refptr_diff DCHECK(parent); if (!parent) return; // Execute CefWindowDelegateCppToC::Get(reinterpret_cast<cef_window_delegate_t*>( self))->OnParentViewChanged( CefViewCToCpp::Wrap(view), added?true:false, CefViewCToCpp::Wrap(parent)); } void CEF_CALLBACK window_delegate_on_child_view_changed( struct _cef_view_delegate_t* self, cef_view_t* view, int added, cef_view_t* child) { // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING DCHECK(self); if (!self) return; // Verify param: view; type: refptr_diff DCHECK(view); if (!view) return; // Verify param: child; type: refptr_diff DCHECK(child); if (!child) return; // Execute CefWindowDelegateCppToC::Get(reinterpret_cast<cef_window_delegate_t*>( self))->OnChildViewChanged( CefViewCToCpp::Wrap(view), added?true:false, CefViewCToCpp::Wrap(child)); } } // namespace // CONSTRUCTOR - Do not edit by hand. CefWindowDelegateCppToC::CefWindowDelegateCppToC() { GetStruct()->on_window_created = window_delegate_on_window_created; GetStruct()->on_window_destroyed = window_delegate_on_window_destroyed; GetStruct()->is_frameless = window_delegate_is_frameless; GetStruct()->can_resize = window_delegate_can_resize; GetStruct()->can_maximize = window_delegate_can_maximize; GetStruct()->can_minimize = window_delegate_can_minimize; GetStruct()->can_close = window_delegate_can_close; GetStruct()->base.base.get_preferred_size = window_delegate_get_preferred_size; GetStruct()->base.base.get_minimum_size = window_delegate_get_minimum_size; GetStruct()->base.base.get_maximum_size = window_delegate_get_maximum_size; GetStruct()->base.base.get_height_for_width = window_delegate_get_height_for_width; GetStruct()->base.base.on_parent_view_changed = window_delegate_on_parent_view_changed; GetStruct()->base.base.on_child_view_changed = window_delegate_on_child_view_changed; } template<> CefRefPtr<CefWindowDelegate> 
CefCppToC<CefWindowDelegateCppToC, CefWindowDelegate, cef_window_delegate_t>::UnwrapDerived( CefWrapperType type, cef_window_delegate_t* s) { NOTREACHED() << "Unexpected class type: " << type; return NULL; } #ifndef NDEBUG template<> base::AtomicRefCount CefCppToC<CefWindowDelegateCppToC, CefWindowDelegate, cef_window_delegate_t>::DebugObjCt = 0; #endif template<> CefWrapperType CefCppToC<CefWindowDelegateCppToC, CefWindowDelegate, cef_window_delegate_t>::kWrapperType = WT_WINDOW_DELEGATE;<|fim▁end|>
CefWindowCToCpp::Wrap(window));
<|file_name|>program.py<|end_file_name|><|fim▁begin|>""" Copyright (C) 2008-2013 Tomasz Bursztyka This program is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ """ ProgramHeader and Program classes """ from elf.core.property import VALUE_FIXED, VALUE_BITWISE from elf.core.header import Header from elf.core.page import Page from elf.utils import mirrorDict phdr_type = { 'PT_NULL' : 0, 'PT_LOAD' : 1, 'PT_DYNAMIC' : 2, 'PT_INTERP' : 3, 'PT_NOTE' : 4, 'PT_SHLIB' : 5, 'PT_PHDR' : 6, 'PT_TLS' : 7, 'PT_NUM' : 8, 'PT_LOOS' : 0x60000000, 'PT_GNU_EH_FRAME' : 0x6474e550, 'PT_GNU_STACK' : 0x6474e551, 'PT_GNU_RELRO' : 0x6474e552, 'PT_PAX_FLAGS' : 0x65041580, 'PT_LOSUNW' : 0x6ffffffa, 'PT_SUNWBSS' : 0x6ffffffa, 'PT_SUNWSTACK' : 0x6ffffffb, 'PT_HISUNW' : 0x6fffffff, 'PT_HIOS' : 0x6fffffff, 'PT_LOPROC' : 0x70000000, 'PT_HIPROC' : 0x7fffffff, 'PT_MIPS_REGINFO' : 0x70000000, 'PT_MIPS_RTPROC' : 0x70000001, 'PT_MIPS_OPTIONS' : 0x70000002, 'PT_HP_TLS' : (0x60000000 + 0x0), 'PT_HP_CORE_NONE' : (0x60000000 + 0x1), 'PT_HP_CORE_VERSION' : (0x60000000 + 0x2), 'PT_HP_CORE_KERNEL' : (0x60000000 + 0x3), 'PT_HP_CORE_COMM' : (0x60000000 + 0x4), 'PT_HP_CORE_PROC' : (0x60000000 + 0x5), 'PT_HP_CORE_LOADABLE' : (0x60000000 + 0x6), 'PT_HP_CORE_STACK' : (0x60000000 + 0x7), 'PT_HP_CORE_SHM' : (0x60000000 + 0x8), 'PT_HP_CORE_MMF' : (0x60000000 + 0x9), 'PT_HP_PARALLEL' : (0x60000000 + 0x10), 'PT_HP_FASTBIND' : (0x60000000 + 0x11), 'PT_HP_OPT_ANNOT' : 
(0x60000000 + 0x12), 'PT_HP_HSL_ANNOT' : (0x60000000 + 0x13), 'PT_HP_STACK' : (0x60000000 + 0x14), 'PT_PARISC_ARCHEXT' : 0x70000000, 'PT_PARISC_UNWIND' : 0x70000001, 'PT_ARM_EXIDX' : 0x70000001, 'PT_IA_64_ARCHEXT' : (0x70000000 + 0), 'PT_IA_64_UNWIND' : (0x70000000 + 1), 'PT_IA_64_HP_OPT_ANOT' : (0x60000000 + 0x12), 'PT_IA_64_HP_HSL_ANOT' : (0x60000000 + 0x13), 'PT_IA_64_HP_STACK' : (0x60000000 + 0x14), } phdr_type = mirrorDict(phdr_type) phdr_flags = { 'PF_X' : (1 << 0), 'PF_W' : (1 << 1), 'PF_R' : (1 << 2), 'PF_PAGEEXEC' : (1 << 4), 'PF_NOPAGEEXEC' : (1 << 5), 'PF_SEGMEXEC' : (1 << 6), 'PF_NOSEGMEXEC' : (1 << 7), 'PF_MPROTECT' : (1 << 8), 'PF_NOMPROTECT' : (1 << 9), 'PF_RANDEXEC' : (1 << 10), 'PF_NORANDEXEC' : (1 << 11), 'PF_EMUTRAMP' : (1 << 12), 'PF_NOEMUTRAMP' : (1 << 13), 'PF_RANDMMAP' : (1 << 14), 'PF_NORANDMMAP' : (1 << 15), 'PF_MASKOS' : 0x0ff00000, 'PF_MASKPROC' : 0xf0000000, 'PF_MIPS_LOCAL' : 0x10000000, 'PF_PARISC_SBP' : 0x08000000,<|fim▁hole|> 'PF_HP_FAR_SHARED' : 0x00200000, 'PF_HP_NEAR_SHARED' : 0x00400000, 'PF_HP_CODE' : 0x01000000, 'PF_HP_MODIFY' : 0x02000000, 'PF_HP_LAZYSWAP' : 0x04000000, 'PF_HP_SBP' : 0x08000000, 'PF_ARM_SB' : 0x10000000, 'PF_IA_64_NORECOV' : 0x80000000, } phdr_flags = mirrorDict(phdr_flags) class ProgramHeader( Header ): descriptions_32 = [ 'p_type', 'p_offset', 'p_vaddr', 'p_paddr', 'p_filesz', 'p_memsz', 'p_flags', 'p_align' ] descriptions_64 = [ 'p_type', 'p_flags', 'p_offset', 'p_vaddr', 'p_paddr', 'p_filesz', 'p_memsz', 'p_align' ] hr_values = { 'p_type' : [ VALUE_FIXED, phdr_type ], 'p_flags' : [ VALUE_BITWISE, phdr_flags ], } format_32 = [ 'i', 'I', 'I', 'I', 'i', 'i', 'i', 'I' ] format_64 = [ 'i', 'i', 'Q', 'Q', 'Q', 'q', 'q', 'Q' ] def affect(self, program): try: self.p_vaddr -= self.p_offset - program.offset_start self.p_paddr = self.p_vaddr self.p_offset = program.offset_start self.p_filesz = program.size except Exception: pass class Program( Page ): def __init__(self, phdr): Page.__init__(self, phdr, phdr.p_offset, 
phdr.p_filesz) self.protected = True ####### # EOF # #######<|fim▁end|>
'PF_HP_PAGE_SIZE' : 0x00100000,
<|file_name|>libgeneric.ts<|end_file_name|><|fim▁begin|>namespace pxsim { // A ref-counted collection of either primitive or ref-counted objects (String, Image, // user-defined record, another collection) export class RefCollection extends RefObject { private data: any[] = []; constructor() { super(); } scan(mark: (path: string, v: any) => void) { for (let i = 0; i < this.data.length; ++i) mark("[" + i + "]", this.data[i]) } gcKey() { return "[...]" } gcSize() { return this.data.length + 2 } toArray(): any[] { return this.data.slice(0); } toAny(): any[] { return this.data.map(v => RefObject.toAny(v)); } toDebugString(): string { let s = "["; for (let i = 0; i < this.data.length; ++i) { if (i > 0) s += ","; let newElem = RefObject.toDebugString(this.data[i]); if (s.length + newElem.length > 100) { if (i == 0) { s += newElem.substr(0, 100); } s += "..." break; } else { s += newElem; } } s += "]" return s; } destroy() { let data = this.data for (let i = 0; i < data.length; ++i) { data[i] = 0; } this.data = []; } isValidIndex(x: number) { return (x >= 0 && x < this.data.length); } push(x: any) { this.data.push(x); } pop() { return this.data.pop();; } getLength() { return this.data.length; } setLength(x: number) { this.data.length = x; } getAt(x: number) { return this.data[x]; } setAt(x: number, y: any) { this.data[x] = y; } insertAt(x: number, y: number) { this.data.splice(x, 0, y); } removeAt(x: number) { let ret = this.data.splice(x, 1) return ret[0]; // return the deleted element. 
} indexOf(x: number, start: number) { return this.data.indexOf(x, start); } print() { //console.log(`RefCollection id:${this.id} refs:${this.refcnt} len:${this.data.length} d0:${this.data[0]}`)<|fim▁hole|> export function mk() { return new RefCollection(); } export function isArray(c: any) { return c instanceof RefCollection } export function length(c: RefCollection) { pxtrt.nullCheck(c) return c.getLength(); } export function setLength(c: RefCollection, x: number) { pxtrt.nullCheck(c) c.setLength(x); } export function push(c: RefCollection, x: any) { pxtrt.nullCheck(c) c.push(x); } export function pop(c: RefCollection, x: any) { pxtrt.nullCheck(c) let ret = c.pop(); // no decr() since we're returning it return ret; } export function getAt(c: RefCollection, x: number) { pxtrt.nullCheck(c) let tmp = c.getAt(x); return tmp; } export function removeAt(c: RefCollection, x: number) { pxtrt.nullCheck(c) if (!c.isValidIndex(x)) return; // no decr() since we're returning it return c.removeAt(x); } export function insertAt(c: RefCollection, x: number, y: number) { pxtrt.nullCheck(c) c.insertAt(x, y); } export function setAt(c: RefCollection, x: number, y: any) { pxtrt.nullCheck(c) c.setAt(x, y); } export function indexOf(c: RefCollection, x: any, start: number) { pxtrt.nullCheck(c) return c.indexOf(x, start) } export function removeElement(c: RefCollection, x: any) { pxtrt.nullCheck(c) let idx = indexOf(c, x, 0); if (idx >= 0) { removeAt(c, idx); return 1; } return 0; } } export namespace Math_ { // for explanations see: // http://stackoverflow.com/questions/3428136/javascript-integer-math-incorrect-results (second answer) // (but the code below doesn't come from there; I wrote it myself) export const imul = Math.imul || function (a: number, b: number) { const ah = (a >>> 16) & 0xffff; const al = a & 0xffff; const bh = (b >>> 16) & 0xffff; const bl = b & 0xffff; // the shift by 0 fixes the sign on the high part // the final |0 converts the unsigned value into a signed value 
return ((al * bl) + (((ah * bl + al * bh) << 16) >>> 0) | 0); }; export function idiv(x: number, y: number) { return ((x | 0) / (y | 0)) | 0 } export function round(n: number) { return Math.round(n) } export function roundWithPrecision(x: number, digits: number): number { digits = digits | 0; // invalid digits input if (digits <= 0) return Math.round(x); if (x == 0) return 0; let r = 0; while (r == 0 && digits < 21) { const d = Math.pow(10, digits++); r = Math.round(x * d + Number.EPSILON) / d; } return r; } export function ceil(n: number) { return Math.ceil(n) } export function floor(n: number) { return Math.floor(n) } export function sqrt(n: number) { return Math.sqrt(n) } export function pow(x: number, y: number) { return Math.pow(x, y) } export function clz32(n: number) { return Math.clz32(n) } export function log(n: number) { return Math.log(n) } export function log10(n: number) { return Math.log10(n) } export function log2(n: number) { return Math.log2(n) } export function exp(n: number) { return Math.exp(n) } export function sin(n: number) { return Math.sin(n) } export function sinh(n: number) { return Math.sinh(n) } export function cos(n: number) { return Math.cos(n) } export function cosh(n: number) { return Math.cosh(n) } export function tan(n: number) { return Math.tan(n) } export function tanh(n: number) { return Math.tanh(n) } export function asin(n: number) { return Math.asin(n) } export function asinh(n: number) { return Math.asinh(n) } export function acos(n: number) { return Math.acos(n) } export function acosh(n: number) { return Math.acosh(n) } export function atan(n: number) { return Math.atan(n) } export function atanh(x: number) { return Math.atanh(x) } export function atan2(y: number, x: number) { return Math.atan2(y, x) } export function trunc(x: number) { return x > 0 ? 
Math.floor(x) : Math.ceil(x); } export function random(): number { return Math.random(); } export function randomRange(min: number, max: number): number { if (min == max) return min; if (min > max) { let t = min; min = max; max = t; } if (Math.floor(min) == min && Math.floor(max) == max) return min + Math.floor(Math.random() * (max - min + 1)); else return min + Math.random() * (max - min); } } export namespace Number_ { export function lt(x: number, y: number) { return x < y; } export function le(x: number, y: number) { return x <= y; } export function neq(x: number, y: number) { return !eq(x, y); } export function eq(x: number, y: number) { return pxtrt.nullFix(x) == pxtrt.nullFix(y); } export function eqDecr(x: number, y: number) { if (pxtrt.nullFix(x) == pxtrt.nullFix(y)) { return true; } else { return false } } export function gt(x: number, y: number) { return x > y; } export function ge(x: number, y: number) { return x >= y; } export function div(x: number, y: number) { return Math.floor(x / y) | 0; } export function mod(x: number, y: number) { return x % y; } export function bnot(x: number) { return ~x; } export function toString(x: number) { return (x + ""); } } export namespace thumb { export function adds(x: number, y: number) { return (x + y) | 0; } export function subs(x: number, y: number) { return (x - y) | 0; } export function divs(x: number, y: number) { return Math.floor(x / y) | 0; } export function muls(x: number, y: number) { return Math_.imul(x, y); } export function ands(x: number, y: number) { return x & y; } export function orrs(x: number, y: number) { return x | y; } export function eors(x: number, y: number) { return x ^ y; } export function lsls(x: number, y: number) { return x << y; } export function lsrs(x: number, y: number) { return x >>> y; } export function asrs(x: number, y: number) { return x >> y; } export function bnot(x: number) { return ~x; } export function ignore(v: any) { return v; } } export namespace avr { function 
toInt(v: number) { return (v << 16) >> 16 } export function adds(x: number, y: number) { return toInt(x + y); } export function subs(x: number, y: number) { return toInt(x - y); } export function divs(x: number, y: number) { return toInt(Math.floor(x / y)); } export function muls(x: number, y: number) { return toInt(Math_.imul(x, y)); } export function ands(x: number, y: number) { return toInt(x & y); } export function orrs(x: number, y: number) { return toInt(x | y); } export function eors(x: number, y: number) { return toInt(x ^ y); } export function lsls(x: number, y: number) { return toInt(x << y); } export function lsrs(x: number, y: number) { return (x & 0xffff) >>> y; } export function asrs(x: number, y: number) { return toInt(x >> y); } export function bnot(x: number) { return ~x; } export function ignore(v: any) { return v; } } export namespace String_ { export function stringConv(v: any) { const cb = getResume(); if (v instanceof RefRecord) { if (v.vtable.toStringMethod) { runtime.runFiberAsync(v.vtable.toStringMethod as any, v) .then(() => { cb(runtime.currFrame.retval + "") }) return } } cb(v + "") } export function mkEmpty() { return "" } export function fromCharCode(code: number) { return (String.fromCharCode(code)); } export function toNumber(s: string) { return parseFloat(s); } // TODO check edge-conditions export function concat(a: string, b: string) { return (a + b); } export function substring(s: string, i: number, j: number) { pxtrt.nullCheck(s) return (s.slice(i, i + j)); } export function equals(s1: string, s2: string) { return s1 == s2; } export function compare(s1: string, s2: string) { if (s1 == s2) return 0; if (s1 < s2) return -1; return 1; } export function compareDecr(s1: string, s2: string) { if (s1 == s2) { return 0; } if (s1 < s2) return -1; return 1; } export function length(s: string) { return s.length } export function substr(s: string, start: number, length?: number) { return (s.substr(start, length)); } function inRange(s: 
string, i: number) { pxtrt.nullCheck(s) return 0 <= i && i < s.length } export function charAt(s: string, i: number) { return (s.charAt(i)); } export function charCodeAt(s: string, i: number) { pxtrt.nullCheck(s) return inRange(s, i) ? s.charCodeAt(i) : 0; } export function indexOf(s: string, searchValue: string, start?: number) { pxtrt.nullCheck(s); if (searchValue == null) return -1; return s.indexOf(searchValue, start); } export function lastIndexOf(s: string, searchValue: string, start?: number) { pxtrt.nullCheck(s); if (searchValue == null) return -1; return s.lastIndexOf(searchValue, start); } export function includes(s: string, searchValue: string, start?: number) { pxtrt.nullCheck(s); if (searchValue == null) return false; return s.includes(searchValue, start); } } export namespace Boolean_ { export function toString(v: boolean) { return v ? "true" : "false" } export function bang(v: boolean) { return !v; } } export class RefBuffer extends RefObject { isStatic = false constructor(public data: Uint8Array) { super(); } scan(mark: (path: string, v: any) => void) { // nothing to do } gcKey() { return "Buffer" } gcSize() { return 2 + (this.data.length + 3 >> 2) } gcIsStatic() { return this.isStatic } print() { // console.log(`RefBuffer id:${this.id} refs:${this.refcnt} len:${this.data.length} d0:${this.data[0]}`) } toDebugString(): string { return BufferMethods.toHex(this); } } export namespace BufferMethods { // keep in sync with C++! 
export enum NumberFormat { Int8LE = 1, UInt8LE, Int16LE, UInt16LE, Int32LE, Int8BE, UInt8BE, Int16BE, UInt16BE, Int32BE, UInt32LE, UInt32BE, Float32LE, Float64LE, Float32BE, Float64BE, }; function fmtInfoCore(fmt: NumberFormat) { switch (fmt) { case NumberFormat.Int8LE: return -1; case NumberFormat.UInt8LE: return 1; case NumberFormat.Int16LE: return -2; case NumberFormat.UInt16LE: return 2; case NumberFormat.Int32LE: return -4; case NumberFormat.UInt32LE: return 4; case NumberFormat.Int8BE: return -10; case NumberFormat.UInt8BE: return 10; case NumberFormat.Int16BE: return -20; case NumberFormat.UInt16BE: return 20; case NumberFormat.Int32BE: return -40; case NumberFormat.UInt32BE: return 40; case NumberFormat.Float32LE: return 4; case NumberFormat.Float32BE: return 40; case NumberFormat.Float64LE: return 8; case NumberFormat.Float64BE: return 80; default: throw U.userError("bad format"); } } function fmtInfo(fmt: NumberFormat) { let size = fmtInfoCore(fmt) let signed = false if (size < 0) { signed = true size = -size } let swap = false if (size >= 10) { swap = true size /= 10 } let isFloat = fmt >= NumberFormat.Float32LE return { size, signed, swap, isFloat } } export function getNumber(buf: RefBuffer, fmt: NumberFormat, offset: number) { let inf = fmtInfo(fmt) if (inf.isFloat) { let subarray = buf.data.buffer.slice(offset, offset + inf.size) if (inf.swap) { let u8 = new Uint8Array(subarray) u8.reverse() } if (inf.size == 4) return new Float32Array(subarray)[0] else return new Float64Array(subarray)[0] } let r = 0 for (let i = 0; i < inf.size; ++i) { r <<= 8 let off = inf.swap ? 
offset + i : offset + inf.size - i - 1 r |= buf.data[off] } if (inf.signed) { let missingBits = 32 - (inf.size * 8) r = (r << missingBits) >> missingBits; } else { r = r >>> 0; } return r } export function setNumber(buf: RefBuffer, fmt: NumberFormat, offset: number, r: number) { let inf = fmtInfo(fmt) if (inf.isFloat) { let arr = new Uint8Array(inf.size) if (inf.size == 4) new Float32Array(arr.buffer)[0] = r else new Float64Array(arr.buffer)[0] = r if (inf.swap) arr.reverse() for (let i = 0; i < inf.size; ++i) { buf.data[offset + i] = arr[i] } return } for (let i = 0; i < inf.size; ++i) { let off = !inf.swap ? offset + i : offset + inf.size - i - 1 buf.data[off] = (r & 0xff) r >>= 8 } } export function createBuffer(size: number) { return new RefBuffer(new Uint8Array(size)); } export function createBufferFromHex(hex: string) { let r = createBuffer(hex.length >> 1) for (let i = 0; i < hex.length; i += 2) r.data[i >> 1] = parseInt(hex.slice(i, i + 2), 16) r.isStatic = true return r } export function isReadOnly(buf: RefBuffer) { return buf.isStatic } export function getBytes(buf: RefBuffer) { // not sure if this is any useful... 
return buf.data; } function inRange(buf: RefBuffer, off: number) { pxtrt.nullCheck(buf) return 0 <= off && off < buf.data.length } export function getUint8(buf: RefBuffer, off: number) { return getByte(buf, off); } export function getByte(buf: RefBuffer, off: number) { if (inRange(buf, off)) return buf.data[off] else return 0; } export function setUint8(buf: RefBuffer, off: number, v: number) { setByte(buf, off, v); } function checkWrite(buf: RefBuffer) { if (buf.isStatic) U.userError("Writing to read only buffer.") } export function setByte(buf: RefBuffer, off: number, v: number) { if (inRange(buf, off)) { checkWrite(buf) buf.data[off] = v } } export function length(buf: RefBuffer) { return buf.data.length } export function fill(buf: RefBuffer, value: number, offset: number = 0, length: number = -1) { if (offset < 0 || offset > buf.data.length) return; if (length < 0) length = buf.data.length; length = Math.min(length, buf.data.length - offset); checkWrite(buf) buf.data.fill(value, offset, offset + length) } export function slice(buf: RefBuffer, offset: number, length: number) { offset = Math.min(buf.data.length, offset); if (length < 0) length = buf.data.length; length = Math.min(length, buf.data.length - offset); return new RefBuffer(buf.data.slice(offset, offset + length)); } export function toHex(buf: RefBuffer): string { const hex = "0123456789abcdef"; let res = ""; for (let i = 0; i < buf.data.length; ++i) { res += hex[buf.data[i] >> 4]; res += hex[buf.data[i] & 0xf]; } return res; } export function toString(buf: RefBuffer): string { return U.fromUTF8(U.uint8ArrayToString(buf.data)) } function memmove(dst: Uint8Array, dstOff: number, src: Uint8Array, srcOff: number, len: number) { if (src.buffer === dst.buffer) { memmove(dst, dstOff, src.slice(srcOff, srcOff + len), 0, len); } else { for (let i = 0; i < len; ++i) dst[dstOff + i] = src[srcOff + i]; } } const INT_MIN = -0x80000000; export function shift(buf: RefBuffer, offset: number, start: number, len: 
number) { if (len < 0) len = buf.data.length - start; if (start < 0 || start + len > buf.data.length || start + len < start || len == 0 || offset == 0 || offset == INT_MIN) return; if (len == 0 || offset == 0 || offset == INT_MIN) return; if (offset <= -len || offset >= len) { fill(buf, 0); return; } checkWrite(buf) if (offset < 0) { offset = -offset; memmove(buf.data, start + offset, buf.data, start, len - offset); buf.data.fill(0, start, start + offset) } else { len = len - offset; memmove(buf.data, start, buf.data, start + offset, len); buf.data.fill(0, start + len, start + len + offset) } } export function rotate(buf: RefBuffer, offset: number, start: number, len: number) { if (len < 0) len = buf.data.length - start; if (start < 0 || start + len > buf.data.length || start + len < start || len == 0 || offset == 0 || offset == INT_MIN) return; checkWrite(buf) if (offset < 0) offset += len << 8; // try to make it positive offset %= len; if (offset < 0) offset += len; let data = buf.data let n_first = offset let first = 0 let next = n_first let last = len while (first != next) { let tmp = data[first + start] data[first++ + start] = data[next + start] data[next++ + start] = tmp if (next == last) { next = n_first; } else if (first == n_first) { n_first = next; } } } export function write(buf: RefBuffer, dstOffset: number, src: RefBuffer, srcOffset = 0, length = -1) { if (length < 0) length = src.data.length; if (srcOffset < 0 || dstOffset < 0 || dstOffset > buf.data.length) return; length = Math.min(src.data.length - srcOffset, buf.data.length - dstOffset); if (length < 0) return; checkWrite(buf) memmove(buf.data, dstOffset, src.data, srcOffset, length) } } } namespace pxsim.control { export function createBufferFromUTF8(str: string) { return new pxsim.RefBuffer(U.stringToUint8Array(U.toUTF8(str))); } }<|fim▁end|>
} } export namespace Array_ {
<|file_name|>vpmulhw.rs<|end_file_name|><|fim▁begin|>use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode}; use ::RegType::*; use ::instruction_def::*; use ::Operand::*; use ::Reg::*; use ::RegScale::*; fn vpmulhw_1() { run_test(&Instruction { mnemonic: Mnemonic::VPMULHW, operand1: Some(Direct(XMM7)), operand2: Some(Direct(XMM1)), operand3: Some(Direct(XMM6)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 241, 229, 254], OperandSize::Dword) } fn vpmulhw_2() { run_test(&Instruction { mnemonic: Mnemonic::VPMULHW, operand1: Some(Direct(XMM1)), operand2: Some(Direct(XMM2)), operand3: Some(IndirectScaledIndexedDisplaced(EBX, EDX, Two, 1362152290, Some(OperandSize::Xmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 233, 229, 140, 83, 98, 203, 48, 81], OperandSize::Dword) } fn vpmulhw_3() { run_test(&Instruction { mnemonic: Mnemonic::VPMULHW, operand1: Some(Direct(XMM6)), operand2: Some(Direct(XMM2)), operand3: Some(Direct(XMM2)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 233, 229, 242], OperandSize::Qword) } fn vpmulhw_4() { run_test(&Instruction { mnemonic: Mnemonic::VPMULHW, operand1: Some(Direct(XMM2)), operand2: Some(Direct(XMM5)), operand3: Some(IndirectScaledIndexed(RCX, RAX, Two, Some(OperandSize::Xmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 209, 229, 20, 65], OperandSize::Qword) } fn vpmulhw_5() { run_test(&Instruction { mnemonic: Mnemonic::VPMULHW, operand1: Some(Direct(YMM1)), operand2: Some(Direct(YMM6)), operand3: Some(Direct(YMM4)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 205, 229, 204], OperandSize::Dword) } fn 
vpmulhw_6() { run_test(&Instruction { mnemonic: Mnemonic::VPMULHW, operand1: Some(Direct(YMM3)), operand2: Some(Direct(YMM6)), operand3: Some(Indirect(ESI, Some(OperandSize::Ymmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 205, 229, 30], OperandSize::Dword) } fn vpmulhw_7() { run_test(&Instruction { mnemonic: Mnemonic::VPMULHW, operand1: Some(Direct(YMM0)), operand2: Some(Direct(YMM6)), operand3: Some(Direct(YMM1)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 205, 229, 193], OperandSize::Qword) } fn vpmulhw_8() { run_test(&Instruction { mnemonic: Mnemonic::VPMULHW, operand1: Some(Direct(YMM1)), operand2: Some(Direct(YMM7)), operand3: Some(Indirect(RSI, Some(OperandSize::Ymmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 197, 229, 14], OperandSize::Qword) } fn vpmulhw_9() { run_test(&Instruction { mnemonic: Mnemonic::VPMULHW, operand1: Some(Direct(XMM3)), operand2: Some(Direct(XMM2)), operand3: Some(Direct(XMM2)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: None }, &[98, 241, 109, 138, 229, 218], OperandSize::Dword) } fn vpmulhw_10() { run_test(&Instruction { mnemonic: Mnemonic::VPMULHW, operand1: Some(Direct(XMM0)), operand2: Some(Direct(XMM3)), operand3: Some(IndirectScaledIndexed(ESI, EBX, Eight, Some(OperandSize::Xmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None }, &[98, 241, 101, 143, 229, 4, 222], OperandSize::Dword) } fn vpmulhw_11() { run_test(&Instruction { mnemonic: Mnemonic::VPMULHW, operand1: Some(Direct(XMM20)), operand2: Some(Direct(XMM7)), operand3: Some(Direct(XMM5)), operand4: None, lock: false, rounding_mode: None, 
merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K5), broadcast: None }, &[98, 225, 69, 141, 229, 229], OperandSize::Qword) } fn vpmulhw_12() { run_test(&Instruction { mnemonic: Mnemonic::VPMULHW, operand1: Some(Direct(XMM4)), operand2: Some(Direct(XMM14)), operand3: Some(IndirectScaledDisplaced(RCX, Eight, 532933922, Some(OperandSize::Xmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K5), broadcast: None }, &[98, 241, 13, 141, 229, 36, 205, 34, 237, 195, 31], OperandSize::Qword) } fn vpmulhw_13() { run_test(&Instruction { mnemonic: Mnemonic::VPMULHW, operand1: Some(Direct(YMM0)), operand2: Some(Direct(YMM3)), operand3: Some(Direct(YMM0)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K5), broadcast: None }, &[98, 241, 101, 173, 229, 192], OperandSize::Dword) } fn vpmulhw_14() { run_test(&Instruction { mnemonic: Mnemonic::VPMULHW, operand1: Some(Direct(YMM4)), operand2: Some(Direct(YMM5)), operand3: Some(Indirect(EAX, Some(OperandSize::Ymmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K5), broadcast: None }, &[98, 241, 85, 173, 229, 32], OperandSize::Dword) } fn vpmulhw_15() { run_test(&Instruction { mnemonic: Mnemonic::VPMULHW, operand1: Some(Direct(YMM16)), operand2: Some(Direct(YMM10)), operand3: Some(Direct(YMM9)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None }, &[98, 193, 45, 174, 229, 193], OperandSize::Qword) } fn vpmulhw_16() { run_test(&Instruction { mnemonic: Mnemonic::VPMULHW, operand1: Some(Direct(YMM5)), operand2: Some(Direct(YMM7)), operand3: Some(Indirect(RBX, Some(OperandSize::Ymmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: 
Some(MaskReg::K7), broadcast: None }, &[98, 241, 69, 175, 229, 43], OperandSize::Qword) } fn vpmulhw_17() { run_test(&Instruction { mnemonic: Mnemonic::VPMULHW, operand1: Some(Direct(ZMM1)), operand2: Some(Direct(ZMM2)), operand3: Some(Direct(ZMM0)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K5), broadcast: None }, &[98, 241, 109, 205, 229, 200], OperandSize::Dword) } <|fim▁hole|>} fn vpmulhw_19() { run_test(&Instruction { mnemonic: Mnemonic::VPMULHW, operand1: Some(Direct(ZMM30)), operand2: Some(Direct(ZMM16)), operand3: Some(Direct(ZMM19)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None }, &[98, 33, 125, 195, 229, 243], OperandSize::Qword) } fn vpmulhw_20() { run_test(&Instruction { mnemonic: Mnemonic::VPMULHW, operand1: Some(Direct(ZMM6)), operand2: Some(Direct(ZMM31)), operand3: Some(IndirectScaledIndexedDisplaced(RDX, RDX, Two, 1709561421, Some(OperandSize::Zmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K5), broadcast: None }, &[98, 241, 5, 197, 229, 180, 82, 77, 214, 229, 101], OperandSize::Qword) }<|fim▁end|>
fn vpmulhw_18() { run_test(&Instruction { mnemonic: Mnemonic::VPMULHW, operand1: Some(Direct(ZMM5)), operand2: Some(Direct(ZMM2)), operand3: Some(IndirectScaledIndexedDisplaced(ESI, EDX, Two, 64509818, Some(OperandSize::Zmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None }, &[98, 241, 109, 207, 229, 172, 86, 122, 87, 216, 3], OperandSize::Dword)
<|file_name|>common-dialog.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit } from '@angular/core'; import { MdDialogRef } from '@angular/material'; <|fim▁hole|>}) export class CommonDialogComponent implements OnInit { constructor( public dialogRef: MdDialogRef<CommonDialogComponent> ) { } title = { icon_text: "warning", icon_color: "yellow", text: "警告" } dialog_content = ""; dialog_subtitle = ""; ngOnInit() { } }<|fim▁end|>
@Component({ selector: 'app-common-dialog', templateUrl: './common-dialog.component.html', styleUrls: ['./common-dialog.component.css']
<|file_name|>modify_ssl_vpn_server.go<|end_file_name|><|fim▁begin|>package vpc //Licensed under the Apache License, Version 2.0 (the "License"); //you may not use this file except in compliance with the License. //You may obtain a copy of the License at // //http://www.apache.org/licenses/LICENSE-2.0 // //Unless required by applicable law or agreed to in writing, software //distributed under the License is distributed on an "AS IS" BASIS, //WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. //See the License for the specific language governing permissions and //limitations under the License. // // Code generated by Alibaba Cloud SDK Code Generator. // Changes may cause incorrect behavior and will be lost if the code is regenerated. import (<|fim▁hole|> "github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses" ) // ModifySslVpnServer invokes the vpc.ModifySslVpnServer API synchronously func (client *Client) ModifySslVpnServer(request *ModifySslVpnServerRequest) (response *ModifySslVpnServerResponse, err error) { response = CreateModifySslVpnServerResponse() err = client.DoAction(request, response) return } // ModifySslVpnServerWithChan invokes the vpc.ModifySslVpnServer API asynchronously func (client *Client) ModifySslVpnServerWithChan(request *ModifySslVpnServerRequest) (<-chan *ModifySslVpnServerResponse, <-chan error) { responseChan := make(chan *ModifySslVpnServerResponse, 1) errChan := make(chan error, 1) err := client.AddAsyncTask(func() { defer close(responseChan) defer close(errChan) response, err := client.ModifySslVpnServer(request) if err != nil { errChan <- err } else { responseChan <- response } }) if err != nil { errChan <- err close(responseChan) close(errChan) } return responseChan, errChan } // ModifySslVpnServerWithCallback invokes the vpc.ModifySslVpnServer API asynchronously func (client *Client) ModifySslVpnServerWithCallback(request *ModifySslVpnServerRequest, callback func(response *ModifySslVpnServerResponse, err error)) 
<-chan int { result := make(chan int, 1) err := client.AddAsyncTask(func() { var response *ModifySslVpnServerResponse var err error defer close(result) response, err = client.ModifySslVpnServer(request) callback(response, err) result <- 1 }) if err != nil { defer close(result) callback(nil, err) result <- 0 } return result } // ModifySslVpnServerRequest is the request struct for api ModifySslVpnServer type ModifySslVpnServerRequest struct { *requests.RpcRequest ResourceOwnerId requests.Integer `position:"Query" name:"ResourceOwnerId"` ClientToken string `position:"Query" name:"ClientToken"` SslVpnServerId string `position:"Query" name:"SslVpnServerId"` LocalSubnet string `position:"Query" name:"LocalSubnet"` IDaaSRegionId string `position:"Query" name:"IDaaSRegionId"` EnableMultiFactorAuth requests.Boolean `position:"Query" name:"EnableMultiFactorAuth"` IDaaSInstanceId string `position:"Query" name:"IDaaSInstanceId"` Cipher string `position:"Query" name:"Cipher"` ClientIpPool string `position:"Query" name:"ClientIpPool"` ResourceOwnerAccount string `position:"Query" name:"ResourceOwnerAccount"` Compress requests.Boolean `position:"Query" name:"Compress"` OwnerAccount string `position:"Query" name:"OwnerAccount"` OwnerId requests.Integer `position:"Query" name:"OwnerId"` Port requests.Integer `position:"Query" name:"Port"` Proto string `position:"Query" name:"Proto"` Name string `position:"Query" name:"Name"` } // ModifySslVpnServerResponse is the response struct for api ModifySslVpnServer type ModifySslVpnServerResponse struct { *responses.BaseResponse RequestId string `json:"RequestId" xml:"RequestId"` RegionId string `json:"RegionId" xml:"RegionId"` SslVpnServerId string `json:"SslVpnServerId" xml:"SslVpnServerId"` VpnGatewayId string `json:"VpnGatewayId" xml:"VpnGatewayId"` Name string `json:"Name" xml:"Name"` LocalSubnet string `json:"LocalSubnet" xml:"LocalSubnet"` ClientIpPool string `json:"ClientIpPool" xml:"ClientIpPool"` CreateTime int64 `json:"CreateTime" 
xml:"CreateTime"` Cipher string `json:"Cipher" xml:"Cipher"` Proto string `json:"Proto" xml:"Proto"` Port int `json:"Port" xml:"Port"` Compress bool `json:"Compress" xml:"Compress"` Connections int `json:"Connections" xml:"Connections"` MaxConnections int `json:"MaxConnections" xml:"MaxConnections"` InternetIp string `json:"InternetIp" xml:"InternetIp"` EnableMultiFactorAuth bool `json:"EnableMultiFactorAuth" xml:"EnableMultiFactorAuth"` IDaaSInstanceId string `json:"IDaaSInstanceId" xml:"IDaaSInstanceId"` } // CreateModifySslVpnServerRequest creates a request to invoke ModifySslVpnServer API func CreateModifySslVpnServerRequest() (request *ModifySslVpnServerRequest) { request = &ModifySslVpnServerRequest{ RpcRequest: &requests.RpcRequest{}, } request.InitWithApiInfo("Vpc", "2016-04-28", "ModifySslVpnServer", "vpc", "openAPI") request.Method = requests.POST return } // CreateModifySslVpnServerResponse creates a response to parse from ModifySslVpnServer response func CreateModifySslVpnServerResponse() (response *ModifySslVpnServerResponse) { response = &ModifySslVpnServerResponse{ BaseResponse: &responses.BaseResponse{}, } return }<|fim▁end|>
"github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests"
<|file_name|>data_source_sakuracloud_bucket_object_test.go<|end_file_name|><|fim▁begin|>// Copyright 2016-2019 terraform-provider-sakuracloud authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package sakuracloud import ( "fmt" "os" "testing" "github.com/hashicorp/terraform-plugin-sdk/helper/acctest" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" ) func TestAccSakuraCloudBucketObjectDataSource_Basic(t *testing.T) { randString1 := acctest.RandStringFromCharSet(10, acctest.CharSetAlpha) randString2 := acctest.RandStringFromCharSet(10, acctest.CharSetAlpha) randString3 := acctest.RandStringFromCharSet(10, acctest.CharSetAlpha) key := fmt.Sprintf("%s/%s/%s.txt", randString1, randString2, randString3) bucket := os.Getenv("SACLOUD_OJS_ACCESS_KEY_ID") resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, Providers: testAccProviders, PreventPostDestroyRefresh: true, CheckDestroy: testAccCheckSakuraCloudNoteDataSourceDestroy, Steps: []resource.TestStep{ { Config: testAccCheckSakuraCloudDataSourceBucketObject(bucket, key), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr( "data.sakuracloud_bucket_object.foobar", "key", key), resource.TestCheckResourceAttr( "data.sakuracloud_bucket_object.foobar", "size", "7"), resource.TestCheckResourceAttr( "data.sakuracloud_bucket_object.foobar", "content_type", "text/plain"), resource.TestCheckResourceAttr( "data.sakuracloud_bucket_object.foobar", "body", "content"), 
resource.TestCheckResourceAttr( "data.sakuracloud_bucket_object.foobar", "etag", "9a0364b9e99bb480dd25e1f0284c8555"), resource.TestCheckResourceAttr( "data.sakuracloud_bucket_object.foobar", "http_url", fmt.Sprintf("http://%s.b.sakurastorage.jp/%s", bucket, key)), resource.TestCheckResourceAttr( "data.sakuracloud_bucket_object.foobar", "https_url", fmt.Sprintf("https://%s.b.sakurastorage.jp/%s", bucket, key)), resource.TestCheckResourceAttr( "data.sakuracloud_bucket_object.foobar", "http_path_url", fmt.Sprintf("http://b.sakurastorage.jp/%s/%s", bucket, key)), resource.TestCheckResourceAttr(<|fim▁hole|> "data.sakuracloud_bucket_object.foobar", "https_path_url", fmt.Sprintf("https://b.sakurastorage.jp/%s/%s", bucket, key)), resource.TestCheckResourceAttr( "data.sakuracloud_bucket_object.foobar", "http_cache_url", fmt.Sprintf("http://%s.c.sakurastorage.jp/%s", bucket, key)), resource.TestCheckResourceAttr( "data.sakuracloud_bucket_object.foobar", "https_cache_url", fmt.Sprintf("https://%s.c.sakurastorage.jp/%s", bucket, key)), ), }, }, }) } func testAccCheckSakuraCloudDataSourceBucketObject(bucket, key string) string { return fmt.Sprintf(` resource "sakuracloud_bucket_object" "foobar" { bucket = "%s" key = "%s" content = "content" } data "sakuracloud_bucket_object" "foobar" { bucket = "${sakuracloud_bucket_object.foobar.bucket}" key = "${sakuracloud_bucket_object.foobar.key}" } `, bucket, key) }<|fim▁end|>
<|file_name|>test_tahoelafs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python """ Test the TahoeLAFS @author: Marek Palatinus <[email protected]> """ import sys import logging import unittest from fs.base import FS import fs.errors as errors from fs.tests import FSTestCases, ThreadingTestCases from fs.contrib.tahoelafs import TahoeLAFS, Connection <|fim▁hole|>WEBAPI = 'http://insecure.tahoe-lafs.org' # The public grid is too slow for threading testcases, disabling for now... class TestTahoeLAFS(unittest.TestCase,FSTestCases):#,ThreadingTestCases): # Disabled by default because it takes a *really* long time. __test__ = False def setUp(self): self.dircap = TahoeLAFS.createdircap(WEBAPI) self.fs = TahoeLAFS(self.dircap, cache_timeout=0, webapi=WEBAPI) def tearDown(self): self.fs.close() def test_dircap(self): # Is dircap in correct format? self.assert_(self.dircap.startswith('URI:DIR2:') and len(self.dircap) > 50) def test_concurrent_copydir(self): # makedir() on TahoeLAFS is currently not atomic pass def test_makedir_winner(self): # makedir() on TahoeLAFS is currently not atomic pass def test_big_file(self): pass if __name__ == '__main__': unittest.main()<|fim▁end|>
logging.getLogger().setLevel(logging.DEBUG) logging.getLogger('fs.tahoelafs').addHandler(logging.StreamHandler(sys.stdout))
<|file_name|>stream.py<|end_file_name|><|fim▁begin|>import os import asyncio import logging from functools import partial from collections import deque from lxml import etree from . import stanzas from .stanzas import Iq from .parser import Parser from .utils import signalEvent from .utils import benchmark as timedWait from . import getLogger log = getLogger(__name__) if "VEX_TIMED_WAITS" in os.environ and int(os.environ["VEX_TIMED_WAITS"]): from .metrics import ValueMetric stream_wait_met = ValueMetric("stream:wait_time", type_=float) else: stream_wait_met = None _ENFORCE_TIMEOUTS = bool("VEX_ENFORCE_TIMEOUTS" in os.environ and int(os.environ["VEX_ENFORCE_TIMEOUTS"])) class QueuedStanza: def __init__(self, s): self.task_set = set() self.stanza = s class ParserTask(asyncio.Task): def __init__(self, stream, loop=None): super().__init__(self._run(), loop=loop) self._parser = Parser() self._data_queue = asyncio.Queue() self._stream = stream def parse(self, bytes_): self._data_queue.put_nowait(bytes_) def reset(self): self._parser.reset() async def _run(self): while True: try: data = await self._data_queue.get() elems = self._parser.parse(data) for e in elems: stanza = stanzas.makeStanza(e) if log.getEffectiveLevel() <= logging.VERBOSE: log.verbose("[STANZA IN]:\n%s" % stanza.toXml(pprint=True).decode("utf-8")) await self._stream._handleStanza(stanza) except asyncio.CancelledError: pass except Exception as ex: log.exception(ex) class Stream(asyncio.Protocol): """Base class for XMPP streams.""" def __init__(self, creds, state_callbacks=None, mixins=None, default_timeout=None): self.creds = creds self._transport = None self._waiter_futures = [] self._tls_active = False self._callbacks = state_callbacks self._mixins = mixins or [] for mixin in self._mixins: for name, obj in mixin._exports: if name in self.__dict__: raise ValueError("Mixin '%s' exports ambiguous " "data named '%s'" % (str(mixin), name)) else: # Add the symbol to the stream's namespace self.__dict__[name] = 
obj self._parser_task = ParserTask(self) self.default_timeout = default_timeout # Stream errors self.error = None self._stanza_queue = deque(maxlen=10) @property def connected(self): if not self._transport: return False else: if (getattr(self._transport, "_closing") and self._transport._closing): # SSL transport return False return True @property def tls_active(self): return self._tls_active @property def jid(self): return self.creds.jid def close(self): if self.connected: self.send(b"</stream:stream>") self._transport.close() self._parser_task.cancel() def send(self, data): """Send ``data`` which can be a vexmpp.stanza.Stanza, lxml.etree.Element, a str, or bytes. The the case of bytes the encoding MUST be utf-8 encoded (per XMPP specification). <|fim▁hole|> invoked. Currently there is not a Mixin callback for strings or bytes. """ def _send(bytes_): if not self._transport: log.warn("Data send with disconnected transport") return self._transport.write(bytes_) log.debug("[BYTES OUT]: %s", bytes_) stanza = None if isinstance(data, stanzas.Stanza): stanza = data raw_data = data.toXml() elif isinstance(data, str): raw_data = data.encode("utf-8") elif isinstance(data, etree._Element): stanza = stanzas.Stanza(xml=data) raw_data = etree.tostring(data, encoding="utf-8") elif isinstance(data, bytes): raw_data = data else: raise ValueError("Unable to send type {}".format(type(data))) if stanza and log.getEffectiveLevel() <= logging.VERBOSE: log.verbose("[STANZA OUT]:\n%s" % stanza.toXml(pprint=True).decode("utf-8")) _send(raw_data) if stanza: for m in self._mixins: hook = partial(m.onSend, self, stanza) asyncio.ensure_future(self._runMixin(hook)) async def sendAndWaitIq(self, child_ns, to=None, child_name="query", type="get", raise_on_error=False, timeout=None, id_prefix=None): iq = Iq(to=to, type=type, request=(child_name, child_ns), id_prefix=id_prefix) resp = await self.sendAndWait(iq, raise_on_error=raise_on_error, timeout=timeout) return resp async def sendAndWait(self, 
stanza, raise_on_error=False, timeout=None): if not stanza.id: stanza.setId() xpath = "/%s[@id='%s']" % (stanza.name, stanza.id) self.send(stanza) resp = await self.wait([(xpath, None)], timeout=timeout) if resp.error is not None and raise_on_error: raise resp.error else: return resp async def negotiate(self, timeout=None): raise NotImplementedError() async def wait(self, xpaths, timeout=None): """``xpaths`` is a 2-tuple of the form (xpath, nsmap), or a list of the same tuples to wait on a choice of matches. The first matched stanza is returned. Passing a ``timeout`` argument will raise a asyncio.TimeoutError if not matches are found.""" global stream_wait_met if not isinstance(xpaths, list): xpaths = [xpaths] if timeout is None and self.default_timeout: timeout = self.default_timeout log.debug("Stream wait for %s [timeout=%s]" % (xpaths, timeout)) if _ENFORCE_TIMEOUTS and not timeout: raise RuntimeError("Timeout not set error") fut = _StreamWaitFuture(xpaths) # Run thru queue. Note, once a tasklet has seen a stanza it is skipped # by _StreamWaitFuture.matchStanza for queued_stanza in self._stanza_queue: matched = fut.matchStanza(queued_stanza) if matched: return queued_stanza.stanza self._waiter_futures.append(fut) try: with timedWait() as timer_stat: match = await asyncio.wait_for(fut, timeout) if stream_wait_met: stream_wait_met.update(timer_stat["total"]) log.debug("Stream wait - time: {:.3f} " "min/max/avg: {:.6f}/{:.6f}/{:.6f}" .format(stream_wait_met.value, stream_wait_met.min, stream_wait_met.max, stream_wait_met.average)) return match except asyncio.TimeoutError as ex: raise asyncio.TimeoutError( "Timeout ({}s) while waiting for xpaths: {}" .format(timeout, xpaths)) from ex finally: self._waiter_futures.remove(fut) # asyncio.Protocol implementation def connection_made(self, transport, tls=False): log.debug("Connection_made: %s", transport) self._transport = transport self._tls_active = tls signalEvent(self._callbacks, "connected", self, tls) def 
starttls_made(self, transport): self.connection_made(transport, tls=True) async def _handleStanza(self, stanza): if isinstance(stanza, stanzas.StreamError): signalEvent(self._callbacks, "streamError", self, stanza) self._transport.close() return for m in self._mixins: hook = partial(m.onStanza, self, stanza) asyncio.ensure_future(self._runMixin(hook)) self._stanza_queue.append(QueuedStanza(stanza)) if self._waiter_futures: for queued_stanza in self._stanza_queue: for fut in [f for f in self._waiter_futures if not f.done()]: matched = fut.matchStanza(queued_stanza) if matched: # XXX: How useful is this since _stanza_queue? # Yield the event loop, which is essential for a handle # and wait in quick succession. await asyncio.sleep(0) # asyncio.Protocol implementation def data_received(self, data): log.debug('[BYTES IN]: {!r}'.format(data.decode())) self._parser_task.parse(data) # asyncio.Protocol implementation def connection_lost(self, reason): self._transport = None self._tls_active = False log.debug('The server closed the connection: %s' % str(reason)) signalEvent(self._callbacks, "disconnected", self, reason) @property def default_timeout(self): return self._default_timeout @default_timeout.setter def default_timeout(self, t): if t is not None: t = int(t) self._default_timeout = t async def _runMixin(self, functor): try: await functor() except: log.exception("{} mixin error".format(functor.__class__.__name__)) class Mixin(object): def __init__(self, export_tuples=None): """ ``export_tuples`` is a list of 2-tuples (name, obj) that added to the stream object's __dict__, as in __dict__[name] = obj. By default no values are exported. """ self._exports = export_tuples if export_tuples else [] async def postSession(self, stream): """Called after stream negotiation and session creation.""" pass async def onStanza(self, stream, stanza): """Called for each incoming Stanza. 
See :func:`vexmpp.utils.xpathFilter` for a decorator that can filter only the stanzas the implementation is interested in. """ pass async def onSend(self, stream, stanza): """Called for each outgoing stanza.""" pass class StreamCallbacks: def connected(self, stream, tls_active): pass def disconnected(self, stream, reason): pass def streamError(self, stream, error): pass class _StreamWaitFuture(asyncio.Future): def __init__(self, xpaths, *args, loop=None): super().__init__(*args, loop=loop) self._xpaths = xpaths self._task = asyncio.Task.current_task() def matchStanza(self, queued_stanza): log.debug(f"MatchStanza: {queued_stanza.stanza.toXml()} xpaths: " "{0} - @{1}".format(self._xpaths, id(self._task))) if self._task in queued_stanza.task_set: # seen this... return False queued_stanza.task_set.add(self._task) stanza = queued_stanza.stanza for xp, nsmap in self._xpaths: log.debug("MatchStanza: Testing xpath {} against stanza {}" .format((xp, nsmap), stanza.toXml())) if stanza.xml.xpath(xp, namespaces=nsmap): log.debug("MatchStanza: matched") self.set_result(stanza) return True log.debug("MatchStanza: NOT matched") return False<|fim▁end|>
In the case of Stanza and Element the Mixin.onSend callback is
<|file_name|>table-notifications.js<|end_file_name|><|fim▁begin|>/** Namespace NotificationsTable */ var NotificationsTable = new function() { var ns = this; // reference to the namespace ns.oTable = null; var asInitVals = []; /** Update the table to list the notifications. */ this.update = function() { ns.oTable.fnClearTable( 0 ); ns.oTable.fnDraw(); }; /** Update the table to list the notifications. */ refresh_notifications = function() { if (ns.oTable) { ns.oTable.fnClearTable( 0 ); ns.oTable.fnDraw(); } }; this.approve = function(changeRequestID) { requestQueue.register(django_url + project.id + '/changerequest/approve', "POST", { "id": changeRequestID }, function (status, text, xml) { if (status == 200) { if (text && text != " ") { var jso = JSON.parse(text); if (jso.error) { alert(jso.error); } else { refresh_notifications(); } } } else if (status == 500) { win = window.open('', '', 'width=1100,height=620'); win.document.write(text); win.focus(); } return true; }); }; this.reject = function(changeRequestID) { requestQueue.register(django_url + project.id + '/changerequest/reject', "POST", { "id": changeRequestID }, function (status, text, xml) { if (status == 200) { if (text && text != " ") { var jso = JSON.parse(text); if (jso.error) { alert(jso.error); } else { refresh_notifications(); } } } else if (status == 500) { win = window.open('', '', 'width=1100,height=620'); win.document.write(text); win.focus(); } return true; }); }; this.perform_action = function(row_id) { var node = document.getElementById('action_select_' + row_id); if (node && node.tagName == "SELECT") { var row = $(node).closest('tr'); if (1 !== row.length) { CATMAID.error("Couldn't find table row for notification"); return; } var row_data = ns.oTable.fnGetData(row[0]); var action = node.options[node.selectedIndex].value; if (action == 'Show') { SkeletonAnnotations.staticMoveTo(row_data[6], row_data[5], row_data[4], function () {SkeletonAnnotations.staticSelectNode(row_data[7], 
row_data[8]);}); } else if (action == 'Approve') { NotificationsTable.approve(row_data[0]); CATMAID.client.get_messages(); // Refresh the notifications icon badge } else if (action == 'Reject') { NotificationsTable.reject(row_data[0]); CATMAID.client.get_messages(); // Refresh the notifications icon badge } node.selectedIndex = 0; } }; this.init = function (pid) { ns.pid = pid; ns.oTable = $('#notificationstable').dataTable({ // http://www.datatables.net/usage/options "bDestroy": true, "sDom": '<"H"lr>t<"F"ip>', // default: <"H"lfr>t<"F"ip> "bProcessing": true, "bServerSide": true, "bAutoWidth": false, "sAjaxSource": django_url + project.id + '/notifications/list', "fnServerData": function (sSource, aoData, fnCallback) { $.ajax({ "dataType": 'json', "type": "POST", "cache": false, "url": sSource, "data": aoData, "success": fnCallback }); }, "fnRowCallback": function ( nRow, aaData, iDisplayIndex ) { // Color each row based on its status. if (aaData[3] === 'Open') { nRow.style.backgroundColor = '#ffffdd'; } else if (aaData[3] === 'Approved') { nRow.style.backgroundColor = '#ddffdd'; } else if (aaData[3] === 'Rejected') { nRow.style.backgroundColor = '#ffdddd'; } else if (aaData[3] === 'Invalid') { nRow.style.backgroundColor = '#dddddd'; } return nRow; }, "iDisplayLength": 50, "aLengthMenu": [CATMAID.pageLengthOptions, CATMAID.pageLengthLabels], "bJQueryUI": true, "aoColumns": [{ "bSearchable": false, "bSortable": true, "bVisible": false }, // id { "sClass": "center", "bSearchable": true, "bSortable": false, }, // type { "bSearchable": false, "bSortable": false, }, // description { "sClass": "center", "bSearchable": true, "bSortable": true, "sWidth": "120px" }, // status { "bSearchable": false, "bVisible": false }, // x { "bSearchable": false, "bVisible": false }, // y { "bSearchable": false, "bVisible": false }, // z { "bSearchable": false, "bVisible": false }, // node_id<|fim▁hole|> "bSearchable": false, "bVisible": false }, // skeleton_id { "bSearchable": true, 
"bSortable": true }, // from { "bSearchable": false, "bSortable": true, "sWidth": "100px" }, // date { "sClass": "center", "bSearchable": false, "bSortable": false, "mData": null, "mRender" : function(obj, type, full) { var id = full[0]; var disabled = (full[3] == 'Open' ? '' : ' disabled'); return '<select id="action_select_' + id + '" onchange="NotificationsTable.perform_action(' + id + ')">' + ' <option>Action:</option>' + ' <option>Show</option>' + ' <option' + disabled + '>Approve</option>' + ' <option' + disabled + '>Reject</option>' + '</select>'; }, "sWidth": "100px" } // actions ] }); // filter table $.each(asInitVals, function(index, value) { if(value==="Search") return; if(value) { ns.oTable.fnFilter(value, index); } }); $("#notificationstable thead input").keyup(function () { /* Filter on the column (the index) of this element */ var i = $("thead input").index(this) + 2; asInitVals[i] = this.value; ns.oTable.fnFilter(this.value, i); }); $("#notificationstable thead input").each(function (i) { asInitVals[i+2] = this.value; }); $("#notificationstable thead input").focus(function () { if (this.className === "search_init") { this.className = ""; this.value = ""; } }); $("#notificationstable thead input").blur(function (event) { if (this.value === "") { this.className = "search_init"; this.value = asInitVals[$("thead input").index(this)+2]; } }); $('select#search_type').change( function() { ns.oTable.fnFilter( $(this).val(), 1 ); asInitVals[1] = $(this).val(); }); }; }();<|fim▁end|>
{
<|file_name|>ilm.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python from cogent.app.util import CommandLineApplication,\ CommandLineAppResult, ResultPath from cogent.app.parameters import Parameter,ValuedParameter,Parameters __author__ = "Shandy Wikman" __copyright__ = "Copyright 2007-2012, The Cogent Project" __contributors__ = ["Shandy Wikman"] __license__ = "GPL" __version__ = "1.5.3" __maintainer__ = "Shandy Wikman" __email__ = "[email protected]" __status__ = "Development"<|fim▁hole|> class ILM(CommandLineApplication): """Application controller ILM application Predict a secondary structure given a score matrix Main options: -L l: minimum loop length (default=3) -V v: minimum virtual loop length (default=3) -H h: minimum helix length (default=3) -N n: number of helices selected per iteration (default=1) -I i: number of iterations before termination(default=unlimited) """ _parameters = { '-L':ValuedParameter(Prefix='-',Name='L',Delimiter=' '), '-V':ValuedParameter(Prefix='-',Name='V',Delimiter=' '), '-H':ValuedParameter(Prefix='-',Name='H',Delimiter=' '), '-N':ValuedParameter(Prefix='-',Name='N',Delimiter=' '), '-I':ValuedParameter(Prefix='-',Name='I',Delimiter=' ')} _command = 'ilm' _input_handler = '_input_as_string' class hlxplot(CommandLineApplication): """Application controller hlxplot application Compute a helix plot score matrix from a sequence alignment Options: -b B: Set bad pair penalty to B (Default = 2) -g G: Set good pair score to G (Default = 1) -h H: Set minimum helix length to H (Default = 2) -l L: Set minimum loop length to L (Default = 3) -s S: Set helix length score to S (Default = 2.0) -t : Write output in text format (Default = Binary format) -x X: Set paired gap penalty to X (Default = 3) """ _parameters = { '-b':ValuedParameter(Prefix='-',Name='b',Delimiter=' '), '-g':ValuedParameter(Prefix='-',Name='g',Delimiter=' '), '-h':ValuedParameter(Prefix='-',Name='h',Delimiter=' '), '-l':ValuedParameter(Prefix='-',Name='l',Delimiter=' '), 
'-s':ValuedParameter(Prefix='-',Name='s',Delimiter=' '), '-t':ValuedParameter(Prefix='-',Name='t',Delimiter=' '), '-x':ValuedParameter(Prefix='-',Name='x',Delimiter=' ')} _command = 'hlxplot' _input_handler = '_input_as_string' class xhlxplot(CommandLineApplication): """Application controller xhlxplot application Compute an extended helix plot score matrix from a single sequence Options: -b B: Set bad pair penalty to B (Default = 200) -h H: Set minimum helix length to H (Default = 2) -l L: Set minimum loop length to L (Default = 3) -x X: Set paired gap penalty to X (Default = 500) -t : Write output in text format (Default = Binary format) -c : No Closing GU (Default = allows closing GU) """ _parameters = { '-b':ValuedParameter(Prefix='-',Name='b',Delimiter=' '), '-h':ValuedParameter(Prefix='-',Name='h',Delimiter=' '), '-l':ValuedParameter(Prefix='-',Name='l',Delimiter=' '), '-x':ValuedParameter(Prefix='-',Name='x',Delimiter=' '), '-t':ValuedParameter(Prefix='-',Name='t',Delimiter=' '), '-c':ValuedParameter(Prefix='-',Name='c',Delimiter=' ')} _command = 'xhlxplot' _input_handler = '_input_as_string'<|fim▁end|>
<|file_name|>menu-bar.component.ts<|end_file_name|><|fim▁begin|>import {Component, OnInit} from '@angular/core'; import {SharedData} from '../shared-data'; @Component({ selector: 'app-menu-bar', templateUrl: './menu-bar.component.html', styleUrls: ['./menu-bar.component.css']<|fim▁hole|>export class MenuBarComponent implements OnInit { constructor(public sharedData: SharedData) { } ngOnInit(): void { } }<|fim▁end|>
})
<|file_name|>post.js<|end_file_name|><|fim▁begin|>import 'whatwg-fetch';<|fim▁hole|> export default function post(url, data) { return fetch(url, { method: "POST", body: JSON.stringify(data), headers: { "Content-Type": "application/json" }, credentials: "same-origin" }).then(function (response) { response.status //=> number 100–599 response.statusText //=> String response.headers //=> Headers response.url //=> String return response.text() }, function (error) { error.message //=> String }); }<|fim▁end|>
<|file_name|>apiController.ts<|end_file_name|><|fim▁begin|>/** * Api Controller is the base for all API Rest controllers, it exposes a common * way of defining routes for each controller */ import * as express from "express"; export abstract class ApiController { public abstract getRoutes(): Route[]; } export interface Route { method: "GET"|"PUT"|"POST"|"DELETE"; handler?: (req: express.Request, res: express.Response) => void; path: string; schema?: object;<|fim▁hole|>}<|fim▁end|>
middleware?: express.Handler[] | express.Handler;
<|file_name|>tuple1.py<|end_file_name|><|fim▁begin|>a = (1, 2) b = (1, 3, 5, 7, 8, 11) print a[0] #b[3] = 3 # error! x1 = a[0] y1 = a[1] x1, y1 = a b1, b2, b3, b4, b5, b6 = b print b4 #b1, b2 = b # error! a = 1, 2, 3 print a <|fim▁hole|> a = f() x, y = f() x = f()[0]<|fim▁end|>
def f(): return 1, 3
<|file_name|>app.ts<|end_file_name|><|fim▁begin|>/// <reference path='./Scripts/DlhSoft.ProjectData.GanttChart.HTML.Controls.d.ts'/> import GanttChartView = DlhSoft.Controls.GanttChartView; import GanttChartItem = GanttChartView.Item; import PredecessorItem = GanttChartView.PredecessorItem; import NetworkDiagramView = DlhSoft.Controls.Pert.NetworkDiagramView; import NetworkDiagramItem = NetworkDiagramView.Item; // Query string syntax: ?theme // Supported themes: Default, Generic-bright, Generic-blue, DlhSoft-gray, Purple-green, Steel-blue, Dark-black, Cyan-green, Blue-navy, Orange-brown, Teal-green, Purple-beige, Gray-blue, Aero. var queryString = window.location.search; var theme = queryString ? queryString.substr(1) : null; declare var initializeGanttChartTemplates; declare var initializeGanttChartTheme; declare var initializePertChartTemplates; declare var initializePertChartTheme; // Retrieve and store the control and container elements for reference purposes. var ganttChartViewContainerElement = <HTMLElement>document.querySelector('#ganttChartViewContainer'); var ganttChartViewElement = <HTMLElement>document.querySelector('#ganttChartView'); var networkDiagramViewContainerElement = <HTMLElement>document.querySelector('#networkDiagramViewContainer'); var networkDiagramViewElement = <HTMLElement>document.querySelector('#networkDiagramView'); // Prepare Gantt Chart data items and settings. var date = new Date(), year = date.getFullYear(), month = date.getMonth(); var ganttChartItems = <GanttChartItem[]>[]; for (var i = 1; i <= 8; i++) ganttChartItems.push({ content: 'Task ' + i, start: new Date(year, month, 2 + i - 1, 8, 0, 0), finish: new Date(year, month, 2 + i - 1 + 3, 16, 0, 0) }); ganttChartItems[2].predecessors = <PredecessorItem[]>[{ item: ganttChartItems[1] }]; // Task 2 depends on Task 1. 
var ganttChartSettings = <GanttChartView.Settings>{ // Set the current time value to automatically scroll to a specific chart coordinate, and display a vertical bar highlighter at the specified point. currentTime: new Date(year, month, 2, 12, 0, 0) }; // Optionally, initialize custom theme and templates for Gantt Chart (themes.js, templates.js). initializeGanttChartTheme(ganttChartSettings, theme); initializeGanttChartTemplates(ganttChartSettings, theme); // Initialize the component. var ganttChartView = DlhSoft.Controls.GanttChartView.initialize(ganttChartViewElement, ganttChartItems, ganttChartSettings); function addNewGanttChartItem() { var item = { content: 'New task', start: new Date(year, month, 2, 8, 0, 0), finish: new Date(year, month, 4, 16, 0, 0) }; ganttChartView.addItem(item); } function showNetworkDiagram() { ganttChartViewContainerElement.style.display = 'none'; networkDiagramViewContainerElement.style.display = null; // Prepare Network Diagram data items and settings. var items = ganttChartView.getNetworkDiagramItems();<|fim▁hole|> var settings = <NetworkDiagramView.Settings>{ }; // Optionally, initialize custom theme and templates for Network Diagram (themes.js, templates.js). initializePertChartTheme(settings, theme); initializePertChartTemplates(settings, theme); var networkDiagramView = DlhSoft.Controls.Pert.NetworkDiagramView.initialize(networkDiagramViewElement, items, settings); } function hideNetworkDiagram() { networkDiagramViewContainerElement.style.display = 'none'; ganttChartViewContainerElement.style.display = null; }<|fim▁end|>
<|file_name|>androidmetrics_jni.cc<|end_file_name|><|fim▁begin|>/* * Copyright 2016 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source * tree. An additional intellectual property rights grant can be found * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ #include <map> #include <memory> #include "webrtc/sdk/android/src/jni/classreferenceholder.h" #include "webrtc/sdk/android/src/jni/jni_helpers.h" #include "webrtc/system_wrappers/include/metrics.h" #include "webrtc/system_wrappers/include/metrics_default.h" // Enables collection of native histograms and creating them. namespace webrtc_jni { JOW(void, Metrics_nativeEnable)(JNIEnv* jni, jclass) {<|fim▁hole|>// Gets and clears native histograms. JOW(jobject, Metrics_nativeGetAndReset)(JNIEnv* jni, jclass) { jclass j_metrics_class = jni->FindClass("org/webrtc/Metrics"); jmethodID j_add = GetMethodID(jni, j_metrics_class, "add", "(Ljava/lang/String;Lorg/webrtc/Metrics$HistogramInfo;)V"); jclass j_info_class = jni->FindClass("org/webrtc/Metrics$HistogramInfo"); jmethodID j_add_sample = GetMethodID(jni, j_info_class, "addSample", "(II)V"); // Create |Metrics|. jobject j_metrics = jni->NewObject( j_metrics_class, GetMethodID(jni, j_metrics_class, "<init>", "()V")); std::map<std::string, std::unique_ptr<webrtc::metrics::SampleInfo>> histograms; webrtc::metrics::GetAndReset(&histograms); for (const auto& kv : histograms) { // Create and add samples to |HistogramInfo|. jobject j_info = jni->NewObject( j_info_class, GetMethodID(jni, j_info_class, "<init>", "(III)V"), kv.second->min, kv.second->max, static_cast<int>(kv.second->bucket_count)); for (const auto& sample : kv.second->samples) { jni->CallVoidMethod(j_info, j_add_sample, sample.first, sample.second); } // Add |HistogramInfo| to |Metrics|. 
jstring j_name = jni->NewStringUTF(kv.first.c_str()); jni->CallVoidMethod(j_metrics, j_add, j_name, j_info); jni->DeleteLocalRef(j_name); jni->DeleteLocalRef(j_info); } CHECK_EXCEPTION(jni); return j_metrics; } } // namespace webrtc_jni<|fim▁end|>
webrtc::metrics::Enable(); }
<|file_name|>isEqual.js<|end_file_name|><|fim▁begin|>define('lodash/lang/isEqual', ['exports', 'lodash/internal/baseIsEqual', 'lodash/internal/bindCallback'], function (exports, _lodashInternalBaseIsEqual, _lodashInternalBindCallback) { 'use strict'; /** * Performs a deep comparison between two values to determine if they are * equivalent. If `customizer` is provided it's invoked to compare values. * If `customizer` returns `undefined` comparisons are handled by the method * instead. The `customizer` is bound to `thisArg` and invoked with up to * three arguments: (value, other [, index|key]). * * **Note:** This method supports comparing arrays, booleans, `Date` objects, * numbers, `Object` objects, regexes, and strings. Objects are compared by * their own, not inherited, enumerable properties. Functions and DOM nodes * are **not** supported. Provide a customizer function to extend support * for comparing other values. * * @static * @memberOf _ * @alias eq<|fim▁hole|> * @category Lang * @param {*} value The value to compare. * @param {*} other The other value to compare. * @param {Function} [customizer] The function to customize value comparisons. * @param {*} [thisArg] The `this` binding of `customizer`. * @returns {boolean} Returns `true` if the values are equivalent, else `false`. * @example * * var object = { 'user': 'fred' }; * var other = { 'user': 'fred' }; * * object == other; * // => false * * _.isEqual(object, other); * // => true * * // using a customizer callback * var array = ['hello', 'goodbye']; * var other = ['hi', 'goodbye']; * * _.isEqual(array, other, function(value, other) { * if (_.every([value, other], RegExp.prototype.test, /^h(?:i|ello)$/)) { * return true; * } * }); * // => true */ function isEqual(value, other, customizer, thisArg) { customizer = typeof customizer == 'function' ? (0, _lodashInternalBindCallback['default'])(customizer, thisArg, 3) : undefined; var result = customizer ? 
customizer(value, other) : undefined; return result === undefined ? (0, _lodashInternalBaseIsEqual['default'])(value, other, customizer) : !!result; } exports['default'] = isEqual; });<|fim▁end|>
<|file_name|>opa_test.go<|end_file_name|><|fim▁begin|>// Copyright 2018 The OPA Authors. All rights reserved. // Use of this source code is governed by an Apache2 // license that can be found in the LICENSE file. package opa import ( "context" "encoding/json" "reflect" "testing" ) func TestCompileRequestDeniedAlways(t *testing.T) { input := map[string]interface{}{ "method": "GET", "path": []string{"post"}, "user": "bob", } policy := ` package example allow = true { input.method = "GET" input.path = ["posts"] } ` expected := Result{Defined: false} result, err := Compile(context.Background(), input, []byte(policy)) if err != nil { t.Fatalf("Unexpected error while compiling query: %v", err) } if !reflect.DeepEqual(result, expected) { t.Fatalf("Expected %v but got: %v", expected, result) } } func TestCompileRequestAllowedAlways(t *testing.T) { input := map[string]interface{}{ "method": "GET", "path": []string{"posts"}, "user": "bob", } policy := ` package example allow = true { input.method = "GET" input.path = ["posts"] } ` expected := Result{Defined: true} result, err := Compile(context.Background(), input, []byte(policy)) if err != nil { t.Fatalf("Unexpected error while compiling query: %v", err) } if !reflect.DeepEqual(result, expected) { t.Fatalf("Expected %v but got: %v", expected, result) } } func TestCompileTermQuery(t *testing.T) { input := map[string]interface{}{ "method": "GET",<|fim▁hole|> } policy := ` package example allow = true { input.method = "GET" input.path = ["posts"] allowed[x] } allowed[x] { x := data.elastic.posts[_] x.author == input.user } ` result, err := Compile(context.Background(), input, []byte(policy)) if err != nil { t.Fatalf("Unexpected error while compiling query: %v", err) } if !result.Defined { t.Fatal("Expected result to be defined") } expectedQueryResult := `{"bool":{"_name":"BoolShouldQuery","should":{"term":{"author":{"_name":"TermQuery","value":"bob"}}}}}` actualQuerySource, err := result.Query.Source() if err != nil { 
t.Fatalf("Unexpected error while creating query source %v", err) } actualQueryResult, err := marshalQuery(actualQuerySource) if err != nil { t.Fatalf("Unexpected error while marshalling query: %v", err) } if actualQueryResult != expectedQueryResult { t.Fatalf("Expected %v but got: %v", expectedQueryResult, actualQueryResult) } } func TestCompileRangeQuery(t *testing.T) { input := map[string]interface{}{ "method": "GET", "path": []string{"posts"}, "clearance": 9, } policy := ` package example allow = true { input.method = "GET" input.path = ["posts"] allowed[x] } allowed[x] { x := data.elastic.posts[_] x.clearance > input.clearance } ` result, err := Compile(context.Background(), input, []byte(policy)) if err != nil { t.Fatalf("Unexpected error while compiling query: %v", err) } if !result.Defined { t.Fatal("Expected result to be defined") } expectedQueryResult := `{"bool":{"_name":"BoolShouldQuery","should":{"range":{"clearance":{"from":9,"include_lower":false,"include_upper":true,"to":null}}}}}` actualQuerySource, err := result.Query.Source() if err != nil { t.Fatalf("Unexpected error while creating query source %v", err) } actualQueryResult, err := marshalQuery(actualQuerySource) if err != nil { t.Fatalf("Unexpected error while marshalling query: %v", err) } if actualQueryResult != expectedQueryResult { t.Fatalf("Expected %v but got: %v", expectedQueryResult, actualQueryResult) } } func TestCompileMustNotQuery(t *testing.T) { input := map[string]interface{}{ "method": "GET", "path": []string{"posts"}, "clearance": 9, } policy := ` package example allow = true { input.method = "GET" input.path = ["posts"] allowed[x] } allowed[x] { x := data.elastic.posts[_] x.clearance != input.clearance } ` result, err := Compile(context.Background(), input, []byte(policy)) if err != nil { t.Fatalf("Unexpected error while compiling query: %v", err) } if !result.Defined { t.Fatal("Expected result to be defined") } expectedQueryResult := 
`{"bool":{"_name":"BoolShouldQuery","should":{"bool":{"_name":"BoolMustNotQuery","must_not":{"term":{"clearance":9}}}}}}` actualQuerySource, err := result.Query.Source() if err != nil { t.Fatalf("Unexpected error while creating query source %v", err) } actualQueryResult, err := marshalQuery(actualQuerySource) if err != nil { t.Fatalf("Unexpected error while marshalling query: %v", err) } if actualQueryResult != expectedQueryResult { t.Fatalf("Expected %v but got: %v", expectedQueryResult, actualQueryResult) } } func TestCompileQueryStringQuery(t *testing.T) { input := map[string]interface{}{ "method": "GET", "path": []string{"posts"}, "message": "OPA Rules !", } policy := ` package example allow = true { input.method = "GET" input.path = ["posts"] allowed[x] } allowed[x] { x := data.elastic.posts[_] contains(x.message, "OPA") } ` result, err := Compile(context.Background(), input, []byte(policy)) if err != nil { t.Fatalf("Unexpected error while compiling query: %v", err) } if !result.Defined { t.Fatal("Expected result to be defined") } expectedQueryResult := `{"bool":{"_name":"BoolShouldQuery","should":{"query_string":{"_name":"QueryStringQuery","default_field":"message","query":"*OPA*"}}}}` actualQuerySource, err := result.Query.Source() if err != nil { t.Fatalf("Unexpected error while creating query source %v", err) } actualQueryResult, err := marshalQuery(actualQuerySource) if err != nil { t.Fatalf("Unexpected error while marshalling query: %v", err) } if actualQueryResult != expectedQueryResult { t.Fatalf("Expected %v but got: %v", expectedQueryResult, actualQueryResult) } } func TestCompileRegexpQuery(t *testing.T) { input := map[string]interface{}{ "method": "GET", "path": []string{"posts"}, "email": "[email protected]", } policy := ` package example allow = true { input.method = "GET" input.path = ["posts"] allowed[x] } allowed[x] { x := data.elastic.posts[_] re_match("[a-zA-Z]+@[a-zA-Z]+.org", x.email) } ` result, err := Compile(context.Background(), input, 
[]byte(policy)) if err != nil { t.Fatalf("Unexpected error while compiling query: %v", err) } if !result.Defined { t.Fatal("Expected result to be defined") } expectedQueryResult := `{"bool":{"_name":"BoolShouldQuery","should":{"regexp":{"email":{"value":"[a-zA-Z]+@[a-zA-Z]+.org"}}}}}` actualQuerySource, err := result.Query.Source() if err != nil { t.Fatalf("Unexpected error while creating query source %v", err) } actualQueryResult, err := marshalQuery(actualQuerySource) if err != nil { t.Fatalf("Unexpected error while marshalling query: %v", err) } if actualQueryResult != expectedQueryResult { t.Fatalf("Expected %v but got: %v", expectedQueryResult, actualQueryResult) } } func TestCompileBoolFilterQuery(t *testing.T) { input := map[string]interface{}{ "method": "GET", "path": []string{"posts"}, "user": "bob", "clearance": 9, } policy := ` package example allow = true { input.method = "GET" input.path = ["posts"] allowed[x] } allowed[x] { x := data.elastic.posts[_] x.author == input.user x.clearance > input.clearance } ` result, err := Compile(context.Background(), input, []byte(policy)) if err != nil { t.Fatalf("Unexpected error while compiling query: %v", err) } if !result.Defined { t.Fatal("Expected result to be defined") } expectedQueryResult := `{"bool":{"_name":"BoolShouldQuery","should":{"bool":{"_name":"BoolFilterQuery","filter":[{"term":{"author":{"_name":"TermQuery","value":"bob"}}},{"range":{"clearance":{"from":9,"include_lower":false,"include_upper":true,"to":null}}}]}}}}` actualQuerySource, err := result.Query.Source() if err != nil { t.Fatalf("Unexpected error while creating query source %v", err) } actualQueryResult, err := marshalQuery(actualQuerySource) if err != nil { t.Fatalf("Unexpected error while marshalling query: %v", err) } if actualQueryResult != expectedQueryResult { t.Fatalf("Expected %v but got: %v", expectedQueryResult, actualQueryResult) } } func TestCompileBoolShouldQuery(t *testing.T) { input := map[string]interface{}{ "method": 
"GET", "path": []string{"posts"}, "user": "bob", "clearance": 9, } policy := ` package example allow = true { input.method = "GET" input.path = ["posts"] allowed[x] } allowed[x] { x := data.elastic.posts[_] x.author == input.user } allowed[x] { x := data.elastic.posts[_] x.clearance > input.clearance } ` result, err := Compile(context.Background(), input, []byte(policy)) if err != nil { t.Fatalf("Unexpected error while compiling query: %v", err) } if !result.Defined { t.Fatal("Expected result to be defined") } expectedQueryResult := `{"bool":{"_name":"BoolShouldQuery","should":[{"term":{"author":{"_name":"TermQuery","value":"bob"}}},{"range":{"clearance":{"from":9,"include_lower":false,"include_upper":true,"to":null}}}]}}` actualQuerySource, err := result.Query.Source() if err != nil { t.Fatalf("Unexpected error while creating query source %v", err) } actualQueryResult, err := marshalQuery(actualQuerySource) if err != nil { t.Fatalf("Unexpected error while marshalling query: %v", err) } if actualQueryResult != expectedQueryResult { t.Fatalf("Expected %v but got: %v", expectedQueryResult, actualQueryResult) } } func marshalQuery(x interface{}) (string, error) { d, err := json.Marshal(x) if err != nil { return "", err } return string(d), nil }<|fim▁end|>
"path": []string{"posts"}, "user": "bob",
<|file_name|>per.py<|end_file_name|><|fim▁begin|>import os import pickle import numpy as np from tqdm import tqdm class SumTree:<|fim▁hole|> self.data = np.empty(capacity, dtype=object) self.head = 0 @property def total_priority(self): return self.tree[0] @property def max_priority(self): return np.max(self.tree[-self.capacity:]) @property def min_priority(self): return np.min(self.tree[-self.capacity:]) def _tree_to_data_index(self, i): return i - self.capacity + 1 def _data_to_tree_index(self, i): return i + self.capacity - 1 def add(self, priority, data): tree_index = self._data_to_tree_index(self.head) self.update_priority(tree_index, priority) self.data[self.head] = data self.head += 1 if self.head >= self.capacity: self.head = 0 def update_priority(self, tree_index, priority): delta = priority - self.tree[tree_index] self.tree[tree_index] = priority while tree_index != 0: tree_index = (tree_index - 1) // 2 self.tree[tree_index] += delta def get_leaf(self, value): parent = 0 while True: left = 2 * parent + 1 right = left + 1 if left >= len(self.tree): leaf = parent break else: if value <= self.tree[left]: parent = left else: value -= self.tree[left] parent = right data_index = self._tree_to_data_index(leaf) return leaf, self.tree[leaf], self.data[data_index] class PrioritizedExperienceReplay: def __init__(self, capacity, initial_size, epsilon, alpha, beta, beta_annealing_rate, max_td_error, ckpt_dir): self.tree = SumTree(capacity) self.capacity = capacity self.epsilon = epsilon self.initial_size = initial_size self.alpha = alpha self.beta = beta self.beta_annealing_rate = beta_annealing_rate self.max_td_error = max_td_error self.ckpt_dir = ckpt_dir def add(self, transition): max_priority = self.tree.max_priority if max_priority == 0: max_priority = self.max_td_error self.tree.add(max_priority, transition) def sample(self, batch_size): self.beta = np.min([1., self.beta + self.beta_annealing_rate]) priority_segment = self.tree.total_priority / batch_size 
min_probability = self.tree.min_priority / self.tree.total_priority max_weight = (min_probability * batch_size) ** (-self.beta) samples, sample_indices, importance_sampling_weights = [], [], [] for i in range(batch_size): value = np.random.uniform(priority_segment * i, priority_segment * (i + 1)) index, priority, transition = self.tree.get_leaf(value) sample_probability = priority / self.tree.total_priority importance_sampling_weights.append(((batch_size * sample_probability) ** -self.beta) / max_weight) sample_indices.append(index) samples.append(transition) return sample_indices, samples, importance_sampling_weights def update_priorities(self, tree_indices, td_errors): td_errors += self.epsilon clipped_errors = np.minimum(td_errors, self.max_td_error) priorities = clipped_errors ** self.alpha for tree_index, priority in zip(tree_indices, priorities): self.tree.update_priority(tree_index, priority) def load_or_instantiate(self, env): if os.path.exists(os.path.join(self.ckpt_dir, "memory.pkl")): self.load() return state = env.reset() for _ in tqdm(range(self.initial_size), desc="Initializing replay memory", unit="transition"): action = env.action_space.sample() next_state, reward, done, info = env.step(action) transition = (state, action, reward, next_state, done) self.add(transition) state = next_state if done: state = env.reset() def load(self): with open(os.path.join(self.ckpt_dir, "memory.pkl"), "rb") as f: self.tree = pickle.load(f) def save(self): with open(os.path.join(self.ckpt_dir, "memory.pkl"), "wb") as f: pickle.dump(self.tree, f)<|fim▁end|>
def __init__(self, capacity): self.capacity = capacity self.tree = np.zeros(2 * capacity - 1, dtype=np.float32)
<|file_name|>operations.rs<|end_file_name|><|fim▁begin|>bitflags! { flags Operations: u32 { const CREATE = 0x0001, const DELETE = 0x0002,<|fim▁hole|> const MODIFY = 0x0020, const OPEN = 0x0040, const CLOSE_WRITE = 0x0080, const CLOSE_NOWRITE = 0x0100, const UNMOUNT = 0x0240, const MOUNT = 0x0400, const IS_DIR = 0x0800, const OVERFLOW = 0x1000, } }<|fim▁end|>
const MOVE = 0x0004, const ACCESS = 0x0008, const ATTRIB = 0x0010,
<|file_name|>feats.js<|end_file_name|><|fim▁begin|>"use strict"; class FeatsPage extends ListPage { constructor () { const pageFilter = new PageFilterFeats(); super({ dataSource: "data/feats.json", pageFilter, listClass: "feats", sublistClass: "subfeats", dataProps: ["feat"], isPreviewable: true, }); } getListItem (feat, ftI, isExcluded) { this._pageFilter.mutateAndAddToFilters(feat, isExcluded); const eleLi = document.createElement("div"); eleLi.className = `lst__row flex-col ${isExcluded ? "lst__row--blacklisted" : ""}`; const source = Parser.sourceJsonToAbv(feat.source); const hash = UrlUtil.autoEncodeHash(feat); eleLi.innerHTML = `<a href="#${hash}" class="lst--border lst__row-inner"> <span class="col-0-3 px-0 flex-vh-center lst__btn-toggle-expand self-flex-stretch">[+]</span> <span class="bold col-3-5 px-1">${feat.name}</span> <span class="col-3-5 ${feat._slAbility === VeCt.STR_NONE ? "list-entry-none " : ""}">${feat._slAbility}</span> <span class="col-3 ${feat._slPrereq === VeCt.STR_NONE ? "list-entry-none " : ""}">${feat._slPrereq}</span> <span class="source col-1-7 text-center ${Parser.sourceJsonToColor(feat.source)} pr-0" title="${Parser.sourceJsonToFull(feat.source)}" ${BrewUtil.sourceJsonToStyle(feat.source)}>${source}</span> </a> <div class="flex ve-hidden relative lst__wrp-preview"> <div class="vr-0 absolute lst__vr-preview"></div> <div class="flex-col py-3 ml-4 lst__wrp-preview-inner"></div> </div>`; const listItem = new ListItem( ftI, eleLi, feat.name, { hash, source, ability: feat._slAbility,<|fim▁hole|> prerequisite: feat._slPrereq, }, { uniqueId: feat.uniqueId ? 
feat.uniqueId : ftI, isExcluded, }, ); eleLi.addEventListener("click", (evt) => this._list.doSelect(listItem, evt)); eleLi.addEventListener("contextmenu", (evt) => ListUtil.openContextMenu(evt, this._list, listItem)); return listItem; } handleFilterChange () { const f = this._filterBox.getValues(); this._list.filter(item => this._pageFilter.toDisplay(f, this._dataList[item.ix])); FilterBox.selectFirstVisible(this._dataList); } getSublistItem (feat, pinId) { const hash = UrlUtil.autoEncodeHash(feat); const $ele = $(`<div class="lst__row lst__row--sublist flex-col"> <a href="#${hash}" class="lst--border lst__row-inner"> <span class="bold col-4 pl-0">${feat.name}</span> <span class="col-4 ${feat._slAbility === VeCt.STR_NONE ? "list-entry-none" : ""}">${feat._slAbility}</span> <span class="col-4 ${feat._slPrereq === VeCt.STR_NONE ? "list-entry-none" : ""} pr-0">${feat._slPrereq}</span> </a> </div>`) .contextmenu(evt => ListUtil.openSubContextMenu(evt, listItem)) .click(evt => ListUtil.sublist.doSelect(listItem, evt)); const listItem = new ListItem( pinId, $ele, feat.name, { hash, ability: feat._slAbility, prerequisite: feat._slPrereq, }, ); return listItem; } doLoadHash (id) { const feat = this._dataList[id]; $("#pagecontent").empty().append(RenderFeats.$getRenderedFeat(feat)); ListUtil.updateSelected(); } async pDoLoadSubHash (sub) { sub = this._filterBox.setFromSubHashes(sub); await ListUtil.pSetFromSubHashes(sub); } } const featsPage = new FeatsPage(); window.addEventListener("load", () => featsPage.pOnLoad());<|fim▁end|>
<|file_name|>forms.py<|end_file_name|><|fim▁begin|>from datetime import date <|fim▁hole|> class AnnouncementForm(forms.Form): """Form for collecting information about an announcement. This is not a ModelForm, and does not include the group or locale fields, because it should only be used in a context where the group or locale is implicit, and should not be user controllable. If you need a user controllable locale or group, use the admin interface. """ content = forms.CharField(label=_lazy("Content"), max_length=10000, widget=forms.Textarea) show_after = forms.DateField( label=_lazy("Show after"), initial=date.today, input_formats=["%Y-%m-%d"] ) show_until = forms.DateField( label=_lazy("Show until"), required=False, input_formats=["%Y-%m-%d"] )<|fim▁end|>
from django import forms from django.utils.translation import ugettext_lazy as _lazy
<|file_name|>dvips.py<|end_file_name|><|fim▁begin|># This file is part of Rubber and thus covered by the GPL import rubber.dvip_tool import rubber.module_interface class Module (rubber.module_interface.Module):<|fim▁hole|> def __init__ (self, document, opt): self.dep = rubber.dvip_tool.Dvip_Tool_Dep_Node (document, 'dvips')<|fim▁end|>
<|file_name|>run_with_coredumps_enabled.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 # Copyright (c) 2021, the Dart project authors. Please see the AUTHORS file # for details. All rights reserved. Use of this source code is governed by a # BSD-style license that can be found in the LICENSE file. from contextlib import ExitStack import subprocess import sys import utils <|fim▁hole|>def Main(): args = sys.argv[1:] with ExitStack() as stack: for ctx in utils.CoreDumpArchiver(args): stack.enter_context(ctx) exit_code = subprocess.call(args) utils.DiagnoseExitCode(exit_code, args) return exit_code if __name__ == '__main__': sys.exit(Main())<|fim▁end|>
<|file_name|>key_encrypt.go<|end_file_name|><|fim▁begin|>package jwe import ( "crypto" "crypto/aes" "crypto/cipher" "crypto/ecdsa" "crypto/rand" "crypto/rsa" "crypto/sha1" "crypto/sha256" "crypto/subtle" "encoding/binary" "fmt" "hash" "github.com/lestrrat/go-jwx/internal/concatkdf" "github.com/lestrrat/go-jwx/internal/debug" "github.com/lestrrat/go-jwx/jwa" "github.com/pkg/errors" ) // NewKeyWrapEncrypt creates a key-wrap encryptor using AES-CGM. // Although the name suggests otherwise, this does the decryption as well. func NewKeyWrapEncrypt(alg jwa.KeyEncryptionAlgorithm, sharedkey []byte) (KeyWrapEncrypt, error) { return KeyWrapEncrypt{ alg: alg, sharedkey: sharedkey, }, nil } // Algorithm returns the key encryption algorithm being used func (kw KeyWrapEncrypt) Algorithm() jwa.KeyEncryptionAlgorithm { return kw.alg } // Kid returns the key ID associated with this encrypter func (kw KeyWrapEncrypt) Kid() string { return kw.KeyID } // KeyDecrypt decrypts the encrypted key using AES-CGM key unwrap func (kw KeyWrapEncrypt) KeyDecrypt(enckey []byte) ([]byte, error) { block, err := aes.NewCipher(kw.sharedkey) if err != nil { return nil, errors.Wrap(err, "failed to create cipher from shared key") } cek, err := keyunwrap(block, enckey) if err != nil { return nil, errors.Wrap(err, "failed to unwrap data") } return cek, nil } // KeyEncrypt encrypts the given content encryption key func (kw KeyWrapEncrypt) KeyEncrypt(cek []byte) (ByteSource, error) { block, err := aes.NewCipher(kw.sharedkey) if err != nil { return nil, errors.Wrap(err, "failed to create cipher from shared key") } encrypted, err := keywrap(block, cek) if err != nil { return nil, errors.Wrap(err, `keywrap: failed to wrap key`) } return ByteKey(encrypted), nil } // NewEcdhesKeyWrapEncrypt creates a new key encrypter based on ECDH-ES func NewEcdhesKeyWrapEncrypt(alg jwa.KeyEncryptionAlgorithm, key *ecdsa.PublicKey) (*EcdhesKeyWrapEncrypt, error) { generator, err := NewEcdhesKeyGenerate(alg, key) if err != nil 
{ return nil, errors.Wrap(err, "failed to create key generator") } return &EcdhesKeyWrapEncrypt{ algorithm: alg, generator: generator, }, nil } // Algorithm returns the key encryption algorithm being used func (kw EcdhesKeyWrapEncrypt) Algorithm() jwa.KeyEncryptionAlgorithm { return kw.algorithm } // Kid returns the key ID associated with this encrypter func (kw EcdhesKeyWrapEncrypt) Kid() string { return kw.KeyID } // KeyEncrypt encrypts the content encryption key using ECDH-ES func (kw EcdhesKeyWrapEncrypt) KeyEncrypt(cek []byte) (ByteSource, error) { kg, err := kw.generator.KeyGenerate() if err != nil { return nil, errors.Wrap(err, "failed to create key generator") } bwpk, ok := kg.(ByteWithECPrivateKey) if !ok { return nil, errors.New("key generator generated invalid key (expected ByteWithECPrivateKey)") } block, err := aes.NewCipher(bwpk.Bytes()) if err != nil { return nil, errors.Wrap(err, "failed to generate cipher from generated key") } jek, err := keywrap(block, cek) if err != nil { return nil, errors.Wrap(err, "failed to wrap data") } bwpk.ByteKey = ByteKey(jek) return bwpk, nil } // NewEcdhesKeyWrapDecrypt creates a new key decrypter using ECDH-ES func NewEcdhesKeyWrapDecrypt(alg jwa.KeyEncryptionAlgorithm, pubkey *ecdsa.PublicKey, apu, apv []byte, privkey *ecdsa.PrivateKey) *EcdhesKeyWrapDecrypt { return &EcdhesKeyWrapDecrypt{ algorithm: alg, apu: apu, apv: apv, privkey: privkey, pubkey: pubkey, } } // Algorithm returns the key encryption algorithm being used func (kw EcdhesKeyWrapDecrypt) Algorithm() jwa.KeyEncryptionAlgorithm { return kw.algorithm } // KeyDecrypt decrypts the encrypted key using ECDH-ES func (kw EcdhesKeyWrapDecrypt) KeyDecrypt(enckey []byte) ([]byte, error) { var keysize uint32 switch kw.algorithm { case jwa.ECDH_ES_A128KW: keysize = 16 case jwa.ECDH_ES_A192KW: keysize = 24 case jwa.ECDH_ES_A256KW: keysize = 32 default: return nil, errors.Wrap(ErrUnsupportedAlgorithm, "invalid ECDH-ES key wrap algorithm") } privkey := kw.privkey 
pubkey := kw.pubkey pubinfo := make([]byte, 4) binary.BigEndian.PutUint32(pubinfo, keysize*8) z, _ := privkey.PublicKey.Curve.ScalarMult(pubkey.X, pubkey.Y, privkey.D.Bytes()) kdf := concatkdf.New(crypto.SHA256, []byte(kw.algorithm.String()), z.Bytes(), kw.apu, kw.apv, pubinfo, []byte{}) kek := make([]byte, keysize) kdf.Read(kek) block, err := aes.NewCipher(kek) if err != nil { return nil, errors.Wrap(err, "failed to create cipher for ECDH-ES key wrap") } return keyunwrap(block, enckey) } // NewRSAOAEPKeyEncrypt creates a new key encrypter using RSA OAEP func NewRSAOAEPKeyEncrypt(alg jwa.KeyEncryptionAlgorithm, pubkey *rsa.PublicKey) (*RSAOAEPKeyEncrypt, error) { switch alg { case jwa.RSA_OAEP, jwa.RSA_OAEP_256: default: return nil, errors.Wrap(ErrUnsupportedAlgorithm, "invalid RSA OAEP encrypt algorithm") } return &RSAOAEPKeyEncrypt{ alg: alg, pubkey: pubkey, }, nil } // NewRSAPKCSKeyEncrypt creates a new key encrypter using PKCS1v15 func NewRSAPKCSKeyEncrypt(alg jwa.KeyEncryptionAlgorithm, pubkey *rsa.PublicKey) (*RSAPKCSKeyEncrypt, error) { switch alg { case jwa.RSA1_5: default: return nil, errors.Wrap(ErrUnsupportedAlgorithm, "invalid RSA PKCS encrypt algorithm") } return &RSAPKCSKeyEncrypt{ alg: alg, pubkey: pubkey, }, nil } // Algorithm returns the key encryption algorithm being used func (e RSAPKCSKeyEncrypt) Algorithm() jwa.KeyEncryptionAlgorithm { return e.alg } // Kid returns the key ID associated with this encrypter func (e RSAPKCSKeyEncrypt) Kid() string { return e.KeyID } // Algorithm returns the key encryption algorithm being used func (e RSAOAEPKeyEncrypt) Algorithm() jwa.KeyEncryptionAlgorithm { return e.alg } // Kid returns the key ID associated with this encrypter func (e RSAOAEPKeyEncrypt) Kid() string { return e.KeyID } // KeyEncrypt encrypts the content encryption key using RSA PKCS1v15 func (e RSAPKCSKeyEncrypt) KeyEncrypt(cek []byte) (ByteSource, error) { if e.alg != jwa.RSA1_5 {<|fim▁hole|> encrypted, err := rsa.EncryptPKCS1v15(rand.Reader, 
e.pubkey, cek) if err != nil { return nil, errors.Wrap(err, "failed to encrypt using PKCS1v15") } return ByteKey(encrypted), nil } // KeyEncrypt encrypts the content encryption key using RSA OAEP func (e RSAOAEPKeyEncrypt) KeyEncrypt(cek []byte) (ByteSource, error) { var hash hash.Hash switch e.alg { case jwa.RSA_OAEP: hash = sha1.New() case jwa.RSA_OAEP_256: hash = sha256.New() default: return nil, errors.New("failed to generate key encrypter for RSA-OAEP: RSA_OAEP/RSA_OAEP_256 required") } encrypted, err := rsa.EncryptOAEP(hash, rand.Reader, e.pubkey, cek, []byte{}) if err != nil { return nil, errors.Wrap(err, `failed to OAEP encrypt`) } return ByteKey(encrypted), nil } // NewRSAPKCS15KeyDecrypt creates a new decrypter using RSA PKCS1v15 func NewRSAPKCS15KeyDecrypt(alg jwa.KeyEncryptionAlgorithm, privkey *rsa.PrivateKey, keysize int) *RSAPKCS15KeyDecrypt { generator := NewRandomKeyGenerate(keysize * 2) return &RSAPKCS15KeyDecrypt{ alg: alg, privkey: privkey, generator: generator, } } // Algorithm returns the key encryption algorithm being used func (d RSAPKCS15KeyDecrypt) Algorithm() jwa.KeyEncryptionAlgorithm { return d.alg } // KeyDecrypt decryptes the encrypted key using RSA PKCS1v1.5 func (d RSAPKCS15KeyDecrypt) KeyDecrypt(enckey []byte) ([]byte, error) { if debug.Enabled { debug.Printf("START PKCS.KeyDecrypt") } // Hey, these notes and workarounds were stolen from go-jose defer func() { // DecryptPKCS1v15SessionKey sometimes panics on an invalid payload // because of an index out of bounds error, which we want to ignore. // This has been fixed in Go 1.3.1 (released 2014/08/13), the recover() // only exists for preventing crashes with unpatched versions. // See: https://groups.google.com/forum/#!topic/golang-dev/7ihX6Y6kx9k // See: https://code.google.com/p/go/source/detail?r=58ee390ff31602edb66af41ed10901ec95904d33 _ = recover() }() // Perform some input validation. 
expectedlen := d.privkey.PublicKey.N.BitLen() / 8 if expectedlen != len(enckey) { // Input size is incorrect, the encrypted payload should always match // the size of the public modulus (e.g. using a 2048 bit key will // produce 256 bytes of output). Reject this since it's invalid input. return nil, fmt.Errorf( "input size for key decrypt is incorrect (expected %d, got %d)", expectedlen, len(enckey), ) } var err error bk, err := d.generator.KeyGenerate() if err != nil { return nil, errors.New("failed to generate key") } cek := bk.Bytes() // When decrypting an RSA-PKCS1v1.5 payload, we must take precautions to // prevent chosen-ciphertext attacks as described in RFC 3218, "Preventing // the Million Message Attack on Cryptographic Message Syntax". We are // therefore deliberatly ignoring errors here. err = rsa.DecryptPKCS1v15SessionKey(rand.Reader, d.privkey, enckey, cek) if err != nil { return nil, errors.Wrap(err, "failed to decrypt via PKCS1v15") } return cek, nil } // NewRSAOAEPKeyDecrypt creates a new key decrypter using RSA OAEP func NewRSAOAEPKeyDecrypt(alg jwa.KeyEncryptionAlgorithm, privkey *rsa.PrivateKey) (*RSAOAEPKeyDecrypt, error) { switch alg { case jwa.RSA_OAEP, jwa.RSA_OAEP_256: default: return nil, errors.Wrap(ErrUnsupportedAlgorithm, "invalid RSA OAEP decrypt algorithm") } return &RSAOAEPKeyDecrypt{ alg: alg, privkey: privkey, }, nil } // Algorithm returns the key encryption algorithm being used func (d RSAOAEPKeyDecrypt) Algorithm() jwa.KeyEncryptionAlgorithm { return d.alg } // KeyDecrypt decryptes the encrypted key using RSA OAEP func (d RSAOAEPKeyDecrypt) KeyDecrypt(enckey []byte) ([]byte, error) { if debug.Enabled { debug.Printf("START OAEP.KeyDecrypt") } var hash hash.Hash switch d.alg { case jwa.RSA_OAEP: hash = sha1.New() case jwa.RSA_OAEP_256: hash = sha256.New() default: return nil, errors.New("failed to generate key encrypter for RSA-OAEP: RSA_OAEP/RSA_OAEP_256 required") } return rsa.DecryptOAEP(hash, rand.Reader, d.privkey, enckey, 
[]byte{}) } // Decrypt for DirectDecrypt does not do anything other than // return a copy of the embedded key func (d DirectDecrypt) Decrypt() ([]byte, error) { cek := make([]byte, len(d.Key)) copy(cek, d.Key) return cek, nil } var keywrapDefaultIV = []byte{0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6} const keywrapChunkLen = 8 func keywrap(kek cipher.Block, cek []byte) ([]byte, error) { if len(cek)%8 != 0 { return nil, ErrInvalidBlockSize } n := len(cek) / keywrapChunkLen r := make([][]byte, n) for i := 0; i < n; i++ { r[i] = make([]byte, keywrapChunkLen) copy(r[i], cek[i*keywrapChunkLen:]) } buffer := make([]byte, keywrapChunkLen*2) tBytes := make([]byte, keywrapChunkLen) copy(buffer, keywrapDefaultIV) for t := 0; t < 6*n; t++ { copy(buffer[keywrapChunkLen:], r[t%n]) kek.Encrypt(buffer, buffer) binary.BigEndian.PutUint64(tBytes, uint64(t+1)) for i := 0; i < keywrapChunkLen; i++ { buffer[i] = buffer[i] ^ tBytes[i] } copy(r[t%n], buffer[keywrapChunkLen:]) } out := make([]byte, (n+1)*keywrapChunkLen) copy(out, buffer[:keywrapChunkLen]) for i := range r { copy(out[(i+1)*8:], r[i]) } return out, nil } func keyunwrap(block cipher.Block, ciphertxt []byte) ([]byte, error) { if len(ciphertxt)%keywrapChunkLen != 0 { return nil, ErrInvalidBlockSize } n := (len(ciphertxt) / keywrapChunkLen) - 1 r := make([][]byte, n) for i := range r { r[i] = make([]byte, keywrapChunkLen) copy(r[i], ciphertxt[(i+1)*keywrapChunkLen:]) } buffer := make([]byte, keywrapChunkLen*2) tBytes := make([]byte, keywrapChunkLen) copy(buffer[:keywrapChunkLen], ciphertxt[:keywrapChunkLen]) for t := 6*n - 1; t >= 0; t-- { binary.BigEndian.PutUint64(tBytes, uint64(t+1)) for i := 0; i < keywrapChunkLen; i++ { buffer[i] = buffer[i] ^ tBytes[i] } copy(buffer[keywrapChunkLen:], r[t%n]) block.Decrypt(buffer, buffer) copy(r[t%n], buffer[keywrapChunkLen:]) } if subtle.ConstantTimeCompare(buffer[:keywrapChunkLen], keywrapDefaultIV) == 0 { return nil, errors.New("keywrap: failed to unwrap key") } out := 
make([]byte, n*keywrapChunkLen) for i := range r { copy(out[i*keywrapChunkLen:], r[i]) } return out, nil }<|fim▁end|>
return nil, errors.Wrap(ErrUnsupportedAlgorithm, "invalid RSA PKCS encrypt algorithm") }
<|file_name|>aiplatform_generated_aiplatform_v1_dataset_service_list_annotations_sync.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Generated code. DO NOT EDIT! # # Snippet for ListAnnotations # NOTE: This snippet has been automatically generated for illustrative purposes only.<|fim▁hole|># python3 -m pip install google-cloud-aiplatform # [START aiplatform_generated_aiplatform_v1_DatasetService_ListAnnotations_sync] from google.cloud import aiplatform_v1 def sample_list_annotations(): # Create a client client = aiplatform_v1.DatasetServiceClient() # Initialize request argument(s) request = aiplatform_v1.ListAnnotationsRequest( parent="parent_value", ) # Make the request page_result = client.list_annotations(request=request) # Handle the response for response in page_result: print(response) # [END aiplatform_generated_aiplatform_v1_DatasetService_ListAnnotations_sync]<|fim▁end|>
# It may require modifications to work in your environment. # To install the latest published package dependency, execute the following:
<|file_name|>script.ts<|end_file_name|><|fim▁begin|>import { defineComponent } from 'vue'; import ILoader from '@inkline/inkline/components/ILoader/index.vue'; import { LinkableMixin, sizePropValidator, colorVariantClass, defaultPropValue } from '@inkline/inkline/mixins'; import { Classes } from '@inkline/inkline/types'; /** * Slot for default button content * @name default * @kind slot */ /** * Slot for button loading state * @name loading * @kind slot */ const componentName = 'IButton'; export default defineComponent({ name: componentName, components: { ILoader }, mixins: [ LinkableMixin ], inject: { buttonGroup: { default: () => ({}) }, form: { default: () => ({}) }, formGroup: { default: () => ({}) } }, props: { /** * The active state of the button * @type Boolean * @default false * @name active */ active: { type: Boolean, default: false<|fim▁hole|> * @type Boolean * @default false * @name block */ block: { type: Boolean, default: false }, /** * Display the button as a circle * @type Boolean * @default false * @name circle */ circle: { type: Boolean, default: false }, /** * The color variant of the button * @type primary | success | light | dark | info | success | warning | danger | facebook | google | twitter | github * @default light * @name color */ color: { type: String, default: defaultPropValue<string>(componentName, 'color') }, /** * The disabled state of the button * @type Boolean * @default false * @name disabled */ disabled: { type: Boolean, default: false }, /** * Display the button as a link * @type Boolean * @default false * @name link */ link: { type: Boolean, default: false }, /** * The loading state of the button * @type Boolean * @default false * @name loading */ loading: { type: Boolean, default: false }, /** * Display the button as an outline button * @type Boolean * @default false * @name outline */ outline: { type: Boolean, default: false }, /** * Set the HTML tag to be used for rendering the button * @type String * @default button * @name 
tag */ tag: { type: String, default: 'button' }, /** * The tabindex of the button * @type Number | String * @default 0 * @name tabindex */ tabindex: { type: [Number, String], default: 0 }, /** * The size variant of the button * @type sm | md | lg * @default md * @name size */ size: { type: String, default: defaultPropValue<string>(componentName, 'size'), validator: sizePropValidator } }, computed: { ariaBusy () { if (this.role !== 'button') { return null; } return this.loading ? 'true' : 'false'; }, ariaDisabled () { if (this.role !== 'button') { return null; } return this.disabled ? 'true' : 'false'; }, ariaPressed () { if (this.role !== 'button') { return null; } return this.active ? 'true' : 'false'; }, classes (): Classes { return { ...colorVariantClass(this), [`-${this.size}`]: Boolean(this.size), '-active': this.active, '-block': this.block, '-circle': this.circle, '-disabled': this.isDisabled, '-link': this.link, '-outline': this.outline }; }, isDisabled (): boolean { return this.disabled || (this as any).buttonGroup.disabled || (this as any).form.disabled || (this as any).formGroup.disabled; }, role (): string { return this.$attrs.to || this.$attrs.href ? 'link' : 'button'; }, tabIndex (): number | string { return this.isDisabled ? -1 : this.tabindex; } } });<|fim▁end|>
}, /** * Display the button as a block, spanning the full container width
<|file_name|>Collection.js<|end_file_name|><|fim▁begin|>var assert = require('assert'); describe('src/components/model/classes/Collection', function () { var Collection = require('../../../../src/components/model/classes/Collection'); var collection; var handler = function (event, changes) { callbacks[event]++; }; var callbacks = { add: 0, remove: 0, change: 0 }; describe.only('new Collection(obj)', function () { it('Should return a new Collection instance', function (done) { collection = new Collection([1, 2, 3], handler); done(); }); }); describe.only('.push(item)', function () { it('Should call "add" callback', function (done) { collection.push(4); assert(callbacks.add, 1); done(); }); }); describe.only('.unshift(item)', function () { it('Should call "add" callback', function (done) { collection.unshift(0); assert(callbacks.add, 2); done(); }); }); describe.only('.pop()', function () { it('Should call "remove" callback', function (done) { collection.pop(); assert(callbacks.remove, 1); done(); }); }); describe.only('.shift()', function () { it('Should call "remove" callback', function (done) { collection.shift(); assert(callbacks.remove, 2); done(); }); }); describe.only('.splice(index, number)', function () { it('Should call "remove" callback', function (done) { collection.splice(0, 1); assert(callbacks.remove, 3); done(); }); }); describe.only('.reverse()', function () { it('Should call "change" callback', function (done) { collection.reverse(); assert(callbacks.change, 1); done(); }); }); describe.only('.sort()', function () { it('Should call "change" callback', function (done) { collection.sort(function (a, b) {<|fim▁hole|> }); assert(callbacks.change, 2); done(); }); }); });<|fim▁end|>
return -1;
<|file_name|>utils.py<|end_file_name|><|fim▁begin|><|fim▁hole|> result = [item_group] child_groups = frappe.get_list( "Item Group", filters={"parent_item_group": item_group}, fields=["name"] ) child_groups = [child.name for child in child_groups if child not in result] if len(child_groups) > 0: result = result + child_groups for child in child_groups: flat_item_group_tree_list(child, result) return result<|fim▁end|>
import frappe def flat_item_group_tree_list(item_group, result=None): if not result:
<|file_name|>gunicorn.conf.py<|end_file_name|><|fim▁begin|>wsgi_app = "weasyl.wsgi:make_wsgi_app()" proc_name = "weasyl" preload_app = False<|fim▁hole|> 'X-FORWARDED-PROTO': 'https', } forwarded_allow_ips = '*'<|fim▁end|>
secure_scheme_headers = {
<|file_name|>baseUtils.ts<|end_file_name|><|fim▁begin|>export interface BaseUtils { /** * Returns whether or not the string is a valid protocol address *<|fim▁hole|> * @param {string} address - the address to be validated * @returns {boolean} - the validation result */ isValidAddress(address: string): boolean; /** * Returns whether or not the string is a valid protocol transaction id or not * * @param {string} txId - the transaction id to be validated * @returns {boolean} - the validation result */ isValidTransactionId(txId: string): boolean; /** * Returns whether or not the string is a valid protocol public key * * @param {string} key - the public key to be validated * @returns {boolean} - the validation result */ isValidPublicKey(key: string): boolean; /** * Returns whether or not the string is a valid protocol private key * * @param {string} key - the private key to be validated * @returns {boolean} - the validation result */ isValidPrivateKey(key: string): boolean; /** * Returns whether or not the string is a valid protocol signature * * @param {string} signature - the signature to validate * @returns {boolean} - the validation result */ isValidSignature(signature: string): boolean; /** * Returns whether or not the string is a valid protocol block hash * * @param {string} hash - the address to validate * @returns {boolean} - the validation result */ isValidBlockId(hash: string): boolean; }<|fim▁end|>
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # Copyright 2011 Justin Santa Barbara # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Utilities and helper functions.""" import contextlib import copy import datetime import errno import functools import hashlib import hmac import inspect import logging as std_logging import os import pyclbr import random import re import shutil import socket import struct import sys import tempfile import time from xml.sax import saxutils import eventlet import netaddr from oslo_concurrency import lockutils from oslo_concurrency import processutils from oslo_config import cfg from oslo_context import context as common_context from oslo_log import log as logging import oslo_messaging as messaging from oslo_utils import encodeutils from oslo_utils import excutils from oslo_utils import importutils from oslo_utils import strutils from oslo_utils import timeutils from oslo_utils import units import six from six.moves import range from nova import exception from nova.i18n import _, _LE, _LI, _LW notify_decorator = 'nova.notifications.notify_decorator' monkey_patch_opts = [ cfg.BoolOpt('monkey_patch', default=False, help='Whether to log monkey patching'), cfg.ListOpt('monkey_patch_modules', default=[ 'nova.api.ec2.cloud:%s' % (notify_decorator), 'nova.compute.api:%s' % (notify_decorator) ], 
help='List of modules/decorators to monkey patch'), ] utils_opts = [ cfg.IntOpt('password_length', default=12, help='Length of generated instance admin passwords'), cfg.StrOpt('instance_usage_audit_period', default='month', help='Time period to generate instance usages for. ' 'Time period must be hour, day, month or year'), cfg.BoolOpt('use_rootwrap_daemon', default=False, help="Start and use a daemon that can run the commands that " "need to be run with root privileges. This option is " "usually enabled on nodes that run nova compute " "processes"), cfg.StrOpt('rootwrap_config', default="/etc/nova/rootwrap.conf", help='Path to the rootwrap configuration file to use for ' 'running commands as root'), cfg.StrOpt('tempdir', help='Explicitly specify the temporary working directory'), ] workarounds_opts = [ cfg.BoolOpt('disable_rootwrap', default=False, help='This option allows a fallback to sudo for performance ' 'reasons. For example see ' 'https://bugs.launchpad.net/nova/+bug/1415106'), cfg.BoolOpt('disable_libvirt_livesnapshot', default=True, help='When using libvirt 1.2.2 live snapshots fail ' 'intermittently under load. This config option provides ' 'a mechanism to enable live snapshot while this is ' 'resolved. See ' 'https://bugs.launchpad.net/nova/+bug/1334398'), cfg.BoolOpt('destroy_after_evacuate', default=True, deprecated_for_removal=True, help='DEPRECATED: Whether to destroy ' 'instances on startup when we suspect ' 'they have previously been evacuated. This can result in ' 'data loss if undesired. See ' 'https://launchpad.net/bugs/1419785'), cfg.BoolOpt('handle_virt_lifecycle_events', default=True, help="Whether or not to handle events raised from the compute " "driver's 'emit_event' method. These are lifecycle " "events raised from compute drivers that implement the " "method. An example of a lifecycle event is an instance " "starting or stopping. 
If the instance is going through " "task state changes due to an API operation, like " "resize, the events are ignored. However, this is an " "advanced feature which allows the hypervisor to signal " "to the compute service that an unexpected state change " "has occurred in an instance and the instance can be " "shutdown automatically - which can inherently race in " "reboot operations or when the compute service or host " "is rebooted, either planned or due to an unexpected " "outage. Care should be taken when using this and " "sync_power_state_interval is negative since then if any " "instances are out of sync between the hypervisor and " "the Nova database they will have to be synchronized " "manually. See https://bugs.launchpad.net/bugs/1444630"), ] """ The workarounds_opts group is for very specific reasons. If you're: - Working around an issue in a system tool (e.g. libvirt or qemu) where the fix is in flight/discussed in that community. - The tool can be/is fixed in some distributions and rather than patch the code those distributions can trivially set a config option to get the "correct" behavior. Then this is a good place for your workaround. .. warning:: Please use with care! Document the BugID that your workaround is paired with. """ CONF = cfg.CONF CONF.register_opts(monkey_patch_opts) CONF.register_opts(utils_opts) CONF.import_opt('network_api_class', 'nova.network') CONF.register_opts(workarounds_opts, group='workarounds') LOG = logging.getLogger(__name__) # used in limits TIME_UNITS = { 'SECOND': 1, 'MINUTE': 60, 'HOUR': 3600, 'DAY': 86400 } _IS_NEUTRON = None synchronized = lockutils.synchronized_with_prefix('nova-') SM_IMAGE_PROP_PREFIX = "image_" SM_INHERITABLE_KEYS = ( 'min_ram', 'min_disk', 'disk_format', 'container_format', ) # Keys which hold large structured data that won't fit in the # size constraints of the system_metadata table, so we avoid # storing and/or loading them. 
SM_SKIP_KEYS = (
    # Legacy names
    'mappings', 'block_device_mapping',
    # Modern names
    'img_mappings', 'img_block_device_mapping',
)

# Image attributes which Cinder stores in volume image metadata
# as regular properties
VIM_IMAGE_ATTRIBUTES = (
    'image_id', 'image_name', 'size', 'checksum',
    'container_format', 'disk_format', 'min_ram', 'min_disk',
)

# Cache used by read_cached_file()/delete_cached_file():
# {filename: {'data': ..., 'mtime': ...}}
_FILE_CACHE = {}


def vpn_ping(address, port, timeout=0.05, session_id=None):
    """Sends a vpn negotiation packet and returns the server session.

    Returns Boolean indicating whether the vpn_server is listening.
    Basic packet structure is below.

    Client packet (14 bytes)::

         0 1        8 9  13
        +-+--------+-----+
        |x| cli_id |?????|
        +-+--------+-----+
        x = packet identifier 0x38
        cli_id = 64 bit identifier
        ? = unknown, probably flags/padding

    Server packet (26 bytes)::

         0 1        8 9  13 14    21 2225
        +-+--------+-----+--------+----+
        |x| srv_id |?????| cli_id |????|
        +-+--------+-----+--------+----+
        x = packet identifier 0x40
        cli_id = 64 bit identifier
        ? = unknown, probably flags/padding
        bit 9 was 1 and the rest were 0 in testing

    """
    # NOTE(tonyb) session_id isn't used for a real VPN connection so using a
    # cryptographically weak value is fine.
    if session_id is None:
        session_id = random.randint(0, 0xffffffffffffffff)
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    # '!BQxxxxx' = network byte order: 1-byte id (0x38), 8-byte session id,
    # 5 padding bytes -> the 14-byte client packet described above.
    data = struct.pack('!BQxxxxx', 0x38, session_id)
    sock.sendto(data, (address, port))
    sock.settimeout(timeout)
    try:
        received = sock.recv(2048)
    except socket.timeout:
        # No reply within `timeout` seconds -> nothing listening.
        return False
    finally:
        sock.close()
    # Expected 26-byte server reply layout (see docstring).
    fmt = '!BQxxxxxQxxxx'
    if len(received) != struct.calcsize(fmt):
        LOG.warning(_LW('Expected to receive %(exp)s bytes, '
                        'but actually %(act)s'),
                    dict(exp=struct.calcsize(fmt), act=len(received)))
        return False
    (identifier, server_sess, client_sess) = struct.unpack(fmt, received)
    # Valid reply: server identifier 0x40 echoing our session id back.
    return (identifier == 0x40 and client_sess == session_id)
def get_root_helper():
    # Falling back to plain sudo is a performance workaround; see
    # https://bugs.launchpad.net/nova/+bug/1415106
    if CONF.workarounds.disable_rootwrap:
        cmd = 'sudo'
    else:
        cmd = 'sudo nova-rootwrap %s' % CONF.rootwrap_config
    return cmd


def _get_rootwrap_helper():
    # Select between the long-lived rootwrap daemon (cheaper per call) and
    # spawning a rootwrap process per command.
    if CONF.use_rootwrap_daemon:
        return RootwrapDaemonHelper(CONF.rootwrap_config)
    else:
        return RootwrapProcessHelper()


class RootwrapProcessHelper(object):
    """Runs privileged commands by forking a rootwrap process per call."""

    def trycmd(self, *cmd, **kwargs):
        kwargs['root_helper'] = get_root_helper()
        return processutils.trycmd(*cmd, **kwargs)

    def execute(self, *cmd, **kwargs):
        kwargs['root_helper'] = get_root_helper()
        return processutils.execute(*cmd, **kwargs)


class RootwrapDaemonHelper(RootwrapProcessHelper):
    """Runs privileged commands through a persistent rootwrap daemon."""

    # One daemon client per rootwrap config file, shared across instances.
    _clients = {}

    @synchronized('daemon-client-lock')
    def _get_client(cls, rootwrap_config):
        # NOTE(review): upstream defines this as a @classmethod; as written
        # here 'cls' is actually the instance (it is invoked as
        # self._get_client(...)), which still works because _clients is a
        # class attribute reachable through the instance -- confirm against
        # the canonical source before changing.
        try:
            return cls._clients[rootwrap_config]
        except KeyError:
            # Import lazily so oslo_rootwrap is only required when the
            # daemon mode is actually enabled.
            from oslo_rootwrap import client
            new_client = client.Client([
                "sudo", "nova-rootwrap-daemon", rootwrap_config])
            cls._clients[rootwrap_config] = new_client
            return new_client

    def __init__(self, rootwrap_config):
        self.client = self._get_client(rootwrap_config)

    def trycmd(self, *args, **kwargs):
        # Mirrors processutils.trycmd(): never raises for command failure,
        # returns (out, err) with err holding the failure text instead.
        discard_warnings = kwargs.pop('discard_warnings', False)
        try:
            out, err = self.execute(*args, **kwargs)
            failed = False
        except processutils.ProcessExecutionError as exn:
            out, err = '', six.text_type(exn)
            failed = True
        if not failed and discard_warnings and err:
            # Handle commands that output to stderr but otherwise succeed
            err = ''
        return out, err

    def execute(self, *cmd, **kwargs):
        # NOTE(dims): This method is to provide compatibility with the
        # processutils.execute interface. So that calling daemon or direct
        # rootwrap to honor the same set of flags in kwargs and to ensure
        # that we don't regress any current behavior.
        cmd = [str(c) for c in cmd]
        loglevel = kwargs.pop('loglevel', std_logging.DEBUG)
        log_errors = kwargs.pop('log_errors', None)
        process_input = kwargs.pop('process_input', None)
        delay_on_retry = kwargs.pop('delay_on_retry', True)
        attempts = kwargs.pop('attempts', 1)
        check_exit_code = kwargs.pop('check_exit_code', [0])
        ignore_exit_code = False
        # Normalize check_exit_code to a list of acceptable codes; a bool
        # toggles whether the exit code is checked at all.
        if isinstance(check_exit_code, bool):
            ignore_exit_code = not check_exit_code
            check_exit_code = [0]
        elif isinstance(check_exit_code, int):
            check_exit_code = [check_exit_code]
        # Mask passwords before anything reaches the logs.
        sanitized_cmd = strutils.mask_password(' '.join(cmd))
        LOG.info(_LI('Executing RootwrapDaemonHelper.execute '
                     'cmd=[%(cmd)r] kwargs=[%(kwargs)r]'),
                 {'cmd': sanitized_cmd, 'kwargs': kwargs})
        while attempts > 0:
            attempts -= 1
            try:
                start_time = time.time()
                LOG.log(loglevel, _('Running cmd (subprocess): %s'),
                        sanitized_cmd)

                (returncode, out, err) = self.client.execute(
                    cmd, process_input)

                end_time = time.time() - start_time
                LOG.log(loglevel,
                        'CMD "%(sanitized_cmd)s" returned: %(return_code)s '
                        'in %(end_time)0.3fs',
                        {'sanitized_cmd': sanitized_cmd,
                         'return_code': returncode,
                         'end_time': end_time})

                if not ignore_exit_code and returncode not in check_exit_code:
                    out = strutils.mask_password(out)
                    err = strutils.mask_password(err)
                    raise processutils.ProcessExecutionError(
                        exit_code=returncode,
                        stdout=out,
                        stderr=err,
                        cmd=sanitized_cmd)
                return (out, err)
            except processutils.ProcessExecutionError as err:
                # if we want to always log the errors or if this is
                # the final attempt that failed and we want to log that.
                if log_errors == processutils.LOG_ALL_ERRORS or (
                        log_errors == processutils.LOG_FINAL_ERROR and
                        not attempts):
                    format = _('%(desc)r\ncommand: %(cmd)r\n'
                               'exit code: %(code)r\nstdout: %(stdout)r\n'
                               'stderr: %(stderr)r')
                    LOG.log(loglevel, format, {"desc": err.description,
                                               "cmd": err.cmd,
                                               "code": err.exit_code,
                                               "stdout": err.stdout,
                                               "stderr": err.stderr})
                if not attempts:
                    LOG.log(loglevel, _('%r failed. Not Retrying.'),
                            sanitized_cmd)
                    raise
                else:
                    LOG.log(loglevel, _('%r failed. Retrying.'),
                            sanitized_cmd)
                    if delay_on_retry:
                        # Random 0.2-2.0s backoff between retries.
                        time.sleep(random.randint(20, 200) / 100.0)
def execute(*cmd, **kwargs):
    """Convenience wrapper around oslo's execute() method.

    Routes privileged commands through the rootwrap daemon or a rootwrap
    process depending on configuration; everything else goes straight to
    processutils.execute().
    """
    if kwargs.get('run_as_root'):
        if CONF.use_rootwrap_daemon:
            helper = RootwrapDaemonHelper(CONF.rootwrap_config)
        else:
            helper = RootwrapProcessHelper()
        return helper.execute(*cmd, **kwargs)
    return processutils.execute(*cmd, **kwargs)


def ssh_execute(dest, *cmd, **kwargs):
    """Convenience wrapper to execute a command on a remote host via ssh.

    BatchMode=yes makes ssh fail rather than prompt for credentials.
    """
    ssh_cmd = ['ssh', '-o', 'BatchMode=yes', dest]
    ssh_cmd.extend(cmd)
    return execute(*ssh_cmd, **kwargs)


def trycmd(*args, **kwargs):
    """Convenience wrapper around oslo's trycmd() method.

    Like execute(), but command failures are reported through the returned
    (out, err) pair instead of an exception.
    """
    if kwargs.get('run_as_root', False):
        if CONF.use_rootwrap_daemon:
            helper = RootwrapDaemonHelper(CONF.rootwrap_config)
        else:
            helper = RootwrapProcessHelper()
        return helper.trycmd(*args, **kwargs)
    return processutils.trycmd(*args, **kwargs)


def novadir():
    """Return the path of the directory containing the nova package."""
    import nova
    nova_init = os.path.abspath(nova.__file__)
    return nova_init.split('nova/__init__.py')[0]


def generate_uid(topic, size=8):
    """Return '<topic>-<suffix>' with a random lowercase/digit suffix.

    (Note: the symbol string below contains '0' twice; kept verbatim for
    compatibility with the original distribution.)
    """
    characters = '01234567890abcdefghijklmnopqrstuvwxyz'
    suffix = ''.join(random.choice(characters) for _x in range(size))
    return '%s-%s' % (topic, suffix)


# Default symbols to use for passwords. Avoids visually confusing characters.
# ~6 bits per symbol
DEFAULT_PASSWORD_SYMBOLS = ('23456789',  # Removed: 0,1
                            'ABCDEFGHJKLMNPQRSTUVWXYZ',  # Removed: I, O
                            'abcdefghijkmnopqrstuvwxyz')  # Removed: l

# ~5 bits per symbol
EASIER_PASSWORD_SYMBOLS = ('23456789',  # Removed: 0, 1
                           'ABCDEFGHJKLMNPQRSTUVWXYZ')  # Removed: I, O


def last_completed_audit_period(unit=None, before=None):
    """This method gives you the most recently *completed* audit period.

    arguments:
            units: string, one of 'hour', 'day', 'month', 'year'
                    Periods normally begin at the beginning (UTC) of the
                    period unit (So a 'day' period begins at midnight UTC,
                    a 'month' unit on the 1st, a 'year' on Jan, 1)
                    unit string may be appended with an optional offset
                    like so:  'day@18'  This will begin the period at 18:00
                    UTC.  'month@15' starts a monthly period on the 15th,
                    and year@3 begins a yearly one on March 1st.
            before: Give the audit period most recently completed before
                    <timestamp>. Defaults to now.

    returns:  2 tuple of datetimes (begin, end)
              The begin timestamp of this audit period is the same as the
              end of the previous.
    """
    if not unit:
        unit = CONF.instance_usage_audit_period

    # Optional '@offset' suffix shifts the period start (hour-of-day for
    # 'day', day-of-month for 'month', month-of-year for 'year', ...).
    offset = 0
    if '@' in unit:
        unit, offset = unit.split("@", 1)
        offset = int(offset)

    if before is not None:
        rightnow = before
    else:
        rightnow = timeutils.utcnow()
    if unit not in ('month', 'day', 'year', 'hour'):
        raise ValueError('Time period must be hour, day, month or year')
    if unit == 'month':
        if offset == 0:
            offset = 1
        end = datetime.datetime(day=offset,
                                month=rightnow.month,
                                year=rightnow.year)
        if end >= rightnow:
            # Current period hasn't completed yet; step back one month.
            year = rightnow.year
            if 1 >= rightnow.month:
                year -= 1
                month = 12 + (rightnow.month - 1)
            else:
                month = rightnow.month - 1
            end = datetime.datetime(day=offset,
                                    month=month,
                                    year=year)
        year = end.year
        if 1 >= end.month:
            year -= 1
            month = 12 + (end.month - 1)
        else:
            month = end.month - 1
        begin = datetime.datetime(day=offset, month=month, year=year)
    elif unit == 'year':
        if offset == 0:
            offset = 1
        end = datetime.datetime(day=1, month=offset, year=rightnow.year)
        if end >= rightnow:
            end = datetime.datetime(day=1,
                                    month=offset,
                                    year=rightnow.year - 1)
            begin = datetime.datetime(day=1,
                                      month=offset,
                                      year=rightnow.year - 2)
        else:
            begin = datetime.datetime(day=1,
                                      month=offset,
                                      year=rightnow.year - 1)
    elif unit == 'day':
        end = datetime.datetime(hour=offset,
                                day=rightnow.day,
                                month=rightnow.month,
                                year=rightnow.year)
        if end >= rightnow:
            end = end - datetime.timedelta(days=1)
        begin = end - datetime.timedelta(days=1)
    elif unit == 'hour':
        end = rightnow.replace(minute=offset, second=0, microsecond=0)
        if end >= rightnow:
            end = end - datetime.timedelta(hours=1)
        begin = end - datetime.timedelta(hours=1)

    return (begin, end)


def generate_password(length=None, symbolgroups=DEFAULT_PASSWORD_SYMBOLS):
    """Generate a random password from the supplied symbol groups.

    At least one symbol from each group will be included. Unpredictable
    results if length is less than the number of symbol groups.

    Believed to be reasonably secure (with a reasonable password length!)
    """
    if length is None:
        length = CONF.password_length

    r = random.SystemRandom()

    # NOTE(jerdfelt): Some password policies require at least one character
    # from each group of symbols, so start off with one random character
    # from each symbol group
    password = [r.choice(s) for s in symbolgroups]
    # If length < len(symbolgroups), the leading characters will only
    # be from the first length groups. Try our best to not be predictable
    # by shuffling and then truncating.
    r.shuffle(password)
    password = password[:length]
    length -= len(password)

    # then fill with random characters from all symbol groups
    symbols = ''.join(symbolgroups)
    password.extend([r.choice(symbols) for _i in range(length)])

    # finally shuffle to ensure first x characters aren't from a
    # predictable group
    r.shuffle(password)

    return ''.join(password)


def get_my_linklocal(interface):
    """Return the IPv6 link-local address configured on `interface`.

    :raises: NovaException when no link-local address can be found.
    """
    try:
        if_str = execute('ip', '-f', 'inet6', '-o', 'addr', 'show', interface)
        # Raw string avoids deprecated '\s'/'\d' escape sequences in a
        # non-raw literal (DeprecationWarning on Python >= 3.6).
        condition = r'\s+inet6\s+([0-9a-f:]+)/\d+\s+scope\s+link'
        links = [re.search(condition, x) for x in if_str[0].split('\n')]
        address = [w.group(1) for w in links if w is not None]
        if address[0] is not None:
            return address[0]
        else:
            msg = _('Link Local address is not found.:%s') % if_str
            raise exception.NovaException(msg)
    except Exception as ex:
        msg = _("Couldn't get Link Local IP of %(interface)s"
                " :%(ex)s") % {'interface': interface, 'ex': ex}
        raise exception.NovaException(msg)


def xhtml_escape(value):
    """Escapes a string so it is valid within XML or XHTML.

    In addition to the standard &, < and >, double and single quotes are
    escaped so the result is safe inside attribute values.
    """
    return saxutils.escape(value, {'"': '&quot;', "'": '&apos;'})


def utf8(value):
    """Try to turn a string into utf-8 if possible.

    Code is directly from the utf8 function in
    http://github.com/facebook/tornado/blob/master/tornado/escape.py
    """
    if isinstance(value, six.text_type):
        return value.encode('utf-8')
    assert isinstance(value, str)
    return value
def check_isinstance(obj, cls):
    """Checks that obj is of type cls, and lets PyLint infer types."""
    if not isinstance(obj, cls):
        raise Exception(_('Expected object of type: %s') % (str(cls)))
    return obj


def parse_server_string(server_str):
    """Split a server string into a (host, port) tuple.

    The port element is '' when the input carries no port; both elements
    are '' when the input cannot be parsed at all.
    """
    try:
        # A bare IPv6 address (no port) must be recognised before we start
        # splitting on ':'.
        if netaddr.valid_ipv6(server_str):
            return (server_str, '')

        # '[IPv6]:port' notation.
        if server_str.find("]:") != -1:
            (address, port) = server_str.replace('[', '', 1).split(']:')
            return (address, port)

        # No colon at all: bare IPv4 address or hostname.
        if server_str.find(':') == -1:
            return (server_str, '')

        # What remains must be 'host:port'.
        (address, port) = server_str.split(':')
        return (address, port)

    except (ValueError, netaddr.AddrFormatError):
        LOG.error(_LE('Invalid server_string: %s'), server_str)
        return ('', '')


def is_valid_ipv6_cidr(address):
    """Return True when `address` parses as an IPv6 CIDR."""
    try:
        netaddr.IPNetwork(address, version=6).cidr
        return True
    except (TypeError, netaddr.AddrFormatError):
        return False


def get_shortened_ipv6(address):
    """Return the compressed canonical form of an IPv6 address."""
    addr = netaddr.IPAddress(address, version=6)
    return str(addr.ipv6())


def get_shortened_ipv6_cidr(address):
    """Return the compressed canonical CIDR form of an IPv6 network."""
    net = netaddr.IPNetwork(address, version=6)
    return str(net.cidr)


def is_valid_cidr(address):
    """Check if address is valid

    The provided address can be a IPv6 or a IPv4
    CIDR address.
    """
    try:
        # Validate the correct CIDR Address
        netaddr.IPNetwork(address)
    except netaddr.AddrFormatError:
        return False

    # Prior validation partially verify /xx part
    # Verify it here
    ip_segment = address.split('/')

    if (len(ip_segment) <= 1 or
            ip_segment[1] == ''):
        return False

    return True
""" try: # Validate the correct CIDR Address netaddr.IPNetwork(address) except netaddr.AddrFormatError: return False # Prior validation partially verify /xx part # Verify it here ip_segment = address.split('/') if (len(ip_segment) <= 1 or ip_segment[1] == ''): return False return True def get_ip_version(network): """Returns the IP version of a network (IPv4 or IPv6). Raises AddrFormatError if invalid network. """ if netaddr.IPNetwork(network).version == 6: return "IPv6" elif netaddr.IPNetwork(network).version == 4: return "IPv4" def safe_ip_format(ip): """Transform ip string to "safe" format. Will return ipv4 addresses unchanged, but will nest ipv6 addresses inside square brackets. """ try: if netaddr.IPAddress(ip).version == 6: return '[%s]' % ip except (TypeError, netaddr.AddrFormatError): # hostname pass # it's IPv4 or hostname return ip def monkey_patch(): """If the CONF.monkey_patch set as True, this function patches a decorator for all functions in specified modules. You can set decorators for each modules using CONF.monkey_patch_modules. The format is "Module path:Decorator function". Example: 'nova.api.ec2.cloud:nova.notifications.notify_decorator' Parameters of the decorator is as follows. (See nova.notifications.notify_decorator) name - name of the function function - object of the function """ # If CONF.monkey_patch is not True, this function do nothing. 
def monkey_patch():
    """If the CONF.monkey_patch set as True,
    this function patches a decorator
    for all functions in specified modules.

    You can set decorators for each modules
    using CONF.monkey_patch_modules.
    The format is "Module path:Decorator function".
    Example:
      'nova.api.ec2.cloud:nova.notifications.notify_decorator'

    Parameters of the decorator is as follows.
    (See nova.notifications.notify_decorator)

    name - name of the function
    function - object of the function
    """
    # If CONF.monkey_patch is not True, this function do nothing.
    if not CONF.monkey_patch:
        return
    if six.PY3:
        def is_method(obj):
            # Unbound methods became regular functions on Python 3
            return inspect.ismethod(obj) or inspect.isfunction(obj)
    else:
        is_method = inspect.ismethod
    # Get list of modules and decorators
    for module_and_decorator in CONF.monkey_patch_modules:
        module, decorator_name = module_and_decorator.split(':')
        # import decorator function
        decorator = importutils.import_class(decorator_name)
        __import__(module)
        # Retrieve module information using pyclbr
        module_data = pyclbr.readmodule_ex(module)
        for key, value in module_data.items():
            # set the decorator for the class methods
            if isinstance(value, pyclbr.Class):
                clz = importutils.import_class("%s.%s" % (module, key))
                # NOTE(review): each method is wrapped with the configured
                # decorator, labelled 'module.Class.method'.
                for method, func in inspect.getmembers(clz, is_method):
                    setattr(clz, method,
                            decorator("%s.%s.%s" % (module, key,
                                                    method), func))
            # set the decorator for the function
            if isinstance(value, pyclbr.Function):
                func = importutils.import_class("%s.%s" % (module, key))
                setattr(sys.modules[module], key,
                        decorator("%s.%s" % (module, key), func))


def convert_to_list_dict(lst, label):
    """Convert a value or list into a list of dicts.

    Returns None for a falsy input; a scalar becomes a one-element list.
    Each element x is wrapped as {label: x}.
    """
    if not lst:
        return None
    if not isinstance(lst, list):
        lst = [lst]
    return [{label: x} for x in lst]


def make_dev_path(dev, partition=None, base='/dev'):
    """Return a path to a particular device.

    >>> make_dev_path('xvdc')
    /dev/xvdc

    >>> make_dev_path('xvdc', 1)
    /dev/xvdc1
    """
    path = os.path.join(base, dev)
    if partition:
        path += str(partition)
    return path
""" def truncate_hostname(name): if len(name) > 63: LOG.warning(_LW("Hostname %(hostname)s is longer than 63, " "truncate it to %(truncated_name)s"), {'hostname': name, 'truncated_name': name[:63]}) return name[:63] if isinstance(hostname, six.text_type): # Remove characters outside the Unicode range U+0000-U+00FF hostname = hostname.encode('latin-1', 'ignore') if six.PY3: hostname = hostname.decode('latin-1') hostname = re.sub('[ _]', '-', hostname) hostname = re.sub('[^\w.-]+', '', hostname) hostname = hostname.lower() hostname = hostname.strip('.-') # NOTE(eliqiao): set hostname to default_display_name to avoid # empty hostname if hostname == "" and default_name is not None: return truncate_hostname(default_name) return truncate_hostname(hostname) @contextlib.contextmanager def temporary_mutation(obj, **kwargs): """Temporarily set the attr on a particular object to a given value then revert when finished. One use of this is to temporarily set the read_deleted flag on a context object: with temporary_mutation(context, read_deleted="yes"): do_something_that_needed_deleted_objects() """ def is_dict_like(thing): return hasattr(thing, 'has_key') def get(thing, attr, default): if is_dict_like(thing): return thing.get(attr, default) else: return getattr(thing, attr, default) def set_value(thing, attr, val): if is_dict_like(thing): thing[attr] = val else: setattr(thing, attr, val) def delete(thing, attr): if is_dict_like(thing): del thing[attr] else: delattr(thing, attr) NOT_PRESENT = object() old_values = {} for attr, new_value in kwargs.items(): old_values[attr] = get(obj, attr, NOT_PRESENT) set_value(obj, attr, new_value) try: yield finally: for attr, old_value in old_values.items(): if old_value is NOT_PRESENT: delete(obj, attr) else: set_value(obj, attr, old_value) def generate_mac_address(): """Generate an Ethernet MAC address.""" # NOTE(vish): We would prefer to use 0xfe here to ensure that linux # bridge mac addresses don't change, but it appears to # conflict 
def generate_mac_address():
    """Generate an Ethernet MAC address."""
    # NOTE(vish): We would prefer to use 0xfe here to ensure that linux
    #             bridge mac addresses don't change, but it appears to
    #             conflict with libvirt, so we use the next highest octet
    #             that has the unicast and locally administered bits set
    #             properly: 0xfa.
    #             Discussion: https://bugs.launchpad.net/nova/+bug/921838
    octets = [0xfa, 0x16, 0x3e]
    octets.extend(random.randint(0x00, 0xff) for _ in range(3))
    return ':'.join('%02x' % octet for octet in octets)


def read_file_as_root(file_path):
    """Secure helper to read file as root."""
    try:
        out, _err = execute('cat', file_path, run_as_root=True)
    except processutils.ProcessExecutionError:
        raise exception.FileNotFound(file_path=file_path)
    return out


@contextlib.contextmanager
def temporary_chown(path, owner_uid=None):
    """Temporarily chown a path.

    :param owner_uid: UID of temporary owner (defaults to current user)
    """
    if owner_uid is None:
        owner_uid = os.getuid()

    orig_uid = os.stat(path).st_uid

    needs_chown = orig_uid != owner_uid
    if needs_chown:
        execute('chown', owner_uid, path, run_as_root=True)
    try:
        yield
    finally:
        # Hand ownership back only if we actually changed it.
        if needs_chown:
            execute('chown', orig_uid, path, run_as_root=True)


@contextlib.contextmanager
def tempdir(**kwargs):
    """Yield a freshly created temporary directory, removing it on exit.

    The configured 'tempdir' option is used as the parent directory unless
    the caller supplies one explicitly.
    """
    argdict = kwargs.copy()
    if 'dir' not in argdict:
        argdict['dir'] = CONF.tempdir
    tmpdir = tempfile.mkdtemp(**argdict)
    try:
        yield tmpdir
    finally:
        try:
            shutil.rmtree(tmpdir)
        except OSError as e:
            # Best-effort cleanup: log and carry on.
            LOG.error(_LE('Could not remove tmpdir: %s'), e)


def walk_class_hierarchy(clazz, encountered=None):
    """Walk class hierarchy, yielding most derived classes first."""
    if not encountered:
        encountered = []
    for subclass in clazz.__subclasses__():
        if subclass not in encountered:
            encountered.append(subclass)
            # drill down to leaves first
            for subsubclass in walk_class_hierarchy(subclass, encountered):
                yield subsubclass
            yield subclass
""" def __init__(self): self.undo_stack = [] def undo_with(self, undo_func): self.undo_stack.append(undo_func) def _rollback(self): for undo_func in reversed(self.undo_stack): undo_func() def rollback_and_reraise(self, msg=None, **kwargs): """Rollback a series of actions then re-raise the exception. .. note:: (sirp) This should only be called within an exception handler. """ with excutils.save_and_reraise_exception(): if msg: LOG.exception(msg, **kwargs) self._rollback() def mkfs(fs, path, label=None, run_as_root=False): """Format a file or block device :param fs: Filesystem type (examples include 'swap', 'ext3', 'ext4' 'btrfs', etc.) :param path: Path to file or block device to format :param label: Volume label to use """ if fs == 'swap': args = ['mkswap'] else: args = ['mkfs', '-t', fs] # add -F to force no interactive execute on non-block device. if fs in ('ext3', 'ext4', 'ntfs'): args.extend(['-F']) if label: if fs in ('msdos', 'vfat'): label_opt = '-n' else: label_opt = '-L' args.extend([label_opt, label]) args.append(path) execute(*args, run_as_root=run_as_root) def last_bytes(file_like_object, num): """Return num bytes from the end of the file, and remaining byte count. :param file_like_object: The file to read :param num: The number of bytes to return :returns (data, remaining) """ try: file_like_object.seek(-num, os.SEEK_END) except IOError as e: # seek() fails with EINVAL when trying to go before the start of the # file. It means that num is larger than the file size, so just # go to the start. 
def metadata_to_dict(metadata, filter_deleted=False):
    """Flatten a list of metadata item rows into a {key: value} dict.

    :param metadata: iterable of dicts carrying 'key', 'value' and
        optionally a 'deleted' flag
    :param filter_deleted: NOTE(review): despite the name, True means
        deleted rows are *included*; the default (False) skips rows whose
        'deleted' flag is truthy. Callers such as instance_sys_meta()
        rely on this inverted behaviour, so it is preserved as-is.
    """
    result = {}
    for item in metadata:
        if not filter_deleted and item.get('deleted'):
            continue
        result[item['key']] = item['value']
    return result


def dict_to_metadata(metadata):
    """Inverse of metadata_to_dict(): expand a dict into metadata rows."""
    result = []
    for key, value in six.iteritems(metadata):
        result.append(dict(key=key, value=value))
    return result


def instance_meta(instance):
    """Return an instance's metadata as a plain dict."""
    if isinstance(instance['metadata'], dict):
        return instance['metadata']
    else:
        return metadata_to_dict(instance['metadata'])


def instance_sys_meta(instance):
    """Return an instance's system_metadata as a plain dict.

    Deleted rows are intentionally retained (see metadata_to_dict note).
    """
    if not instance.get('system_metadata'):
        return {}
    if isinstance(instance['system_metadata'], dict):
        return instance['system_metadata']
    else:
        return metadata_to_dict(instance['system_metadata'],
                                filter_deleted=True)


def get_wrapped_function(function):
    """Get the method at the bottom of a stack of decorators."""
    if not hasattr(function, '__closure__') or not function.__closure__:
        return function

    def _get_wrapped_function(function):
        if not hasattr(function, '__closure__') or not function.__closure__:
            return None

        for closure in function.__closure__:
            func = closure.cell_contents

            deeper_func = _get_wrapped_function(func)
            if deeper_func:
                return deeper_func
            elif hasattr(closure.cell_contents, '__call__'):
                return closure.cell_contents

    return _get_wrapped_function(function)


def expects_func_args(*args):
    """Build a decorator-checker verifying a decorator's target accepts
    the named arguments; raises TypeError at decoration time otherwise.
    """
    def _decorator_checker(dec):
        @functools.wraps(dec)
        def _decorator(f):
            base_f = get_wrapped_function(f)
            # NOTE: inspect.getargspec() was deprecated in Python 3 and
            # removed in 3.11; use getfullargspec() when available while
            # keeping Python 2 working.
            if hasattr(inspect, 'getfullargspec'):
                argspec = inspect.getfullargspec(base_f)
            else:
                argspec = inspect.getargspec(base_f)
            arg_names, a, kw = argspec[0], argspec[1], argspec[2]
            if a or kw or set(args) <= set(arg_names):
                # NOTE (ndipanov): We can't really tell if correct stuff will
                # be passed if it's a function with *args or **kwargs so
                # we still carry on and hope for the best
                return dec(f)
            else:
                raise TypeError("Decorated function %(f_name)s does not "
                                "have the arguments expected by the "
                                "decorator %(d_name)s" %
                                {'f_name': base_f.__name__,
                                 'd_name': dec.__name__})
        return _decorator
    return _decorator_checker
class ExceptionHelper(object):
    """Class to wrap another and translate the ClientExceptions raised by its
    function calls to the actual ones.
    """

    def __init__(self, target):
        self._target = target

    def __getattr__(self, name):
        func = getattr(self._target, name)

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except messaging.ExpectedException as e:
                # NOTE: the previous 'raise (exc, None, tb)' was a broken
                # py2->py3 conversion -- on Python 3 it raises a tuple and
                # on Python 2 it dropped the traceback. six.reraise
                # re-raises the wrapped exception with its original
                # traceback on both interpreters.
                six.reraise(*e.exc_info)
        return wrapper


def check_string_length(value, name=None, min_length=0, max_length=None):
    """Check the length of specified string

    :param value: the value of the string
    :param name: the name of the string
    :param min_length: the min_length of the string
    :param max_length: the max_length of the string
    :raises: InvalidInput when value is not a string or out of bounds
    """
    if not isinstance(value, six.string_types):
        if name is None:
            msg = _("The input is not a string or unicode")
        else:
            msg = _("%s is not a string or unicode") % name
        raise exception.InvalidInput(message=msg)

    if name is None:
        name = value

    if len(value) < min_length:
        msg = _("%(name)s has a minimum character requirement of "
                "%(min_length)s.") % {'name': name, 'min_length': min_length}
        raise exception.InvalidInput(message=msg)

    if max_length and len(value) > max_length:
        msg = _("%(name)s has more than %(max_length)s "
                "characters.") % {'name': name, 'max_length': max_length}
        raise exception.InvalidInput(message=msg)


def validate_integer(value, name, min_value=None, max_value=None):
    """Make sure that value is a valid integer, potentially within range.

    :param value: anything convertible through int(str(value))
    :param name: used in error messages
    :returns: the validated integer
    :raises: InvalidInput when not an integer or outside the given range
    """
    try:
        value = int(str(value))
    except (ValueError, UnicodeEncodeError):
        msg = _('%(value_name)s must be an integer')
        raise exception.InvalidInput(reason=(
            msg % {'value_name': name}))

    if min_value is not None:
        if value < min_value:
            msg = _('%(value_name)s must be >= %(min_value)d')
            raise exception.InvalidInput(
                reason=(msg % {'value_name': name,
                               'min_value': min_value}))
    if max_value is not None:
        if value > max_value:
            msg = _('%(value_name)s must be <= %(max_value)d')
            raise exception.InvalidInput(
                reason=(
                    msg % {'value_name': name,
                           'max_value': max_value})
            )
    return value
def spawn(func, *args, **kwargs):
    """Passthrough method for eventlet.spawn.

    This utility exists so that it can be stubbed for testing without
    interfering with the service spawns.

    It will also grab the context from the threadlocal store and add it to
    the store on the new thread.  This allows for continuity in logging the
    context when using this method to spawn a new thread.
    """
    caller_context = common_context.get_current()

    @functools.wraps(func)
    def context_wrapper(*args, **kwargs):
        # NOTE: If update_store is not called after spawn it won't be
        # available for the logger to pull from threadlocal storage.
        if caller_context is not None:
            caller_context.update_store()
        return func(*args, **kwargs)

    return eventlet.spawn(context_wrapper, *args, **kwargs)


def spawn_n(func, *args, **kwargs):
    """Passthrough method for eventlet.spawn_n.

    This utility exists so that it can be stubbed for testing without
    interfering with the service spawns.

    It will also grab the context from the threadlocal store and add it to
    the store on the new thread.  This allows for continuity in logging the
    context when using this method to spawn a new thread.
    """
    caller_context = common_context.get_current()

    @functools.wraps(func)
    def context_wrapper(*args, **kwargs):
        # NOTE: If update_store is not called after spawn_n it won't be
        # available for the logger to pull from threadlocal storage.
        if caller_context is not None:
            caller_context.update_store()
        func(*args, **kwargs)

    eventlet.spawn_n(context_wrapper, *args, **kwargs)


def is_none_string(val):
    """Check if a string represents a None value."""
    return isinstance(val, six.string_types) and val.lower() == 'none'
""" if not isinstance(val, six.string_types): return False return val.lower() == 'none' def convert_version_to_int(version): try: if isinstance(version, six.string_types): version = convert_version_to_tuple(version) if isinstance(version, tuple): return six.moves.reduce(lambda x, y: (x * 1000) + y, version) except Exception: msg = _("Hypervisor version %s is invalid.") % version raise exception.NovaException(msg) def convert_version_to_str(version_int): version_numbers = [] factor = 1000 while version_int != 0: version_number = version_int - (version_int // factor * factor) version_numbers.insert(0, str(version_number)) version_int = version_int // factor return six.moves.reduce(lambda x, y: "%s.%s" % (x, y), version_numbers) def convert_version_to_tuple(version_str): return tuple(int(part) for part in version_str.split('.')) def is_neutron(): global _IS_NEUTRON if _IS_NEUTRON is not None: return _IS_NEUTRON try: # compatibility with Folsom/Grizzly configs cls_name = CONF.network_api_class if cls_name == 'nova.network.quantumv2.api.API': cls_name = 'nova.network.neutronv2.api.API' from nova.network.neutronv2 import api as neutron_api _IS_NEUTRON = issubclass(importutils.import_class(cls_name), neutron_api.API) except ImportError: _IS_NEUTRON = False return _IS_NEUTRON def is_auto_disk_config_disabled(auto_disk_config_raw): auto_disk_config_disabled = False if auto_disk_config_raw is not None: adc_lowered = auto_disk_config_raw.strip().lower() if adc_lowered == "disabled": auto_disk_config_disabled = True return auto_disk_config_disabled def get_auto_disk_config_from_instance(instance=None, sys_meta=None): if sys_meta is None: sys_meta = instance_sys_meta(instance) return sys_meta.get("image_auto_disk_config") def get_auto_disk_config_from_image_props(image_properties): return image_properties.get("auto_disk_config") def get_system_metadata_from_image(image_meta, flavor=None): system_meta = {} prefix_format = SM_IMAGE_PROP_PREFIX + '%s' for key, value in 
def get_image_from_system_metadata(system_meta):
    """Reconstruct an image-meta dict from instance system_metadata rows."""
    if not isinstance(system_meta, dict):
        system_meta = metadata_to_dict(system_meta, filter_deleted=True)

    image_meta = {}
    properties = {}
    for raw_key, val in six.iteritems(system_meta):
        if val is None:
            continue

        # NOTE(xqueralt): Not sure this has to inherit all the properties
        # or just the ones we need. Leaving it for now to keep the old
        # behaviour.
        key = raw_key
        if key.startswith(SM_IMAGE_PROP_PREFIX):
            key = key[len(SM_IMAGE_PROP_PREFIX):]

        if key in SM_SKIP_KEYS:
            continue

        if key in SM_INHERITABLE_KEYS:
            image_meta[key] = val
        else:
            properties[key] = val

    image_meta['properties'] = properties
    return image_meta


def get_image_metadata_from_volume(volume):
    """Build glance-style image metadata from a Cinder volume dict."""
    properties = copy.copy(volume.get('volume_image_metadata', {}))
    image_meta = {'properties': properties}

    # Volume size is no longer related to the original image size,
    # so we take it from the volume directly. Cinder creates
    # volumes in Gb increments, and stores size in Gb, whereas
    # glance reports size in bytes. As we're returning glance
    # metadata here, we need to convert it.
    image_meta['size'] = volume.get('size', 0) * units.Gi

    # NOTE(yjiang5): restore the basic attributes
    # NOTE(mdbooth): These values come from volume_glance_metadata
    # in cinder. This is a simple key/value table, and all values
    # are strings. We need to convert them to ints to avoid
    # unexpected type errors.
    for attr in VIM_IMAGE_ATTRIBUTES:
        val = properties.pop(attr, None)
        # Only min_ram/min_disk are carried over; the remaining
        # attributes are simply stripped from the property dict.
        if attr in ('min_ram', 'min_disk'):
            image_meta[attr] = int(val or 0)

    # NOTE(yjiang5): Always set the image status as 'active'
    # and depends on followed volume_api.check_attach() to
    # verify it. This hack should be harmless with that check.
    image_meta['status'] = 'active'
    return image_meta
def get_hash_str(base_str):
    """Returns string that represents MD5 hash of base_str (in hex format).

    If base_str is a Unicode string, encode it to UTF-8.
    """
    if isinstance(base_str, six.text_type):
        base_str = base_str.encode('utf-8')
    return hashlib.md5(base_str).hexdigest()


if hasattr(hmac, 'compare_digest'):
    # Prefer the stdlib C implementation when available
    # (Python >= 2.7.7 / 3.3).
    constant_time_compare = hmac.compare_digest
else:
    def constant_time_compare(first, second):
        """Returns True if both string inputs are equal, otherwise False.

        This function should take a constant amount of time regardless of
        how many characters in the strings match.
        """
        if len(first) != len(second):
            return False
        result = 0
        for x, y in zip(first, second):
            result |= ord(x) ^ ord(y)
        return result == 0
{'key': 'env', 'value': 'prod'}, or a list of dicts (e.g. [{'key': 'env'}, {'value': 'beta'}]. Note that the values of the dict can be regular expressions. :param metadata_type: Provided to search for a specific metadata type (e.g. 'system_metadata') :returns: List of dicts where each dict is of the form {'key': 'somekey', 'value': 'somevalue', 'instance_id': 'some-instance-uuid-aaa'} if resource_type is 'instance'. """ if isinstance(search_filts, dict): search_filts = [search_filts] def _get_id(resource): if resource_type == 'instance': return resource.get('uuid') def _match_any(pattern_list, string): if isinstance(pattern_list, str): pattern_list = [pattern_list] return any([re.match(pattern, string) for pattern in pattern_list]) def _filter_metadata(resource, search_filt, input_metadata): ids = search_filt.get('resource_id', []) keys_filter = search_filt.get('key', []) values_filter = search_filt.get('value', []) output_metadata = {} if ids and _get_id(resource) not in ids: return {} for k, v in six.iteritems(input_metadata): # Both keys and value defined -- AND if (keys_filter and values_filter and not _match_any(keys_filter, k) and not _match_any(values_filter, v)): continue # Only keys or value is defined elif ((keys_filter and not _match_any(keys_filter, k)) or (values_filter and not _match_any(values_filter, v))): continue output_metadata[k] = v return output_metadata formatted_metadata_list = [] for res in resource_list: if resource_type == 'instance': # NOTE(rushiagr): metadata_type should be 'metadata' or # 'system_metadata' if resource_type is instance. Defaulting to # 'metadata' if not specified. 
if metadata_type is None: metadata_type = 'metadata' metadata = res.get(metadata_type, {}) for filt in search_filts: # By chaining the input to the output, the filters are # ANDed together metadata = _filter_metadata(res, filt, metadata) for (k, v) in metadata.items(): formatted_metadata_list.append({'key': k, 'value': v, '%s_id' % resource_type: _get_id(res)}) return formatted_metadata_list def safe_truncate(value, length): """Safely truncates unicode strings such that their encoded length is no greater than the length provided. """ b_value = encodeutils.safe_encode(value)[:length] # NOTE(chaochin) UTF-8 character byte size varies from 1 to 6. If # truncating a long byte string to 255, the last character may be # cut in the middle, so that UnicodeDecodeError will occur when # converting it back to unicode. decode_ok = False while not decode_ok: try: u_value = encodeutils.safe_decode(b_value) decode_ok = True except UnicodeDecodeError: b_value = b_value[:-1] return u_value def read_cached_file(filename, force_reload=False): """Read from a file if it has been modified. :param force_reload: Whether to reload the file. :returns: A tuple with a boolean specifying if the data is fresh or not. """ global _FILE_CACHE if force_reload: delete_cached_file(filename) reloaded = False mtime = os.path.getmtime(filename) cache_info = _FILE_CACHE.setdefault(filename, {}) if not cache_info or mtime > cache_info.get('mtime', 0): LOG.debug("Reloading cached file %s", filename) with open(filename) as fap: cache_info['data'] = fap.read() cache_info['mtime'] = mtime reloaded = True return (reloaded, cache_info['data']) def delete_cached_file(filename): """Delete cached file if present. :param filename: filename to delete """ global _FILE_CACHE if filename in _FILE_CACHE: del _FILE_CACHE[filename]<|fim▁end|>
"""Get all metadata for a list of resources after filtering. Search_filts is a list of dictionaries, where the values in the dictionary can be string or regex string, or a list of strings/regex strings.
<|file_name|>cjk_bigram.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2014 Couchbase, Inc. // Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file // except in compliance with the License. You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // Unless required by applicable law or agreed to in writing, software distributed under the // License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, // either express or implied. See the License for the specific language governing permissions // and limitations under the License. package cjk import ( "container/ring" "github.com/blevesearch/bleve/analysis" "github.com/blevesearch/bleve/registry" ) const BigramName = "cjk_bigram" type CJKBigramFilter struct { outputUnigram bool } func NewCJKBigramFilter(outputUnigram bool) *CJKBigramFilter { return &CJKBigramFilter{ outputUnigram: outputUnigram, } } func (s *CJKBigramFilter) Filter(input analysis.TokenStream) analysis.TokenStream { r := ring.New(2) itemsInRing := 0 rv := make(analysis.TokenStream, 0, len(input)) for _, token := range input { if token.Type == analysis.Ideographic { if itemsInRing > 0 { // if items already buffered // check to see if this is aligned curr := r.Value.(*analysis.Token) if token.Start-curr.End != 0 { // not aligned flush flushToken := s.flush(r, &itemsInRing) if flushToken != nil { rv = append(rv, flushToken) } } } // now we can add this token to the buffer r = r.Next() r.Value = token if itemsInRing < 2 { itemsInRing++ } if itemsInRing > 1 && s.outputUnigram { unigram := s.buildUnigram(r, &itemsInRing) if unigram != nil { rv = append(rv, unigram) } } bigramToken := s.outputBigram(r, &itemsInRing) if bigramToken != nil { rv = append(rv, bigramToken) } } else { // flush anything already buffered flushToken := s.flush(r, &itemsInRing) if flushToken != nil { rv = append(rv, flushToken) } // output this token as is rv = append(rv, token) } } // 
deal with possible trailing unigram if itemsInRing == 1 || s.outputUnigram { if itemsInRing == 2 { r = r.Next() } unigram := s.buildUnigram(r, &itemsInRing) if unigram != nil { rv = append(rv, unigram) } } return rv } func (s *CJKBigramFilter) flush(r *ring.Ring, itemsInRing *int) *analysis.Token { var rv *analysis.Token if *itemsInRing == 1 { rv = s.buildUnigram(r, itemsInRing) } r.Value = nil *itemsInRing = 0 return rv } func (s *CJKBigramFilter) outputBigram(r *ring.Ring, itemsInRing *int) *analysis.Token { if *itemsInRing == 2 { thisShingleRing := r.Move(-1) shingledBytes := make([]byte, 0) // do first token prev := thisShingleRing.Value.(*analysis.Token) shingledBytes = append(shingledBytes, prev.Term...) // do second token thisShingleRing = thisShingleRing.Next() curr := thisShingleRing.Value.(*analysis.Token) shingledBytes = append(shingledBytes, curr.Term...) token := analysis.Token{ Type: analysis.Double, Term: shingledBytes, Position: prev.Position, Start: prev.Start, End: curr.End, } return &token } return nil } func (s *CJKBigramFilter) buildUnigram(r *ring.Ring, itemsInRing *int) *analysis.Token {<|fim▁hole|> if *itemsInRing == 2 { thisShingleRing := r.Move(-1) // do first token prev := thisShingleRing.Value.(*analysis.Token) token := analysis.Token{ Type: analysis.Single, Term: prev.Term, Position: prev.Position, Start: prev.Start, End: prev.End, } return &token } else if *itemsInRing == 1 { // do first token prev := r.Value.(*analysis.Token) token := analysis.Token{ Type: analysis.Single, Term: prev.Term, Position: prev.Position, Start: prev.Start, End: prev.End, } return &token } return nil } func CJKBigramFilterConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.TokenFilter, error) { outputUnigram := false outVal, ok := config["output_unigram"].(bool) if ok { outputUnigram = outVal } return NewCJKBigramFilter(outputUnigram), nil } func init() { registry.RegisterTokenFilter(BigramName, CJKBigramFilterConstructor) }<|fim▁end|>
<|file_name|>inputSelect_CheckWarningsTest.js<|end_file_name|><|fim▁begin|>({ failOnWarning : true, /** * Check to make sure that when the user puts in invalid options (i.e. an option missing the value element) */ testInputSelect_WarningForInvalidOption : { auraWarningsExpectedDuringInit: ["Option at index 1 in select component"], attributes : { "case" : "badsel"}, test : function(cmp) { this.dummyFunc(); } }, /** * Verify that correct usage of InputSelect does not throw warnings */ testInputSelect_WarningDoesNotShowUp : { test : function(cmp) { this.dummyFunc(); }<|fim▁hole|> }, /** * Dummy function that will return true in all cases */ dummyFunc : function(){ return true; } })<|fim▁end|>
<|file_name|>SmoothScrollTimerTask.java<|end_file_name|><|fim▁begin|>package com.sunjian.android_pickview_lib.view; import java.util.TimerTask;<|fim▁hole|> int realOffset; int offset; final WheelView loopView; SmoothScrollTimerTask(WheelView loopview, int offset) { this.loopView = loopview; this.offset = offset; realTotalOffset = Integer.MAX_VALUE; realOffset = 0; } @Override public final void run() { if (realTotalOffset == Integer.MAX_VALUE) { realTotalOffset = offset; } //把要滚动的范围细分成十小份,按是小份单位来重绘 realOffset = (int) ((float) realTotalOffset * 0.1F); if (realOffset == 0) { if (realTotalOffset < 0) { realOffset = -1; } else { realOffset = 1; } } if (Math.abs(realTotalOffset) <= 1) { loopView.cancelFuture(); loopView.handler.sendEmptyMessage(MessageHandler.WHAT_ITEM_SELECTED); } else { loopView.totalScrollY = loopView.totalScrollY + realOffset; //这里如果不是循环模式,则点击空白位置需要回滚,不然就会出现选到-1 item的 情况 if (!loopView.isLoop) { float itemHeight = loopView.itemHeight; float top = (float) (-loopView.initPosition) * itemHeight; float bottom = (float) (loopView.getItemsCount() - 1 - loopView.initPosition) * itemHeight; if (loopView.totalScrollY <= top||loopView.totalScrollY >= bottom) { loopView.totalScrollY = loopView.totalScrollY - realOffset; loopView.cancelFuture(); loopView.handler.sendEmptyMessage(MessageHandler.WHAT_ITEM_SELECTED); return; } } loopView.handler.sendEmptyMessage(MessageHandler.WHAT_INVALIDATE_LOOP_VIEW); realTotalOffset = realTotalOffset - realOffset; } } }<|fim▁end|>
final class SmoothScrollTimerTask extends TimerTask { int realTotalOffset;
<|file_name|>topic.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2018 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. package topic import ( "bytes" "errors" "fmt" "time" "github.com/m3db/m3/src/cluster/kv" "github.com/m3db/m3/src/cluster/services" "github.com/m3db/m3/src/msg/generated/proto/topicpb" ) var ( errEmptyName = errors.New("invalid topic: empty name") errZeroShards = errors.New("invalid topic: zero shards") ) type topic struct { name string numOfShards uint32 consumerServices []ConsumerService version int } // NewTopic creates a new topic. func NewTopic() Topic { return new(topic) } // NewTopicFromValue creates a topic from a kv.Value. 
func NewTopicFromValue(v kv.Value) (Topic, error) { var topic topicpb.Topic if err := v.Unmarshal(&topic); err != nil { return nil, err } t, err := NewTopicFromProto(&topic) if err != nil { return nil, err } return t.SetVersion(v.Version()), nil } // NewTopicFromProto creates a topic from a proto. func NewTopicFromProto(t *topicpb.Topic) (Topic, error) { css := make([]ConsumerService, len(t.ConsumerServices)) for i, cspb := range t.ConsumerServices { cs, err := NewConsumerServiceFromProto(cspb) if err != nil { return nil, err } css[i] = cs } return NewTopic(). SetName(t.Name). SetNumberOfShards(t.NumberOfShards). SetConsumerServices(css), nil } func (t *topic) Name() string { return t.name } func (t *topic) SetName(value string) Topic { newt := *t newt.name = value return &newt } func (t *topic) NumberOfShards() uint32 { return t.numOfShards } func (t *topic) SetNumberOfShards(value uint32) Topic { newt := *t newt.numOfShards = value return &newt } func (t *topic) ConsumerServices() []ConsumerService { return t.consumerServices } func (t *topic) SetConsumerServices(value []ConsumerService) Topic { newt := *t newt.consumerServices = value return &newt } func (t *topic) Version() int { return t.version } func (t *topic) SetVersion(value int) Topic { newt := *t newt.version = value return &newt } func (t *topic) AddConsumerService(value ConsumerService) (Topic, error) { cur := t.ConsumerServices() for _, cs := range cur { if cs.ServiceID().Equal(value.ServiceID()) { return nil, fmt.Errorf("service %s is already consuming the topic", value.ServiceID().String()) } } return t.SetConsumerServices(append(cur, value)), nil } func (t *topic) RemoveConsumerService(value services.ServiceID) (Topic, error) { cur := t.ConsumerServices() for i, cs := range cur { if cs.ServiceID().Equal(value) { cur = append(cur[:i], cur[i+1:]...) 
return t.SetConsumerServices(cur), nil } } return nil, fmt.Errorf("could not find consumer service %s in the topic", value.String()) } func (t *topic) UpdateConsumerService(value ConsumerService) (Topic, error) { css := t.ConsumerServices() for i, cs := range css { if !cs.ServiceID().Equal(value.ServiceID()) { continue } if value.ConsumptionType() != cs.ConsumptionType() { return nil, fmt.Errorf("could not change consumption type for consumer service %s", value.ServiceID().String()) } css[i] = value return t.SetConsumerServices(css), nil } return nil, fmt.Errorf("could not find consumer service %s in the topic", value.String()) } func (t *topic) String() string { var buf bytes.Buffer buf.WriteString("\n{\n") buf.WriteString(fmt.Sprintf("\tversion: %d\n", t.version)) buf.WriteString(fmt.Sprintf("\tname: %s\n", t.name)) buf.WriteString(fmt.Sprintf("\tnumOfShards: %d\n", t.numOfShards)) if len(t.consumerServices) > 0 { buf.WriteString("\tconsumerServices: {\n") } for _, cs := range t.consumerServices { buf.WriteString(fmt.Sprintf("\t\t%s\n", cs.String())) } if len(t.consumerServices) > 0 { buf.WriteString("\t}\n") } buf.WriteString("}\n") return buf.String() } func (t *topic) Validate() error { if t.Name() == "" { return errEmptyName } if t.NumberOfShards() == 0 { return errZeroShards } uniqConsumers := make(map[string]struct{}, len(t.ConsumerServices())) for _, cs := range t.ConsumerServices() { _, ok := uniqConsumers[cs.ServiceID().String()] if ok { return fmt.Errorf("invalid topic: duplicated consumer %s", cs.ServiceID().String()) } uniqConsumers[cs.ServiceID().String()] = struct{}{} } return nil } // ToProto creates proto from a topic. 
func ToProto(t Topic) (*topicpb.Topic, error) { css := t.ConsumerServices() csspb := make([]*topicpb.ConsumerService, len(css)) for i, cs := range css { cspb, err := ConsumerServiceToProto(cs) if err != nil { return nil, err } csspb[i] = cspb } return &topicpb.Topic{ Name: t.Name(), NumberOfShards: t.NumberOfShards(), ConsumerServices: csspb, }, nil } type consumerService struct { sid services.ServiceID ct ConsumptionType ttlNanos int64 } // NewConsumerService creates a ConsumerService. func NewConsumerService() ConsumerService { return new(consumerService) } // NewConsumerServiceFromProto creates a ConsumerService from a proto. func NewConsumerServiceFromProto(cs *topicpb.ConsumerService) (ConsumerService, error) { ct, err := NewConsumptionTypeFromProto(cs.ConsumptionType) if err != nil { return nil, err } return NewConsumerService(). SetServiceID(NewServiceIDFromProto(cs.ServiceId)). SetConsumptionType(ct). SetMessageTTLNanos(cs.MessageTtlNanos), nil } // ConsumerServiceToProto creates proto from a ConsumerService. 
func ConsumerServiceToProto(cs ConsumerService) (*topicpb.ConsumerService, error) { ct, err := ConsumptionTypeToProto(cs.ConsumptionType()) if err != nil { return nil, err } return &topicpb.ConsumerService{ ConsumptionType: ct, ServiceId: ServiceIDToProto(cs.ServiceID()), MessageTtlNanos: cs.MessageTTLNanos(), }, nil } func (cs *consumerService) ServiceID() services.ServiceID { return cs.sid } func (cs *consumerService) SetServiceID(value services.ServiceID) ConsumerService { newcs := *cs newcs.sid = value return &newcs } func (cs *consumerService) ConsumptionType() ConsumptionType { return cs.ct<|fim▁hole|> newcs.ct = value return &newcs } func (cs *consumerService) MessageTTLNanos() int64 { return cs.ttlNanos } func (cs *consumerService) SetMessageTTLNanos(value int64) ConsumerService { newcs := *cs newcs.ttlNanos = value return &newcs } func (cs *consumerService) String() string { var buf bytes.Buffer buf.WriteString("{") buf.WriteString(fmt.Sprintf("service: %s, consumption type: %s", cs.sid.String(), cs.ct.String())) if cs.ttlNanos != 0 { buf.WriteString(fmt.Sprintf(", ttl: %v", time.Duration(cs.ttlNanos))) } buf.WriteString("}") return buf.String() } // NewServiceIDFromProto creates service id from a proto. func NewServiceIDFromProto(sid *topicpb.ServiceID) services.ServiceID { return services.NewServiceID().SetName(sid.Name).SetEnvironment(sid.Environment).SetZone(sid.Zone) } // ServiceIDToProto creates proto from a service id. func ServiceIDToProto(sid services.ServiceID) *topicpb.ServiceID { return &topicpb.ServiceID{ Name: sid.Name(), Environment: sid.Environment(), Zone: sid.Zone(), } }<|fim▁end|>
} func (cs *consumerService) SetConsumptionType(value ConsumptionType) ConsumerService { newcs := *cs
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>// Generated by typings // Source: https://raw.githubusercontent.com/DefinitelyTyped/DefinitelyTyped/56295f5058cac7ae458540423c50ac2dcf9fc711/lodash/lodash-3.10.d.ts // Type definitions for Lo-Dash // Project: http://lodash.com/ // Definitions by: Brian Zengel <https://github.com/bczengel>, Ilya Mochalov <https://github.com/chrootsu> // Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped declare var _: _.LoDashStatic; declare module _ { interface LoDashStatic { /** * Creates a lodash object which wraps the given value to enable intuitive method chaining. * * In addition to Lo-Dash methods, wrappers also have the following Array methods: * concat, join, pop, push, reverse, shift, slice, sort, splice, and unshift * * Chaining is supported in custom builds as long as the value method is implicitly or * explicitly included in the build. * * The chainable wrapper functions are: * after, assign, bind, bindAll, bindKey, chain, chunk, compact, compose, concat, countBy, * createCallback, curry, debounce, defaults, defer, delay, difference, filter, flatten, * forEach, forEachRight, forIn, forInRight, forOwn, forOwnRight, functions, groupBy, * indexBy, initial, intersection, invert, invoke, keys, map, max, memoize, merge, min, * object, omit, once, pairs, partial, partialRight, pick, pluck, pull, push, range, reject, * remove, rest, reverse, sample, shuffle, slice, sort, sortBy, splice, tap, throttle, times, * toArray, transform, union, uniq, unshift, unzip, values, where, without, wrap, and zip * * The non-chainable wrapper functions are: * clone, cloneDeep, contains, escape, every, find, findIndex, findKey, findLast, * findLastIndex, findLastKey, has, identity, indexOf, isArguments, isArray, isBoolean, * isDate, isElement, isEmpty, isEqual, isFinite, isFunction, isNaN, isNull, isNumber, * isObject, isPlainObject, isRegExp, isString, isUndefined, join, lastIndexOf, mixin, * noConflict, parseInt, pop, random, 
reduce, reduceRight, result, shift, size, some, * sortedIndex, runInContext, template, unescape, uniqueId, and value * * The wrapper functions first and last return wrapped values when n is provided, otherwise * they return unwrapped values. * * Explicit chaining can be enabled by using the _.chain method. **/ (value: number): LoDashImplicitWrapper<number>; (value: string): LoDashImplicitStringWrapper; (value: boolean): LoDashImplicitWrapper<boolean>; (value: Array<number>): LoDashImplicitNumberArrayWrapper; <T>(value: Array<T>): LoDashImplicitArrayWrapper<T>; <T extends {}>(value: T): LoDashImplicitObjectWrapper<T>; (value: any): LoDashImplicitWrapper<any>; /** * The semantic version number. **/ VERSION: string; /** * An object used to flag environments features. **/ support: Support; /** * By default, the template delimiters used by Lo-Dash are similar to those in embedded Ruby * (ERB). Change the following template settings to use alternative delimiters. **/ templateSettings: TemplateSettings; } /** * By default, the template delimiters used by Lo-Dash are similar to those in embedded Ruby * (ERB). Change the following template settings to use alternative delimiters. **/ interface TemplateSettings { /** * The "escape" delimiter. **/ escape?: RegExp; /** * The "evaluate" delimiter. **/ evaluate?: RegExp; /** * An object to import into the template as local variables. **/ imports?: Dictionary<any>; /** * The "interpolate" delimiter. **/ interpolate?: RegExp; /** * Used to reference the data object in the template text. **/ variable?: string; } /** * Creates a cache object to store key/value pairs. */ interface MapCache { /** * Removes `key` and its value from the cache. * @param key The key of the value to remove. * @return Returns `true` if the entry was removed successfully, else `false`. */ delete(key: string): boolean; /** * Gets the cached value for `key`. * @param key The key of the value to get. * @return Returns the cached value. 
*/ get(key: string): any; /** * Checks if a cached value for `key` exists. * @param key The key of the entry to check. * @return Returns `true` if an entry for `key` exists, else `false`. */ has(key: string): boolean; /** * Sets `value` to `key` of the cache. * @param key The key of the value to cache. * @param value The value to cache. * @return Returns the cache object. */ set(key: string, value: any): _.Dictionary<any>; } /** * An object used to flag environments features. **/ interface Support { /** * Detect if an arguments object's [[Class]] is resolvable (all but Firefox < 4, IE < 9). **/ argsClass: boolean; /** * Detect if arguments objects are Object objects (all but Narwhal and Opera < 10.5). **/ argsObject: boolean; /** * Detect if name or message properties of Error.prototype are enumerable by default. * (IE < 9, Safari < 5.1) **/ enumErrorProps: boolean; /** * Detect if prototype properties are enumerable by default. * * Firefox < 3.6, Opera > 9.50 - Opera < 11.60, and Safari < 5.1 (if the prototype or a property on the * prototype has been set) incorrectly set the [[Enumerable]] value of a function’s prototype property to true. **/ enumPrototypes: boolean; /** * Detect if Function#bind exists and is inferred to be fast (all but V8). **/ fastBind: boolean; /** * Detect if functions can be decompiled by Function#toString (all but PS3 and older Opera * mobile browsers & avoided in Windows 8 apps). **/ funcDecomp: boolean; /** * Detect if Function#name is supported (all but IE). **/ funcNames: boolean; /** * Detect if arguments object indexes are non-enumerable (Firefox < 4, IE < 9, PhantomJS, * Safari < 5.1). **/ nonEnumArgs: boolean; /** * Detect if properties shadowing those on Object.prototype are non-enumerable. * * In IE < 9 an objects own properties, shadowing non-enumerable ones, are made * non-enumerable as well (a.k.a the JScript [[DontEnum]] bug). 
**/ nonEnumShadows: boolean; /** * Detect if own properties are iterated after inherited properties (all but IE < 9). **/ ownLast: boolean; /** * Detect if Array#shift and Array#splice augment array-like objects correctly. * * Firefox < 10, IE compatibility mode, and IE < 9 have buggy Array shift() and splice() * functions that fail to remove the last element, value[0], of array-like objects even * though the length property is set to 0. The shift() method is buggy in IE 8 compatibility * mode, while splice() is buggy regardless of mode in IE < 9 and buggy in compatibility mode * in IE 9. **/ spliceObjects: boolean; /** * Detect lack of support for accessing string characters by index. * * IE < 8 can't access characters by index and IE 8 can only access characters by index on * string literals. **/ unindexedChars: boolean; } interface LoDashWrapperBase<T, TWrapper> { } interface LoDashImplicitWrapperBase<T, TWrapper> extends LoDashWrapperBase<T, TWrapper> { } interface LoDashExplicitWrapperBase<T, TWrapper> extends LoDashWrapperBase<T, TWrapper> { } interface LoDashImplicitWrapper<T> extends LoDashImplicitWrapperBase<T, LoDashImplicitWrapper<T>> { } interface LoDashExplicitWrapper<T> extends LoDashExplicitWrapperBase<T, LoDashExplicitWrapper<T>> { } interface LoDashImplicitStringWrapper extends LoDashImplicitWrapper<string> { } interface LoDashExplicitStringWrapper extends LoDashExplicitWrapper<string> { } interface LoDashImplicitObjectWrapper<T> extends LoDashImplicitWrapperBase<T, LoDashImplicitObjectWrapper<T>> { } interface LoDashExplicitObjectWrapper<T> extends LoDashExplicitWrapperBase<T, LoDashExplicitObjectWrapper<T>> { } interface LoDashImplicitArrayWrapper<T> extends LoDashImplicitWrapperBase<T[], LoDashImplicitArrayWrapper<T>> { pop(): T; push(...items: T[]): LoDashImplicitArrayWrapper<T>; shift(): T; sort(compareFn?: (a: T, b: T) => number): LoDashImplicitArrayWrapper<T>; splice(start: number): LoDashImplicitArrayWrapper<T>; splice(start: number, 
deleteCount: number, ...items: any[]): LoDashImplicitArrayWrapper<T>; unshift(...items: T[]): LoDashImplicitArrayWrapper<T>; } interface LoDashExplicitArrayWrapper<T> extends LoDashExplicitWrapperBase<T[], LoDashExplicitArrayWrapper<T>> { } interface LoDashImplicitNumberArrayWrapper extends LoDashImplicitArrayWrapper<number> { } interface LoDashExplicitNumberArrayWrapper extends LoDashExplicitArrayWrapper<number> { } // join (exists only in wrappers) interface LoDashImplicitWrapper<T> { /** * @see _.join */ join(separator?: string): string; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.join */ join(separator?: string): string; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.join */ join(separator?: string): string; } interface LoDashExplicitWrapper<T> { /** * @see _.join */ join(separator?: string): LoDashExplicitWrapper<string>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.join */ join(separator?: string): LoDashExplicitWrapper<string>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.join */ join(separator?: string): LoDashExplicitWrapper<string>; } /********* * Array * *********/ //_.chunk interface LoDashStatic { /** * Creates an array of elements split into groups the length of size. If collection can’t be split evenly, the * final chunk will be the remaining elements. * * @param array The array to process. * @param size The length of each chunk. * @return Returns the new array containing chunks. 
*/ chunk<T>( array: List<T>, size?: number ): T[][]; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.chunk */ chunk(size?: number): LoDashImplicitArrayWrapper<T[]>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.chunk */ chunk<TResult>(size?: number): LoDashImplicitArrayWrapper<TResult[]>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.chunk */ chunk(size?: number): LoDashExplicitArrayWrapper<T[]>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.chunk */ chunk<TResult>(size?: number): LoDashExplicitArrayWrapper<TResult[]>; } //_.compact interface LoDashStatic { /** * Creates an array with all falsey values removed. The values false, null, 0, "", undefined, and NaN are * falsey. * * @param array The array to compact. * @return (Array) Returns the new array of filtered values. */ compact<T>(array?: List<T>): T[]; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.compact */ compact(): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.compact */ compact<TResult>(): LoDashImplicitArrayWrapper<TResult>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.compact */ compact(): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.compact */ compact<TResult>(): LoDashExplicitArrayWrapper<TResult>; } //_.difference interface LoDashStatic { /** * Creates an array of unique array values not included in the other provided arrays using SameValueZero for * equality comparisons. * * @param array The array to inspect. * @param values The arrays of values to exclude. * @return Returns the new array of filtered values. 
*/ difference<T>( array: T[]|List<T>, ...values: (T[]|List<T>)[] ): T[]; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.difference */ difference(...values: (T[]|List<T>)[]): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.difference */ difference<TValue>(...values: (TValue[]|List<TValue>)[]): LoDashImplicitArrayWrapper<TValue>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.difference */ difference(...values: (T[]|List<T>)[]): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.difference */ difference<TValue>(...values: (TValue[]|List<TValue>)[]): LoDashExplicitArrayWrapper<TValue>; } //_.drop interface LoDashStatic { /** * Creates a slice of array with n elements dropped from the beginning. * * @param array The array to query. * @param n The number of elements to drop. * @return Returns the slice of array. */ drop<T>(array: T[]|List<T>, n?: number): T[]; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.drop */ drop(n?: number): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.drop */ drop<T>(n?: number): LoDashImplicitArrayWrapper<T>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.drop */ drop(n?: number): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.drop */ drop<T>(n?: number): LoDashExplicitArrayWrapper<T>; } //_.dropRight interface LoDashStatic { /** * Creates a slice of array with n elements dropped from the end. * * @param array The array to query. * @param n The number of elements to drop. * @return Returns the slice of array. 
*/ dropRight<T>( array: List<T>, n?: number ): T[]; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.dropRight */ dropRight(n?: number): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.dropRight */ dropRight<TResult>(n?: number): LoDashImplicitArrayWrapper<TResult>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.dropRight */ dropRight(n?: number): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.dropRight */ dropRight<TResult>(n?: number): LoDashExplicitArrayWrapper<TResult>; } //_.dropRightWhile interface LoDashStatic { /** * Creates a slice of array excluding elements dropped from the end. Elements are dropped until predicate * returns falsey. The predicate is bound to thisArg and invoked with three arguments: (value, index, array). * * If a property name is provided for predicate the created _.property style callback returns the property * value of the given element. * * If a value is also provided for thisArg the created _.matchesProperty style callback returns true for * elements that have a matching property value, else false. * * If an object is provided for predicate the created _.matches style callback returns true for elements that * match the properties of the given object, else false. * * @param array The array to query. * @param predicate The function invoked per iteration. * @param thisArg The this binding of predicate. * @return Returns the slice of array. 
*/ dropRightWhile<TValue>( array: List<TValue>, predicate?: ListIterator<TValue, boolean>, thisArg?: any ): TValue[]; /** * @see _.dropRightWhile */ dropRightWhile<TValue>( array: List<TValue>, predicate?: string, thisArg?: any ): TValue[]; /** * @see _.dropRightWhile */ dropRightWhile<TWhere, TValue>( array: List<TValue>, predicate?: TWhere ): TValue[]; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.dropRightWhile */ dropRightWhile( predicate?: ListIterator<T, boolean>, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.dropRightWhile */ dropRightWhile( predicate?: string, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.dropRightWhile */ dropRightWhile<TWhere>( predicate?: TWhere ): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.dropRightWhile */ dropRightWhile<TValue>( predicate?: ListIterator<TValue, boolean>, thisArg?: any ): LoDashImplicitArrayWrapper<TValue>; /** * @see _.dropRightWhile */ dropRightWhile<TValue>( predicate?: string, thisArg?: any ): LoDashImplicitArrayWrapper<TValue>; /** * @see _.dropRightWhile */ dropRightWhile<TWhere, TValue>( predicate?: TWhere ): LoDashImplicitArrayWrapper<TValue>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.dropRightWhile */ dropRightWhile( predicate?: ListIterator<T, boolean>, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.dropRightWhile */ dropRightWhile( predicate?: string, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.dropRightWhile */ dropRightWhile<TWhere>( predicate?: TWhere ): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.dropRightWhile */ dropRightWhile<TValue>( predicate?: ListIterator<TValue, boolean>, thisArg?: any ): LoDashExplicitArrayWrapper<TValue>; /** * @see _.dropRightWhile */ dropRightWhile<TValue>( predicate?: string, thisArg?: any ): LoDashExplicitArrayWrapper<TValue>; /** * @see _.dropRightWhile */ dropRightWhile<TWhere, TValue>( 
predicate?: TWhere ): LoDashExplicitArrayWrapper<TValue>; } //_.dropWhile interface LoDashStatic { /** * Creates a slice of array excluding elements dropped from the beginning. Elements are dropped until predicate * returns falsey. The predicate is bound to thisArg and invoked with three arguments: (value, index, array). * * If a property name is provided for predicate the created _.property style callback returns the property * value of the given element. * * If a value is also provided for thisArg the created _.matchesProperty style callback returns true for * elements that have a matching property value, else false. * * If an object is provided for predicate the created _.matches style callback returns true for elements that * have the properties of the given object, else false. * * @param array The array to query. * @param predicate The function invoked per iteration. * @param thisArg The this binding of predicate. * @return Returns the slice of array. */ dropWhile<TValue>( array: List<TValue>, predicate?: ListIterator<TValue, boolean>, thisArg?: any ): TValue[]; /** * @see _.dropWhile */ dropWhile<TValue>( array: List<TValue>, predicate?: string, thisArg?: any ): TValue[]; /** * @see _.dropWhile */ dropWhile<TWhere, TValue>( array: List<TValue>, predicate?: TWhere ): TValue[]; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.dropWhile */ dropWhile( predicate?: ListIterator<T, boolean>, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.dropWhile */ dropWhile( predicate?: string, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.dropWhile */ dropWhile<TWhere>( predicate?: TWhere ): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.dropWhile */ dropWhile<TValue>( predicate?: ListIterator<TValue, boolean>, thisArg?: any ): LoDashImplicitArrayWrapper<TValue>; /** * @see _.dropWhile */ dropWhile<TValue>( predicate?: string, thisArg?: any ): LoDashImplicitArrayWrapper<TValue>; /** * @see _.dropWhile 
*/ dropWhile<TWhere, TValue>( predicate?: TWhere ): LoDashImplicitArrayWrapper<TValue>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.dropWhile */ dropWhile( predicate?: ListIterator<T, boolean>, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.dropWhile */ dropWhile( predicate?: string, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.dropWhile */ dropWhile<TWhere>( predicate?: TWhere ): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.dropWhile */ dropWhile<TValue>( predicate?: ListIterator<TValue, boolean>, thisArg?: any ): LoDashExplicitArrayWrapper<TValue>; /** * @see _.dropWhile */ dropWhile<TValue>( predicate?: string, thisArg?: any ): LoDashExplicitArrayWrapper<TValue>; /** * @see _.dropWhile */ dropWhile<TWhere, TValue>( predicate?: TWhere ): LoDashExplicitArrayWrapper<TValue>; } //_.fill interface LoDashStatic { /** * Fills elements of array with value from start up to, but not including, end. * * Note: This method mutates array. * * @param array The array to fill. * @param value The value to fill array with. * @param start The start position. * @param end The end position. * @return Returns array. 
*/ fill<T>( array: any[], value: T, start?: number, end?: number ): T[]; /** * @see _.fill */ fill<T>( array: List<any>, value: T, start?: number, end?: number ): List<T>; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.fill */ fill<T>( value: T, start?: number, end?: number ): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.fill */ fill<T>( value: T, start?: number, end?: number ): LoDashImplicitObjectWrapper<List<T>>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.fill */ fill<T>( value: T, start?: number, end?: number ): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.fill */ fill<T>( value: T, start?: number, end?: number ): LoDashExplicitObjectWrapper<List<T>>; } //_.findIndex interface LoDashStatic { /** * This method is like _.find except that it returns the index of the first element predicate returns truthy * for instead of the element itself. * * If a property name is provided for predicate the created _.property style callback returns the property * value of the given element. * * If a value is also provided for thisArg the created _.matchesProperty style callback returns true for * elements that have a matching property value, else false. * * If an object is provided for predicate the created _.matches style callback returns true for elements that * have the properties of the given object, else false. * * @param array The array to search. * @param predicate The function invoked per iteration. * @param thisArg The this binding of predicate. * @return Returns the index of the found element, else -1. 
*/ findIndex<T>( array: List<T>, predicate?: ListIterator<T, boolean>, thisArg?: any ): number; /** * @see _.findIndex */ findIndex<T>( array: List<T>, predicate?: string, thisArg?: any ): number; /** * @see _.findIndex */ findIndex<W, T>( array: List<T>, predicate?: W ): number; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.findIndex */ findIndex( predicate?: ListIterator<T, boolean>, thisArg?: any ): number; /** * @see _.findIndex */ findIndex( predicate?: string, thisArg?: any ): number; /** * @see _.findIndex */ findIndex<W>( predicate?: W ): number; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.findIndex */ findIndex<TResult>( predicate?: ListIterator<TResult, boolean>, thisArg?: any ): number; /** * @see _.findIndex */ findIndex( predicate?: string, thisArg?: any ): number; /** * @see _.findIndex */ findIndex<W>( predicate?: W ): number; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.findIndex */ findIndex( predicate?: ListIterator<T, boolean>, thisArg?: any ): LoDashExplicitWrapper<number>; /** * @see _.findIndex */ findIndex( predicate?: string, thisArg?: any ): LoDashExplicitWrapper<number>; /** * @see _.findIndex */ findIndex<W>( predicate?: W ): LoDashExplicitWrapper<number>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.findIndex */ findIndex<TResult>( predicate?: ListIterator<TResult, boolean>, thisArg?: any ): LoDashExplicitWrapper<number>; /** * @see _.findIndex */ findIndex( predicate?: string, thisArg?: any ): LoDashExplicitWrapper<number>; /** * @see _.findIndex */ findIndex<W>( predicate?: W ): LoDashExplicitWrapper<number>; } //_.findLastIndex interface LoDashStatic { /** * This method is like _.findIndex except that it iterates over elements of collection from right to left. * * If a property name is provided for predicate the created _.property style callback returns the property * value of the given element. 
 * * If a value is also provided for thisArg the created _.matchesProperty style callback returns true for * elements that have a matching property value, else false. * * If an object is provided for predicate the created _.matches style callback returns true for elements that * have the properties of the given object, else false. * * @param array The array to search. * @param predicate The function invoked per iteration. * @param thisArg The this binding of predicate. * @return Returns the index of the found element, else -1. */ findLastIndex<T>( array: List<T>, predicate?: ListIterator<T, boolean>, thisArg?: any ): number; /** * @see _.findLastIndex */ findLastIndex<T>( array: List<T>, predicate?: string, thisArg?: any ): number; /** * @see _.findLastIndex */ findLastIndex<W, T>( array: List<T>, predicate?: W ): number; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.findLastIndex */ findLastIndex( predicate?: ListIterator<T, boolean>, thisArg?: any ): number; /** * @see _.findLastIndex */ findLastIndex( predicate?: string, thisArg?: any ): number; /** * @see _.findLastIndex */ findLastIndex<W>( predicate?: W ): number; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.findLastIndex */ findLastIndex<TResult>( predicate?: ListIterator<TResult, boolean>, thisArg?: any ): number; /** * @see _.findLastIndex */ findLastIndex( predicate?: string, thisArg?: any ): number; /** * @see _.findLastIndex */ findLastIndex<W>( predicate?: W ): number; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.findLastIndex */ findLastIndex( predicate?: ListIterator<T, boolean>, thisArg?: any ): LoDashExplicitWrapper<number>; /** * @see _.findLastIndex */ findLastIndex( predicate?: string, thisArg?: any ): LoDashExplicitWrapper<number>; /** * @see _.findLastIndex */ findLastIndex<W>( predicate?: W ): LoDashExplicitWrapper<number>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.findLastIndex */ findLastIndex<TResult>( predicate?: 
ListIterator<TResult, boolean>, thisArg?: any ): LoDashExplicitWrapper<number>; /** * @see _.findLastIndex */ findLastIndex( predicate?: string, thisArg?: any ): LoDashExplicitWrapper<number>; /** * @see _.findLastIndex */ findLastIndex<W>( predicate?: W ): LoDashExplicitWrapper<number>; } //_.first interface LoDashStatic { /** * Gets the first element of array. * * @alias _.head * * @param array The array to query. * @return Returns the first element of array. */ first<T>(array: List<T>): T; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.first */ first(): T; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.first */ first<TResult>(): TResult; } interface RecursiveArray<T> extends Array<T|RecursiveArray<T>> {} interface ListOfRecursiveArraysOrValues<T> extends List<T|RecursiveArray<T>> {} //_.flatten interface LoDashStatic { /** * Flattens a nested array. If isDeep is true the array is recursively flattened, otherwise it’s only * flattened a single level. * * @param array The array to flatten. * @param isDeep Specify a deep flatten. * @return Returns the new flattened array. 
*/ flatten<T>(array: ListOfRecursiveArraysOrValues<T>, isDeep: boolean): T[]; /** * @see _.flatten */ flatten<T>(array: List<T|T[]>): T[]; /** * @see _.flatten */ flatten<T>(array: ListOfRecursiveArraysOrValues<T>): RecursiveArray<T>; } interface LoDashImplicitWrapper<T> { /** * @see _.flatten */ flatten(): LoDashImplicitArrayWrapper<string>; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.flatten */ flatten<TResult>(isDeep?: boolean): LoDashImplicitArrayWrapper<TResult>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.flatten */ flatten<TResult>(isDeep?: boolean): LoDashImplicitArrayWrapper<TResult>; } interface LoDashExplicitWrapper<T> { /** * @see _.flatten */ flatten(): LoDashExplicitArrayWrapper<string>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.flatten */ flatten<TResult>(isDeep?: boolean): LoDashExplicitArrayWrapper<TResult>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.flatten */ flatten<TResult>(isDeep?: boolean): LoDashExplicitArrayWrapper<TResult>; } //_.flattenDeep interface LoDashStatic { /** * Recursively flattens a nested array. * * @param array The array to recursively flatten. * @return Returns the new flattened array. 
 */ flattenDeep<T>(array: ListOfRecursiveArraysOrValues<T>): T[]; } interface LoDashImplicitWrapper<T> { /** * @see _.flattenDeep */ flattenDeep(): LoDashImplicitArrayWrapper<string>; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.flattenDeep */ flattenDeep<T>(): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.flattenDeep */ flattenDeep<T>(): LoDashImplicitArrayWrapper<T>; } interface LoDashExplicitWrapper<T> { /** * @see _.flattenDeep */ flattenDeep(): LoDashExplicitArrayWrapper<string>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.flattenDeep */ flattenDeep<T>(): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.flattenDeep */ flattenDeep<T>(): LoDashExplicitArrayWrapper<T>; } //_.indexOf interface LoDashStatic { /** * Gets the index at which the first occurrence of value is found in array using SameValueZero for equality * comparisons. If fromIndex is negative, it’s used as the offset from the end of array. If array is sorted * providing true for fromIndex performs a faster binary search. * * @param array The array to search. * @param value The value to search for. * @param fromIndex The index to search from or true to perform a binary search on a sorted array. * @return Returns the index of the matched value, else -1. 
*/ indexOf<T>( array: List<T>, value: T, fromIndex?: boolean|number ): number; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.indexOf */ indexOf( value: T, fromIndex?: boolean|number ): number; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.indexOf */ indexOf<TValue>( value: TValue, fromIndex?: boolean|number ): number; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.indexOf */ indexOf( value: T, fromIndex?: boolean|number ): LoDashExplicitWrapper<number>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.indexOf */ indexOf<TValue>( value: TValue, fromIndex?: boolean|number ): LoDashExplicitWrapper<number>; } //_.initial interface LoDashStatic { /** * Gets all but the last element of array. * * @param array The array to query. * @return Returns the slice of array. */ initial<T>(array: List<T>): T[]; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.initial */ initial(): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.initial */ initial<T>(): LoDashImplicitArrayWrapper<T>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.initial */ initial(): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.initial */ initial<T>(): LoDashExplicitArrayWrapper<T>; } //_.intersection interface LoDashStatic { /** * Creates an array of unique values that are included in all of the provided arrays using SameValueZero for * equality comparisons. * * @param arrays The arrays to inspect. * @return Returns the new array of shared values. 
*/ intersection<T>(...arrays: (T[]|List<T>)[]): T[]; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.intersection */ intersection<TResult>(...arrays: (TResult[]|List<TResult>)[]): LoDashImplicitArrayWrapper<TResult>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.intersection */ intersection<TResult>(...arrays: (TResult[]|List<TResult>)[]): LoDashImplicitArrayWrapper<TResult>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.intersection */ intersection<TResult>(...arrays: (TResult[]|List<TResult>)[]): LoDashExplicitArrayWrapper<TResult>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.intersection */ intersection<TResult>(...arrays: (TResult[]|List<TResult>)[]): LoDashExplicitArrayWrapper<TResult>; } //_.last interface LoDashStatic { /** * Gets the last element of array. * * @param array The array to query. * @return Returns the last element of array. */ last<T>(array: List<T>): T; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.last */ last(): T; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.last */ last<T>(): T; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.last */ last(): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.last */ last<T>(): LoDashExplicitObjectWrapper<T>; } //_.lastIndexOf interface LoDashStatic { /** * This method is like _.indexOf except that it iterates over elements of array from right to left. * * @param array The array to search. * @param value The value to search for. * @param fromIndex The index to search from or true to perform a binary search on a sorted array. * @return Returns the index of the matched value, else -1. 
*/ lastIndexOf<T>( array: List<T>, value: T, fromIndex?: boolean|number ): number; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.lastIndexOf */ lastIndexOf( value: T, fromIndex?: boolean|number ): number; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.lastIndexOf */ lastIndexOf<TResult>( value: TResult, fromIndex?: boolean|number ): number; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.lastIndexOf */ lastIndexOf( value: T, fromIndex?: boolean|number ): LoDashExplicitWrapper<number>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.lastIndexOf */ lastIndexOf<TResult>( value: TResult, fromIndex?: boolean|number ): LoDashExplicitWrapper<number>; } //_.object interface LoDashStatic { /** * @see _.zipObject */ object<TValues, TResult extends {}>( props: List<StringRepresentable>|List<List<any>>, values?: List<TValues> ): TResult; /** * @see _.zipObject */ object<TResult extends {}>( props: List<StringRepresentable>|List<List<any>>, values?: List<any> ): TResult; /** * @see _.zipObject */ object( props: List<StringRepresentable>|List<List<any>>, values?: List<any> ): _.Dictionary<any>; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.zipObject */ object<TValues, TResult extends {}>( values?: List<TValues> ): _.LoDashImplicitObjectWrapper<TResult>; /** * @see _.zipObject */ object<TResult extends {}>( values?: List<any> ): _.LoDashImplicitObjectWrapper<TResult>; /** * @see _.zipObject */ object( values?: List<any> ): _.LoDashImplicitObjectWrapper<_.Dictionary<any>>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.zipObject */ object<TValues, TResult extends {}>( values?: List<TValues> ): _.LoDashImplicitObjectWrapper<TResult>; /** * @see _.zipObject */ object<TResult extends {}>( values?: List<any> ): _.LoDashImplicitObjectWrapper<TResult>; /** * @see _.zipObject */ object( values?: List<any> ): _.LoDashImplicitObjectWrapper<_.Dictionary<any>>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.zipObject */ 
object<TValues, TResult extends {}>( values?: List<TValues> ): _.LoDashExplicitObjectWrapper<TResult>; /** * @see _.zipObject */ object<TResult extends {}>( values?: List<any> ): _.LoDashExplicitObjectWrapper<TResult>; /** * @see _.zipObject */ object( values?: List<any> ): _.LoDashExplicitObjectWrapper<_.Dictionary<any>>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.zipObject */ object<TValues, TResult extends {}>( values?: List<TValues> ): _.LoDashExplicitObjectWrapper<TResult>; /** * @see _.zipObject */ object<TResult extends {}>( values?: List<any> ): _.LoDashExplicitObjectWrapper<TResult>; /** * @see _.zipObject */ object( values?: List<any> ): _.LoDashExplicitObjectWrapper<_.Dictionary<any>>; } //_.pull interface LoDashStatic { /** * Removes all provided values from array using SameValueZero for equality comparisons. * * Note: Unlike _.without, this method mutates array. * * @param array The array to modify. * @param values The values to remove. * @return Returns array. */ pull<T>( array: T[], ...values: T[] ): T[]; /** * @see _.pull */ pull<T>( array: List<T>, ...values: T[] ): List<T>; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.pull */ pull(...values: T[]): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.pull */ pull<TValue>(...values: TValue[]): LoDashImplicitObjectWrapper<List<TValue>>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.pull */ pull(...values: T[]): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.pull */ pull<TValue>(...values: TValue[]): LoDashExplicitObjectWrapper<List<TValue>>; } //_.pullAt interface LoDashStatic { /** * Removes elements from array corresponding to the given indexes and returns an array of the removed elements. * Indexes may be specified as an array of indexes or as individual arguments. * * Note: Unlike _.at, this method mutates array. * * @param array The array to modify. 
* @param indexes The indexes of elements to remove, specified as individual indexes or arrays of indexes. * @return Returns the new array of removed elements. */ pullAt<T>( array: List<T>, ...indexes: (number|number[])[] ): T[]; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.pullAt */ pullAt(...indexes: (number|number[])[]): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.pullAt */ pullAt<T>(...indexes: (number|number[])[]): LoDashImplicitArrayWrapper<T>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.pullAt */ pullAt(...indexes: (number|number[])[]): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.pullAt */ pullAt<T>(...indexes: (number|number[])[]): LoDashExplicitArrayWrapper<T>; } //_.remove interface LoDashStatic { /** * Removes all elements from array that predicate returns truthy for and returns an array of the removed * elements. The predicate is bound to thisArg and invoked with three arguments: (value, index, array). * * If a property name is provided for predicate the created _.property style callback returns the property * value of the given element. * * If a value is also provided for thisArg the created _.matchesProperty style callback returns true for * elements that have a matching property value, else false. * * If an object is provided for predicate the created _.matches style callback returns true for elements that * have the properties of the given object, else false. * * Note: Unlike _.filter, this method mutates array. * * @param array The array to modify. * @param predicate The function invoked per iteration. * @param thisArg The this binding of predicate. * @return Returns the new array of removed elements. 
*/ remove<T>( array: List<T>, predicate?: ListIterator<T, boolean>, thisArg?: any ): T[]; /** * @see _.remove */ remove<T>( array: List<T>, predicate?: string, thisArg?: any ): T[]; /** * @see _.remove */ remove<W, T>( array: List<T>, predicate?: W ): T[]; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.remove */ remove( predicate?: ListIterator<T, boolean>, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.remove */ remove( predicate?: string, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.remove */ remove<W>( predicate?: W ): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.remove */ remove<TResult>( predicate?: ListIterator<TResult, boolean>, thisArg?: any ): LoDashImplicitArrayWrapper<TResult>; /** * @see _.remove */ remove<TResult>( predicate?: string, thisArg?: any ): LoDashImplicitArrayWrapper<TResult>; /** * @see _.remove */ remove<W, TResult>( predicate?: W ): LoDashImplicitArrayWrapper<TResult>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.remove */ remove( predicate?: ListIterator<T, boolean>, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.remove */ remove( predicate?: string, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.remove */ remove<W>( predicate?: W ): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.remove */ remove<TResult>( predicate?: ListIterator<TResult, boolean>, thisArg?: any ): LoDashExplicitArrayWrapper<TResult>; /** * @see _.remove */ remove<TResult>( predicate?: string, thisArg?: any ): LoDashExplicitArrayWrapper<TResult>; /** * @see _.remove */ remove<W, TResult>( predicate?: W ): LoDashExplicitArrayWrapper<TResult>; } //_.rest interface LoDashStatic { /** * Gets all but the first element of array. * * @alias _.tail * * @param array The array to query. * @return Returns the slice of array. 
*/ rest<T>(array: List<T>): T[]; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.rest */ rest(): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.rest */ rest<T>(): LoDashImplicitArrayWrapper<T>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.rest */ rest(): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.rest */ rest<T>(): LoDashExplicitArrayWrapper<T>; } //_.slice interface LoDashStatic { /** * Creates a slice of array from start up to, but not including, end. * * @param array The array to slice. * @param start The start position. * @param end The end position. * @return Returns the slice of array. */ slice<T>( array: T[], start?: number, end?: number ): T[]; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.slice */ slice( start?: number, end?: number ): LoDashImplicitArrayWrapper<T>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.slice */ slice( start?: number, end?: number ): LoDashExplicitArrayWrapper<T>; } //_.sortedIndex interface LoDashStatic { /** * Uses a binary search to determine the lowest index at which value should be inserted into array in order to maintain its sort order. If an iteratee function is provided it’s invoked for value and each element of array to compute their sort ranking. The iteratee is bound to thisArg and invoked with one argument; (value). * * If a property name is provided for iteratee the created _.property style callback returns the property value of the given element. * * If a value is also provided for thisArg the created _.matchesProperty style callback returns true for elements that have a matching property value, else false. * * If an object is provided for iteratee the created _.matches style callback returns true for elements that have the properties of the given object, else false. * * @param array The sorted array to inspect. * @param value The value to evaluate. 
 * @param iteratee The function invoked per iteration. * @param thisArg The this binding of iteratee. * @return Returns the index at which value should be inserted into array. */ sortedIndex<T, TSort>( array: List<T>, value: T, iteratee?: (x: T) => TSort, thisArg?: any ): number; /** * @see _.sortedIndex */ sortedIndex<T>( array: List<T>, value: T, iteratee?: (x: T) => any, thisArg?: any ): number; /** * @see _.sortedIndex */ sortedIndex<T>( array: List<T>, value: T, iteratee: string ): number; /** * @see _.sortedIndex */ sortedIndex<W, T>( array: List<T>, value: T, iteratee: W ): number; /** * @see _.sortedIndex */ sortedIndex<T>( array: List<T>, value: T, iteratee: Object ): number; } interface LoDashImplicitWrapper<T> { /** * @see _.sortedIndex */ sortedIndex<TSort>( value: string, iteratee?: (x: string) => TSort, thisArg?: any ): number; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.sortedIndex */ sortedIndex<TSort>( value: T, iteratee?: (x: T) => TSort, thisArg?: any ): number; /** * @see _.sortedIndex */ sortedIndex( value: T, iteratee: string ): number; /** * @see _.sortedIndex */ sortedIndex<W>( value: T, iteratee: W ): number; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.sortedIndex */ sortedIndex<T, TSort>( value: T, iteratee?: (x: T) => TSort, thisArg?: any ): number; /** * @see _.sortedIndex */ sortedIndex<T>( value: T, iteratee?: (x: T) => any, thisArg?: any ): number; /** * @see _.sortedIndex */ sortedIndex<T>( value: T, iteratee: string ): number; /** * @see _.sortedIndex */ sortedIndex<W, T>( value: T, iteratee: W ): number; /** * @see _.sortedIndex */ sortedIndex<T>( value: T, iteratee: Object ): number; } interface LoDashExplicitWrapper<T> { /** * @see _.sortedIndex */ sortedIndex<TSort>( value: string, iteratee?: (x: string) => TSort, thisArg?: any ): LoDashExplicitWrapper<number>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.sortedIndex */ sortedIndex<TSort>( value: T, iteratee?: (x: T) => TSort, thisArg?: any ): LoDashExplicitWrapper<number>; /** * @see _.sortedIndex */ sortedIndex( value: 
T, iteratee: string ): LoDashExplicitWrapper<number>; /** * @see _.sortedIndex */ sortedIndex<W>( value: T, iteratee: W ): LoDashExplicitWrapper<number>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.sortedIndex */ sortedIndex<T, TSort>( value: T, iteratee?: (x: T) => TSort, thisArg?: any ): LoDashExplicitWrapper<number>; /** * @see _.sortedIndex */ sortedIndex<T>( value: T, iteratee?: (x: T) => any, thisArg?: any ): LoDashExplicitWrapper<number>; /** * @see _.sortedIndex */ sortedIndex<T>( value: T, iteratee: string ): LoDashExplicitWrapper<number>; /** * @see _.sortedIndex */ sortedIndex<W, T>( value: T, iteratee: W ): LoDashExplicitWrapper<number>; /** * @see _.sortedIndex */ sortedIndex<T>( value: T, iteratee: Object ): LoDashExplicitWrapper<number>; } //_.sortedLastIndex interface LoDashStatic { /** * This method is like _.sortedIndex except that it returns the highest index at which value should be * inserted into array in order to maintain its sort order. * * @param array The sorted array to inspect. * @param value The value to evaluate. * @param iteratee The function invoked per iteration. * @param thisArg The this binding of iteratee. * @return Returns the index at which value should be inserted into array. 
*/ sortedLastIndex<T, TSort>( array: List<T>, value: T, iteratee?: (x: T) => TSort, thisArg?: any ): number; /** * @see _.sortedLastIndex */ sortedLastIndex<T>( array: List<T>, value: T, iteratee?: (x: T) => any, thisArg?: any ): number; /** * @see _.sortedLastIndex */ sortedLastIndex<T>( array: List<T>, value: T, iteratee: string ): number; /** * @see _.sortedLastIndex */ sortedLastIndex<W, T>( array: List<T>, value: T, iteratee: W ): number; /** * @see _.sortedLastIndex */ sortedLastIndex<T>( array: List<T>, value: T, iteratee: Object ): number; } interface LoDashImplicitWrapper<T> { /** * @see _.sortedLastIndex */ sortedLastIndex<TSort>( value: string, iteratee?: (x: string) => TSort, thisArg?: any ): number; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.sortedLastIndex */ sortedLastIndex<TSort>( value: T, iteratee?: (x: T) => TSort, thisArg?: any ): number; /** * @see _.sortedLastIndex */ sortedLastIndex( value: T, iteratee: string ): number; /** * @see _.sortedLastIndex */ sortedLastIndex<W>( value: T, iteratee: W ): number; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.sortedLastIndex */ sortedLastIndex<T, TSort>( value: T, iteratee?: (x: T) => TSort, thisArg?: any ): number; /** * @see _.sortedLastIndex */ sortedLastIndex<T>( value: T, iteratee?: (x: T) => any, thisArg?: any ): number; /** * @see _.sortedLastIndex */ sortedLastIndex<T>( value: T, iteratee: string ): number; /** * @see _.sortedLastIndex */ sortedLastIndex<W, T>( value: T, iteratee: W ): number; /** * @see _.sortedLastIndex */ sortedLastIndex<T>( value: T, iteratee: Object ): number; } interface LoDashExplicitWrapper<T> { /** * @see _.sortedLastIndex */ sortedLastIndex<TSort>( value: string, iteratee?: (x: string) => TSort, thisArg?: any ): LoDashExplicitWrapper<number>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.sortedLastIndex */ sortedLastIndex<TSort>( value: T, iteratee?: (x: T) => TSort, thisArg?: any ): LoDashExplicitWrapper<number>; /** * @see 
_.sortedLastIndex */ sortedLastIndex( value: T, iteratee: string ): LoDashExplicitWrapper<number>; /** * @see _.sortedLastIndex */ sortedLastIndex<W>( value: T, iteratee: W ): LoDashExplicitWrapper<number>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.sortedLastIndex */ sortedLastIndex<T, TSort>( value: T, iteratee?: (x: T) => TSort, thisArg?: any ): LoDashExplicitWrapper<number>; /** * @see _.sortedLastIndex */ sortedLastIndex<T>( value: T, iteratee?: (x: T) => any, thisArg?: any ): LoDashExplicitWrapper<number>; /** * @see _.sortedLastIndex */ sortedLastIndex<T>( value: T, iteratee: string ): LoDashExplicitWrapper<number>; /** * @see _.sortedLastIndex */ sortedLastIndex<W, T>( value: T, iteratee: W ): LoDashExplicitWrapper<number>; /** * @see _.sortedLastIndex */ sortedLastIndex<T>( value: T, iteratee: Object ): LoDashExplicitWrapper<number>; } //_.tail interface LoDashStatic { /** * @see _.rest */ tail<T>(array: List<T>): T[]; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.rest */ tail(): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.rest */ tail<T>(): LoDashImplicitArrayWrapper<T>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.rest */ tail(): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.rest */ tail<T>(): LoDashExplicitArrayWrapper<T>; } //_.take interface LoDashStatic { /** * Creates a slice of array with n elements taken from the beginning. * * @param array The array to query. * @param n The number of elements to take. * @return Returns the slice of array. 
*/
    take<T>(
        array: List<T>,
        n?: number
    ): T[];
}

interface LoDashImplicitArrayWrapper<T> {
    /**
     * @see _.take
     */
    // NOTE(review): n is presumably the count of leading elements kept — confirm against lodash 3 docs
    take(n?: number): LoDashImplicitArrayWrapper<T>;
}

interface LoDashImplicitObjectWrapper<T> {
    /**
     * @see _.take
     */
    take<TResult>(n?: number): LoDashImplicitArrayWrapper<TResult>;
}

interface LoDashExplicitArrayWrapper<T> {
    /**
     * @see _.take
     */
    take(n?: number): LoDashExplicitArrayWrapper<T>;
}

interface LoDashExplicitObjectWrapper<T> {
    /**
     * @see _.take
     */
    take<TResult>(n?: number): LoDashExplicitArrayWrapper<TResult>;
}

//_.takeRight
interface LoDashStatic {
    /**
     * Creates a slice of array with n elements taken from the end.
     *
     * @param array The array to query.
     * @param n The number of elements to take.
     * @return Returns the slice of array.
     */
    takeRight<T>(
        array: List<T>,
        n?: number
    ): T[];
}

interface LoDashImplicitArrayWrapper<T> {
    /**
     * @see _.takeRight
     */
    takeRight(n?: number): LoDashImplicitArrayWrapper<T>;
}

interface LoDashImplicitObjectWrapper<T> {
    /**
     * @see _.takeRight
     */
    takeRight<TResult>(n?: number): LoDashImplicitArrayWrapper<TResult>;
}

interface LoDashExplicitArrayWrapper<T> {
    /**
     * @see _.takeRight
     */
    takeRight(n?: number): LoDashExplicitArrayWrapper<T>;
}

interface LoDashExplicitObjectWrapper<T> {
    /**
     * @see _.takeRight
     */
    takeRight<TResult>(n?: number): LoDashExplicitArrayWrapper<TResult>;
}

//_.takeRightWhile
interface LoDashStatic {
    /**
     * Creates a slice of array with elements taken from the end. Elements are taken until predicate returns
     * falsey. The predicate is bound to thisArg and invoked with three arguments: (value, index, array).
     *
     * If a property name is provided for predicate the created _.property style callback returns the property
     * value of the given element.
     *
     * If a value is also provided for thisArg the created _.matchesProperty style callback returns true for
     * elements that have a matching property value, else false.
* * If an object is provided for predicate the created _.matches style callback returns true for elements that * have the properties of the given object, else false. * * @param array The array to query. * @param predicate The function invoked per iteration. * @param thisArg The this binding of predicate. * @return Returns the slice of array. */ takeRightWhile<TValue>( array: List<TValue>, predicate?: ListIterator<TValue, boolean>, thisArg?: any ): TValue[]; /** * @see _.takeRightWhile */ takeRightWhile<TValue>( array: List<TValue>, predicate?: string, thisArg?: any ): TValue[]; /** * @see _.takeRightWhile */ takeRightWhile<TWhere, TValue>( array: List<TValue>, predicate?: TWhere ): TValue[]; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.takeRightWhile */ takeRightWhile( predicate?: ListIterator<T, boolean>, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.takeRightWhile */ takeRightWhile( predicate?: string, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.takeRightWhile */ takeRightWhile<TWhere>( predicate?: TWhere ): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.takeRightWhile */ takeRightWhile<TValue>( predicate?: ListIterator<TValue, boolean>, thisArg?: any ): LoDashImplicitArrayWrapper<TValue>; /** * @see _.takeRightWhile */ takeRightWhile<TValue>( predicate?: string, thisArg?: any ): LoDashImplicitArrayWrapper<TValue>; /** * @see _.takeRightWhile */ takeRightWhile<TWhere, TValue>( predicate?: TWhere ): LoDashImplicitArrayWrapper<TValue>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.takeRightWhile */ takeRightWhile( predicate?: ListIterator<T, boolean>, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.takeRightWhile */ takeRightWhile( predicate?: string, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.takeRightWhile */ takeRightWhile<TWhere>( predicate?: TWhere ): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * 
@see _.takeRightWhile */ takeRightWhile<TValue>( predicate?: ListIterator<TValue, boolean>, thisArg?: any ): LoDashExplicitArrayWrapper<TValue>; /** * @see _.takeRightWhile */ takeRightWhile<TValue>( predicate?: string, thisArg?: any ): LoDashExplicitArrayWrapper<TValue>; /** * @see _.takeRightWhile */ takeRightWhile<TWhere, TValue>( predicate?: TWhere ): LoDashExplicitArrayWrapper<TValue>; } //_.takeWhile interface LoDashStatic { /** * Creates a slice of array with elements taken from the beginning. Elements are taken until predicate returns * falsey. The predicate is bound to thisArg and invoked with three arguments: (value, index, array). * * If a property name is provided for predicate the created _.property style callback returns the property * value of the given element. * * If a value is also provided for thisArg the created _.matchesProperty style callback returns true for * elements that have a matching property value, else false. * * If an object is provided for predicate the created _.matches style callback returns true for elements that * have the properties of the given object, else false. * * @param array The array to query. * @param predicate The function invoked per iteration. * @param thisArg The this binding of predicate. * @return Returns the slice of array. 
*/ takeWhile<TValue>( array: List<TValue>, predicate?: ListIterator<TValue, boolean>, thisArg?: any ): TValue[]; /** * @see _.takeWhile */ takeWhile<TValue>( array: List<TValue>, predicate?: string, thisArg?: any ): TValue[]; /** * @see _.takeWhile */ takeWhile<TWhere, TValue>( array: List<TValue>, predicate?: TWhere ): TValue[]; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.takeWhile */ takeWhile( predicate?: ListIterator<T, boolean>, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.takeWhile */ takeWhile( predicate?: string, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.takeWhile */ takeWhile<TWhere>( predicate?: TWhere ): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.takeWhile */ takeWhile<TValue>( predicate?: ListIterator<TValue, boolean>, thisArg?: any ): LoDashImplicitArrayWrapper<TValue>; /** * @see _.takeWhile */ takeWhile<TValue>( predicate?: string, thisArg?: any ): LoDashImplicitArrayWrapper<TValue>; /** * @see _.takeWhile */ takeWhile<TWhere, TValue>( predicate?: TWhere ): LoDashImplicitArrayWrapper<TValue>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.takeWhile */ takeWhile( predicate?: ListIterator<T, boolean>, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.takeWhile */ takeWhile( predicate?: string, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.takeWhile */ takeWhile<TWhere>( predicate?: TWhere ): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.takeWhile */ takeWhile<TValue>( predicate?: ListIterator<TValue, boolean>, thisArg?: any ): LoDashExplicitArrayWrapper<TValue>; /** * @see _.takeWhile */ takeWhile<TValue>( predicate?: string, thisArg?: any ): LoDashExplicitArrayWrapper<TValue>; /** * @see _.takeWhile */ takeWhile<TWhere, TValue>( predicate?: TWhere ): LoDashExplicitArrayWrapper<TValue>; } //_.union interface LoDashStatic { /** * Creates an array of unique values, in order, from all 
of the provided arrays using SameValueZero for * equality comparisons. * * @param arrays The arrays to inspect. * @return Returns the new array of combined values. */ union<T>(...arrays: List<T>[]): T[]; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.union */ union(...arrays: List<T>[]): LoDashImplicitArrayWrapper<T>; /** * @see _.union */ union<T>(...arrays: List<T>[]): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.union */ union<T>(...arrays: List<T>[]): LoDashImplicitArrayWrapper<T>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.union */ union(...arrays: List<T>[]): LoDashExplicitArrayWrapper<T>; /** * @see _.union */ union<T>(...arrays: List<T>[]): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.union */ union<T>(...arrays: List<T>[]): LoDashExplicitArrayWrapper<T>; } //_.uniq interface LoDashStatic { /** * Creates a duplicate-free version of an array, using SameValueZero for equality comparisons, in which only * the first occurrence of each element is kept. Providing true for isSorted performs a faster search * algorithm for sorted arrays. If an iteratee function is provided it’s invoked for each element in the * array to generate the criterion by which uniqueness is computed. The iteratee is bound to thisArg and * invoked with three arguments: (value, index, array). * * If a property name is provided for iteratee the created _.property style callback returns the property * value of the given element. * * If a value is also provided for thisArg the created _.matchesProperty style callback returns true for * elements that have a matching property value, else false. * * If an object is provided for iteratee the created _.matches style callback returns true for elements that * have the properties of the given object, else false. * * @alias _.unique * * @param array The array to inspect. * @param isSorted Specify the array is sorted. 
* @param iteratee The function invoked per iteration. * @param thisArg iteratee * @return Returns the new duplicate-value-free array. */ uniq<T>( array: List<T>, isSorted?: boolean, iteratee?: ListIterator<T, any>, thisArg?: any ): T[]; /** * @see _.uniq */ uniq<T, TSort>( array: List<T>, isSorted?: boolean, iteratee?: ListIterator<T, TSort>, thisArg?: any ): T[]; /** * @see _.uniq */ uniq<T>( array: List<T>, iteratee?: ListIterator<T, any>, thisArg?: any ): T[]; /** * @see _.uniq */ uniq<T, TSort>( array: List<T>, iteratee?: ListIterator<T, TSort>, thisArg?: any ): T[]; /** * @see _.uniq */ uniq<T>( array: List<T>, isSorted?: boolean, iteratee?: string, thisArg?: any ): T[]; /** * @see _.uniq */ uniq<T>( array: List<T>, iteratee?: string, thisArg?: any ): T[]; /** * @see _.uniq */ uniq<T>( array: List<T>, isSorted?: boolean, iteratee?: Object ): T[]; /** * @see _.uniq */ uniq<TWhere extends {}, T>( array: List<T>, isSorted?: boolean, iteratee?: TWhere ): T[]; /** * @see _.uniq */ uniq<T>( array: List<T>, iteratee?: Object ): T[]; /** * @see _.uniq */ uniq<TWhere extends {}, T>( array: List<T>, iteratee?: TWhere ): T[]; } interface LoDashImplicitWrapper<T> { /** * @see _.uniq */ uniq<TSort>( isSorted?: boolean, iteratee?: ListIterator<T, TSort>, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.uniq */ uniq<TSort>( iteratee?: ListIterator<T, TSort>, thisArg?: any ): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.uniq */ uniq<TSort>( isSorted?: boolean, iteratee?: ListIterator<T, TSort>, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.uniq */ uniq<TSort>( iteratee?: ListIterator<T, TSort>, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.uniq */ uniq( isSorted?: boolean, iteratee?: string, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.uniq */ uniq( iteratee?: string, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.uniq */ uniq<TWhere extends {}>( isSorted?: 
boolean,
        iteratee?: TWhere
    ): LoDashImplicitArrayWrapper<T>;

    /**
     * @see _.uniq
     */
    uniq<TWhere extends {}>(
        iteratee?: TWhere
    ): LoDashImplicitArrayWrapper<T>;
}

interface LoDashImplicitObjectWrapper<T> {
    /**
     * @see _.uniq
     */
    uniq<T>(
        isSorted?: boolean,
        iteratee?: ListIterator<T, any>,
        thisArg?: any
    ): LoDashImplicitArrayWrapper<T>;

    /**
     * @see _.uniq
     */
    uniq<T, TSort>(
        isSorted?: boolean,
        iteratee?: ListIterator<T, TSort>,
        thisArg?: any
    ): LoDashImplicitArrayWrapper<T>;

    /**
     * @see _.uniq
     */
    uniq<T>(
        iteratee?: ListIterator<T, any>,
        thisArg?: any
    ): LoDashImplicitArrayWrapper<T>;

    /**
     * @see _.uniq
     */
    uniq<T, TSort>(
        iteratee?: ListIterator<T, TSort>,
        thisArg?: any
    ): LoDashImplicitArrayWrapper<T>;

    /**
     * @see _.uniq
     */
    uniq<T>(
        isSorted?: boolean,
        iteratee?: string,
        thisArg?: any
    ): LoDashImplicitArrayWrapper<T>;

    /**
     * @see _.uniq
     */
    uniq<T>(
        iteratee?: string,
        thisArg?: any
    ): LoDashImplicitArrayWrapper<T>;

    /**
     * @see _.uniq
     */
    uniq<T>(
        isSorted?: boolean,
        iteratee?: Object
    ): LoDashImplicitArrayWrapper<T>;

    /**
     * @see _.uniq
     */
    uniq<TWhere extends {}, T>(
        isSorted?: boolean,
        iteratee?: TWhere
    ): LoDashImplicitArrayWrapper<T>;

    /**
     * @see _.uniq
     */
    uniq<T>(
        iteratee?: Object
    ): LoDashImplicitArrayWrapper<T>;

    /**
     * @see _.uniq
     */
    uniq<TWhere extends {}, T>(
        iteratee?: TWhere
    ): LoDashImplicitArrayWrapper<T>;
}

interface LoDashExplicitWrapper<T> {
    /**
     * @see _.uniq
     */
    uniq<TSort>(
        isSorted?: boolean,
        iteratee?: ListIterator<T, TSort>,
        thisArg?: any
    ): LoDashExplicitArrayWrapper<T>;

    /**
     * @see _.uniq
     */
    uniq<TSort>(
        iteratee?: ListIterator<T, TSort>,
        thisArg?: any
    ): LoDashExplicitArrayWrapper<T>;
}

interface LoDashExplicitArrayWrapper<T> {
    /**
     * @see _.uniq
     */
    uniq<TSort>(
        isSorted?: boolean,
        iteratee?: ListIterator<T, TSort>,
        thisArg?: any
    ): LoDashExplicitArrayWrapper<T>;

    /**
     * @see _.uniq
     */
    uniq<TSort>(
        iteratee?: ListIterator<T, TSort>,
        thisArg?: any
    ): LoDashExplicitArrayWrapper<T>;

    /**
     * @see _.uniq
     */
    uniq(
        isSorted?: boolean,
        iteratee?: string,
        thisArg?: any
    ):
LoDashExplicitArrayWrapper<T>; /** * @see _.uniq */ uniq( iteratee?: string, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.uniq */ uniq<TWhere extends {}>( isSorted?: boolean, iteratee?: TWhere ): LoDashExplicitArrayWrapper<T>; /** * @see _.uniq */ uniq<TWhere extends {}>( iteratee?: TWhere ): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { uniq<T>( isSorted?: boolean, iteratee?: ListIterator<T, any>, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.uniq */ uniq<T, TSort>( isSorted?: boolean, iteratee?: ListIterator<T, TSort>, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.uniq */ uniq<T>( iteratee?: ListIterator<T, any>, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.uniq */ uniq<T, TSort>( iteratee?: ListIterator<T, TSort>, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.uniq */ uniq<T>( isSorted?: boolean, iteratee?: string, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.uniq */ uniq<T>( iteratee?: string, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.uniq */ uniq<T>( isSorted?: boolean, iteratee?: Object ): LoDashExplicitArrayWrapper<T>; /** * @see _.uniq */ uniq<TWhere extends {}, T>( isSorted?: boolean, iteratee?: TWhere ): LoDashExplicitArrayWrapper<T>; /** * @see _.uniq */ uniq<T>( iteratee?: Object ): LoDashExplicitArrayWrapper<T>; /** * @see _.uniq */ uniq<TWhere extends {}, T>( iteratee?: TWhere ): LoDashExplicitArrayWrapper<T>; } //_.unique interface LoDashStatic { /** * @see _.uniq */ unique<T>( array: List<T>, isSorted?: boolean, iteratee?: ListIterator<T, any>, thisArg?: any ): T[]; /** * @see _.uniq */ unique<T, TSort>( array: List<T>, isSorted?: boolean, iteratee?: ListIterator<T, TSort>, thisArg?: any ): T[]; /** * @see _.uniq */ unique<T>( array: List<T>, iteratee?: ListIterator<T, any>, thisArg?: any ): T[]; /** * @see _.uniq */ unique<T, TSort>( array: List<T>, iteratee?: ListIterator<T, TSort>, thisArg?: any ): T[]; /** 
* @see _.uniq */ unique<T>( array: List<T>, isSorted?: boolean, iteratee?: string, thisArg?: any ): T[]; /** * @see _.uniq */ unique<T>( array: List<T>, iteratee?: string, thisArg?: any ): T[]; /** * @see _.uniq */ unique<T>( array: List<T>, isSorted?: boolean, iteratee?: Object ): T[]; /** * @see _.uniq */ unique<TWhere extends {}, T>( array: List<T>, isSorted?: boolean, iteratee?: TWhere ): T[]; /** * @see _.uniq */ unique<T>( array: List<T>, iteratee?: Object ): T[]; /** * @see _.uniq */ unique<TWhere extends {}, T>( array: List<T>, iteratee?: TWhere ): T[]; } interface LoDashImplicitWrapper<T> { /** * @see _.uniq */ unique<TSort>( isSorted?: boolean, iteratee?: ListIterator<T, TSort>, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.uniq */ unique<TSort>( iteratee?: ListIterator<T, TSort>, thisArg?: any ): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.uniq */ unique<TSort>( isSorted?: boolean, iteratee?: ListIterator<T, TSort>, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.uniq */ unique<TSort>( iteratee?: ListIterator<T, TSort>, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.uniq */ unique( isSorted?: boolean, iteratee?: string, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.uniq */ unique( iteratee?: string, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.uniq */ unique<TWhere extends {}>( isSorted?: boolean, iteratee?: TWhere ): LoDashImplicitArrayWrapper<T>; /** * @see _.uniq */ unique<TWhere extends {}>( iteratee?: TWhere ): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { unique<T>( isSorted?: boolean, iteratee?: ListIterator<T, any>, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.uniq */ unique<T, TSort>( isSorted?: boolean, iteratee?: ListIterator<T, TSort>, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.uniq */ unique<T>( iteratee?: ListIterator<T, any>, thisArg?: any ): 
LoDashImplicitArrayWrapper<T>; /** * @see _.uniq */ unique<T, TSort>( iteratee?: ListIterator<T, TSort>, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.uniq */ unique<T>( isSorted?: boolean, iteratee?: string, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.uniq */ unique<T>( iteratee?: string, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.uniq */ unique<T>( isSorted?: boolean, iteratee?: Object ): LoDashImplicitArrayWrapper<T>; /** * @see _.uniq */ unique<TWhere extends {}, T>( isSorted?: boolean, iteratee?: TWhere ): LoDashImplicitArrayWrapper<T>; /** * @see _.uniq */ unique<T>( iteratee?: Object ): LoDashImplicitArrayWrapper<T>; /** * @see _.uniq */ unique<TWhere extends {}, T>( iteratee?: TWhere ): LoDashImplicitArrayWrapper<T>; } interface LoDashExplicitWrapper<T> { /** * @see _.uniq */ unique<TSort>( isSorted?: boolean, iteratee?: ListIterator<T, TSort>, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.uniq */ unique<TSort>( iteratee?: ListIterator<T, TSort>, thisArg?: any ): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.uniq */ unique<TSort>( isSorted?: boolean, iteratee?: ListIterator<T, TSort>, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.uniq */ unique<TSort>( iteratee?: ListIterator<T, TSort>, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.uniq */ unique( isSorted?: boolean, iteratee?: string, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.uniq */ unique( iteratee?: string, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.uniq */ unique<TWhere extends {}>( isSorted?: boolean, iteratee?: TWhere ): LoDashExplicitArrayWrapper<T>; /** * @see _.uniq */ unique<TWhere extends {}>( iteratee?: TWhere ): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { unique<T>( isSorted?: boolean, iteratee?: ListIterator<T, any>, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.uniq */ 
unique<T, TSort>( isSorted?: boolean, iteratee?: ListIterator<T, TSort>, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.uniq */ unique<T>( iteratee?: ListIterator<T, any>, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.uniq */ unique<T, TSort>( iteratee?: ListIterator<T, TSort>, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.uniq */ unique<T>( isSorted?: boolean, iteratee?: string, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.uniq */ unique<T>( iteratee?: string, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.uniq */ unique<T>( isSorted?: boolean, iteratee?: Object ): LoDashExplicitArrayWrapper<T>; /** * @see _.uniq */ unique<TWhere extends {}, T>( isSorted?: boolean, iteratee?: TWhere ): LoDashExplicitArrayWrapper<T>; /** * @see _.uniq */ unique<T>( iteratee?: Object ): LoDashExplicitArrayWrapper<T>; /** * @see _.uniq */ unique<TWhere extends {}, T>( iteratee?: TWhere ): LoDashExplicitArrayWrapper<T>; } //_.unzip interface LoDashStatic { /** * This method is like _.zip except that it accepts an array of grouped elements and creates an array * regrouping the elements to their pre-zip configuration. * * @param array The array of grouped elements to process. * @return Returns the new array of regrouped elements. */ unzip<T>(array: List<List<T>>): T[][]; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.unzip */ unzip<T>(): LoDashImplicitArrayWrapper<T[]>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.unzip */ unzip<T>(): LoDashImplicitArrayWrapper<T[]>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.unzip */ unzip<T>(): LoDashExplicitArrayWrapper<T[]>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.unzip */ unzip<T>(): LoDashExplicitArrayWrapper<T[]>; } //_.unzipWith interface LoDashStatic { /** * This method is like _.unzip except that it accepts an iteratee to specify how regrouped values should be * combined. 
The iteratee is bound to thisArg and invoked with four arguments: (accumulator, value, index, * group). * * @param array The array of grouped elements to process. * @param iteratee The function to combine regrouped values. * @param thisArg The this binding of iteratee. * @return Returns the new array of regrouped elements. */ unzipWith<TArray, TResult>( array: List<List<TArray>>, iteratee?: MemoIterator<TArray, TResult>, thisArg?: any ): TResult[]; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.unzipWith */ unzipWith<TArr, TResult>( iteratee?: MemoIterator<TArr, TResult>, thisArg?: any ): LoDashImplicitArrayWrapper<TResult>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.unzipWith */ unzipWith<TArr, TResult>( iteratee?: MemoIterator<TArr, TResult>, thisArg?: any ): LoDashImplicitArrayWrapper<TResult>; } //_.without interface LoDashStatic { /** * Creates an array excluding all provided values using SameValueZero for equality comparisons. * * @param array The array to filter. * @param values The values to exclude. * @return Returns the new array of filtered values. */ without<T>( array: List<T>, ...values: T[] ): T[]; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.without */ without(...values: T[]): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.without */ without<T>(...values: T[]): LoDashImplicitArrayWrapper<T>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.without */ without(...values: T[]): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.without */ without<T>(...values: T[]): LoDashExplicitArrayWrapper<T>; } //_.xor interface LoDashStatic { /** * Creates an array of unique values that is the symmetric difference of the provided arrays. * * @param arrays The arrays to inspect. * @return Returns the new array of values. 
*/
    // symmetric difference of the given arrays, per the _.xor contract above
    xor<T>(...arrays: List<T>[]): T[];
}

interface LoDashImplicitArrayWrapper<T> {
    /**
     * @see _.xor
     */
    xor(...arrays: List<T>[]): LoDashImplicitArrayWrapper<T>;
}

interface LoDashImplicitObjectWrapper<T> {
    /**
     * @see _.xor
     */
    xor<T>(...arrays: List<T>[]): LoDashImplicitArrayWrapper<T>;
}

interface LoDashExplicitArrayWrapper<T> {
    /**
     * @see _.xor
     */
    xor(...arrays: List<T>[]): LoDashExplicitArrayWrapper<T>;
}

interface LoDashExplicitObjectWrapper<T> {
    /**
     * @see _.xor
     */
    xor<T>(...arrays: List<T>[]): LoDashExplicitArrayWrapper<T>;
}

//_.zip
interface LoDashStatic {
    /**
     * Creates an array of grouped elements, the first of which contains the first elements of the given arrays,
     * the second of which contains the second elements of the given arrays, and so on.
     *
     * @param arrays The arrays to process.
     * @return Returns the new array of grouped elements.
     */
    zip<T>(...arrays: List<T>[]): T[][];
}

interface LoDashImplicitArrayWrapper<T> {
    /**
     * @see _.zip
     */
    zip<T>(...arrays: List<T>[]): _.LoDashImplicitArrayWrapper<T[]>;
}

interface LoDashImplicitObjectWrapper<T> {
    /**
     * @see _.zip
     */
    zip<T>(...arrays: List<T>[]): _.LoDashImplicitArrayWrapper<T[]>;
}

interface LoDashExplicitArrayWrapper<T> {
    /**
     * @see _.zip
     */
    zip<T>(...arrays: List<T>[]): _.LoDashExplicitArrayWrapper<T[]>;
}

interface LoDashExplicitObjectWrapper<T> {
    /**
     * @see _.zip
     */
    zip<T>(...arrays: List<T>[]): _.LoDashExplicitArrayWrapper<T[]>;
}

//_.zipObject
interface LoDashStatic {
    /**
     * The inverse of _.pairs; this method returns an object composed from arrays of property names and values.
     * Provide either a single two dimensional array, e.g. [[key1, value1], [key2, value2]] or two arrays, one of
     * property names and one of corresponding values.
     *
     * @alias _.object
     *
     * @param props The property names.
     * @param values The property values.
     * @return Returns the new object.
*/ zipObject<TValues, TResult extends {}>( props: List<StringRepresentable>|List<List<any>>, values?: List<TValues> ): TResult; /** * @see _.zipObject */ zipObject<TResult extends {}>( props: List<StringRepresentable>|List<List<any>>, values?: List<any> ): TResult; /** * @see _.zipObject */ zipObject( props: List<StringRepresentable>|List<List<any>>, values?: List<any> ): _.Dictionary<any>; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.zipObject */ zipObject<TValues, TResult extends {}>( values?: List<TValues> ): _.LoDashImplicitObjectWrapper<TResult>; /** * @see _.zipObject */ zipObject<TResult extends {}>( values?: List<any> ): _.LoDashImplicitObjectWrapper<TResult>; /** * @see _.zipObject */ zipObject( values?: List<any> ): _.LoDashImplicitObjectWrapper<_.Dictionary<any>>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.zipObject */ zipObject<TValues, TResult extends {}>( values?: List<TValues> ): _.LoDashImplicitObjectWrapper<TResult>; /** * @see _.zipObject */ zipObject<TResult extends {}>( values?: List<any> ): _.LoDashImplicitObjectWrapper<TResult>; /** * @see _.zipObject */ zipObject( values?: List<any> ): _.LoDashImplicitObjectWrapper<_.Dictionary<any>>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.zipObject */ zipObject<TValues, TResult extends {}>( values?: List<TValues> ): _.LoDashExplicitObjectWrapper<TResult>; /** * @see _.zipObject */ zipObject<TResult extends {}>( values?: List<any> ): _.LoDashExplicitObjectWrapper<TResult>; /** * @see _.zipObject */ zipObject( values?: List<any> ): _.LoDashExplicitObjectWrapper<_.Dictionary<any>>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.zipObject */ zipObject<TValues, TResult extends {}>( values?: List<TValues> ): _.LoDashExplicitObjectWrapper<TResult>; /** * @see _.zipObject */ zipObject<TResult extends {}>( values?: List<any> ): _.LoDashExplicitObjectWrapper<TResult>; /** * @see _.zipObject */ zipObject( values?: List<any> ): 
_.LoDashExplicitObjectWrapper<_.Dictionary<any>>; } //_.zipWith interface LoDashStatic { /** * This method is like _.zip except that it accepts an iteratee to specify how grouped values should be * combined. The iteratee is bound to thisArg and invoked with four arguments: (accumulator, value, index, * group). * @param {...Array} [arrays] The arrays to process. * @param {Function} [iteratee] The function to combine grouped values. * @param {*} [thisArg] The `this` binding of `iteratee`. * @return Returns the new array of grouped elements. */ zipWith<TResult>(...args: any[]): TResult[]; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.zipWith */ zipWith<TResult>(...args: any[]): LoDashImplicitArrayWrapper<TResult>; } /********* * Chain * *********/ //_.chain interface LoDashStatic { /** * Creates a lodash object that wraps value with explicit method chaining enabled. * * @param value The value to wrap. * @return Returns the new lodash wrapper instance. */ chain(value: number): LoDashExplicitWrapper<number>; chain(value: string): LoDashExplicitWrapper<string>; chain(value: boolean): LoDashExplicitWrapper<boolean>; chain<T>(value: T[]): LoDashExplicitArrayWrapper<T>; chain<T extends {}>(value: T): LoDashExplicitObjectWrapper<T>; chain(value: any): LoDashExplicitWrapper<any>; } interface LoDashImplicitWrapper<T> { /** * @see _.chain */ chain(): LoDashExplicitWrapper<T>; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.chain */ chain(): LoDashExplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.chain */ chain(): LoDashExplicitObjectWrapper<T>; } interface LoDashExplicitWrapperBase<T, TWrapper> { /** * @see _.chain */ chain(): TWrapper; } //_.tap interface LoDashStatic { /** * This method invokes interceptor and returns value. The interceptor is bound to thisArg and invoked with one * argument; (value). 
The purpose of this method is to "tap into" a method chain in order to perform operations * on intermediate results within the chain. * * @param value The value to provide to interceptor. * @param interceptor The function to invoke. * @parem thisArg The this binding of interceptor. * @return Returns value. **/ tap<T>( value: T, interceptor: (value: T) => void, thisArg?: any ): T; } interface LoDashImplicitWrapperBase<T, TWrapper> { /** * @see _.tap */ tap( interceptor: (value: T) => void, thisArg?: any ): TWrapper; } interface LoDashExplicitWrapperBase<T, TWrapper> { /** * @see _.tap */ tap( interceptor: (value: T) => void, thisArg?: any ): TWrapper; } //_.thru interface LoDashStatic { /** * This method is like _.tap except that it returns the result of interceptor. * * @param value The value to provide to interceptor. * @param interceptor The function to invoke. * @param thisArg The this binding of interceptor. * @return Returns the result of interceptor. */ thru<T, TResult>( value: T, interceptor: (value: T) => TResult, thisArg?: any ): TResult; } interface LoDashImplicitWrapperBase<T, TWrapper> { /** * @see _.thru */ thru<TResult extends number>( interceptor: (value: T) => TResult, thisArg?: any): LoDashImplicitWrapper<TResult>; /** * @see _.thru */ thru<TResult extends string>( interceptor: (value: T) => TResult, thisArg?: any): LoDashImplicitWrapper<TResult>; /** * @see _.thru */ thru<TResult extends boolean>( interceptor: (value: T) => TResult, thisArg?: any): LoDashImplicitWrapper<TResult>; /** * @see _.thru */ thru<TResult extends {}>( interceptor: (value: T) => TResult, thisArg?: any): LoDashImplicitObjectWrapper<TResult>; /** * @see _.thru */ thru<TResult>( interceptor: (value: T) => TResult[], thisArg?: any): LoDashImplicitArrayWrapper<TResult>; } interface LoDashExplicitWrapperBase<T, TWrapper> { /** * @see _.thru */ thru<TResult extends number>( interceptor: (value: T) => TResult, thisArg?: any ): LoDashExplicitWrapper<TResult>; /** * @see _.thru */ 
thru<TResult extends string>( interceptor: (value: T) => TResult, thisArg?: any ): LoDashExplicitWrapper<TResult>; /** * @see _.thru */ thru<TResult extends boolean>( interceptor: (value: T) => TResult, thisArg?: any ): LoDashExplicitWrapper<TResult>; /** * @see _.thru */ thru<TResult extends {}>( interceptor: (value: T) => TResult, thisArg?: any ): LoDashExplicitObjectWrapper<TResult>; /** * @see _.thru */ thru<TResult>( interceptor: (value: T) => TResult[], thisArg?: any ): LoDashExplicitArrayWrapper<TResult>; } //_.prototype.commit interface LoDashImplicitWrapperBase<T, TWrapper> { /** * Executes the chained sequence and returns the wrapped result. * * @return Returns the new lodash wrapper instance. */ commit(): TWrapper; } interface LoDashExplicitWrapperBase<T, TWrapper> { /** * @see _.commit */ commit(): TWrapper; } //_.prototype.concat interface LoDashImplicitWrapperBase<T, TWrapper> { /** * Creates a new array joining a wrapped array with any additional arrays and/or values. * * @param items * @return Returns the new concatenated array. */ concat<TItem>(...items: Array<TItem|Array<TItem>>): LoDashImplicitArrayWrapper<TItem>; /** * @see _.concat */ concat(...items: Array<T|Array<T>>): LoDashImplicitArrayWrapper<T>; } interface LoDashExplicitWrapperBase<T, TWrapper> { /** * @see _.concat */ concat<TItem>(...items: Array<TItem|Array<TItem>>): LoDashExplicitArrayWrapper<TItem>; /** * @see _.concat */ concat(...items: Array<T|Array<T>>): LoDashExplicitArrayWrapper<T>; } //_.prototype.plant interface LoDashImplicitWrapperBase<T, TWrapper> { /** * Creates a clone of the chained sequence planting value as the wrapped value. * @param value The value to plant as the wrapped value. * @return Returns the new lodash wrapper instance. 
*/ plant(value: number): LoDashImplicitWrapper<number>; /** * @see _.plant */ plant(value: string): LoDashImplicitStringWrapper; /** * @see _.plant */ plant(value: boolean): LoDashImplicitWrapper<boolean>; /** * @see _.plant */ plant(value: number[]): LoDashImplicitNumberArrayWrapper; /** * @see _.plant */ plant<T>(value: T[]): LoDashImplicitArrayWrapper<T>; /** * @see _.plant */ plant<T extends {}>(value: T): LoDashImplicitObjectWrapper<T>; /** * @see _.plant */ plant(value: any): LoDashImplicitWrapper<any>; } interface LoDashExplicitWrapperBase<T, TWrapper> { /** * @see _.plant */ plant(value: number): LoDashExplicitWrapper<number>; /** * @see _.plant */ plant(value: string): LoDashExplicitStringWrapper; /** * @see _.plant */ plant(value: boolean): LoDashExplicitWrapper<boolean>; /** * @see _.plant */ plant(value: number[]): LoDashExplicitNumberArrayWrapper; /** * @see _.plant */ plant<T>(value: T[]): LoDashExplicitArrayWrapper<T>; /** * @see _.plant */ plant<T extends {}>(value: T): LoDashExplicitObjectWrapper<T>; /** * @see _.plant */ plant(value: any): LoDashExplicitWrapper<any>; } //_.prototype.reverse interface LoDashImplicitArrayWrapper<T> { /** * Reverses the wrapped array so the first element becomes the last, the second element becomes the second to * last, and so on. * * Note: This method mutates the wrapped array. * * @return Returns the new reversed lodash wrapper instance. */ reverse(): LoDashImplicitArrayWrapper<T>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.reverse */ reverse(): LoDashExplicitArrayWrapper<T>; } //_.prototype.run interface LoDashWrapperBase<T, TWrapper> { /** * @see _.value */ run(): T; } //_.prototype.toJSON interface LoDashWrapperBase<T, TWrapper> { /** * @see _.value */ toJSON(): T; } //_.prototype.toString interface LoDashWrapperBase<T, TWrapper> { /** * Produces the result of coercing the unwrapped value to a string. * * @return Returns the coerced string value. 
*/ toString(): string; } //_.prototype.value interface LoDashWrapperBase<T, TWrapper> { /** * Executes the chained sequence to extract the unwrapped value. * * @alias _.run, _.toJSON, _.valueOf * * @return Returns the resolved unwrapped value. */ value(): T; } //_.valueOf interface LoDashWrapperBase<T, TWrapper> { /** * @see _.value */ valueOf(): T; } /************** * Collection * **************/ //_.all interface LoDashStatic { /** * @see _.every */ all<T>( collection: List<T>, predicate?: ListIterator<T, boolean>, thisArg?: any ): boolean; /** * @see _.every */ all<T>( collection: Dictionary<T>, predicate?: DictionaryIterator<T, boolean>, thisArg?: any ): boolean; /** * @see _.every */ all<T>( collection: List<T>|Dictionary<T>, predicate?: string, thisArg?: any ): boolean; /** * @see _.every */ all<TObject extends {}, T>( collection: List<T>|Dictionary<T>, predicate?: TObject ): boolean; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.every */ all( predicate?: ListIterator<T, boolean>, thisArg?: any ): boolean; /** * @see _.every */ all( predicate?: string, thisArg?: any ): boolean; /** * @see _.every */ all<TObject extends {}>( predicate?: TObject ): boolean; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.every */ all<TResult>( predicate?: ListIterator<TResult, boolean>|DictionaryIterator<TResult, boolean>, thisArg?: any ): boolean; /** * @see _.every */ all( predicate?: string, thisArg?: any ): boolean; /** * @see _.every */ all<TObject extends {}>( predicate?: TObject ): boolean; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.every */ all( predicate?: ListIterator<T, boolean>, thisArg?: any ): LoDashExplicitWrapper<boolean>; /** * @see _.every */ all( predicate?: string, thisArg?: any ): LoDashExplicitWrapper<boolean>; /** * @see _.every */ all<TObject extends {}>( predicate?: TObject ): LoDashExplicitWrapper<boolean>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.every */ all<TResult>( predicate?: 
ListIterator<TResult, boolean>|DictionaryIterator<TResult, boolean>, thisArg?: any ): LoDashExplicitWrapper<boolean>; /** * @see _.every */ all( predicate?: string, thisArg?: any ): LoDashExplicitWrapper<boolean>; /** * @see _.every */ all<TObject extends {}>( predicate?: TObject ): LoDashExplicitWrapper<boolean>; } //_.any interface LoDashStatic { /** * @see _.some */ any<T>( collection: List<T>, predicate?: ListIterator<T, boolean>, thisArg?: any ): boolean; /** * @see _.some */ any<T>( collection: Dictionary<T>, predicate?: DictionaryIterator<T, boolean>, thisArg?: any ): boolean; /** * @see _.some */ any<T>( collection: NumericDictionary<T>, predicate?: NumericDictionaryIterator<T, boolean>, thisArg?: any ): boolean; /** * @see _.some */ any<T>( collection: List<T>|Dictionary<T>|NumericDictionary<T>, predicate?: string, thisArg?: any ): boolean; /** * @see _.some */ any<TObject extends {}, T>( collection: List<T>|Dictionary<T>|NumericDictionary<T>, predicate?: TObject ): boolean; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.some */ any( predicate?: ListIterator<T, boolean>|NumericDictionaryIterator<T, boolean>, thisArg?: any ): boolean; /** * @see _.some */ any( predicate?: string, thisArg?: any ): boolean; /** * @see _.some */ any<TObject extends {}>( predicate?: TObject ): boolean; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.some */ any<TResult>( predicate?: ListIterator<TResult, boolean>|DictionaryIterator<TResult, boolean>|NumericDictionaryIterator<T, boolean>, thisArg?: any ): boolean; /** * @see _.some */ any( predicate?: string, thisArg?: any ): boolean; /** * @see _.some */ any<TObject extends {}>( predicate?: TObject ): boolean; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.some */ any( predicate?: ListIterator<T, boolean>|NumericDictionaryIterator<T, boolean>, thisArg?: any ): LoDashExplicitWrapper<boolean>; /** * @see _.some */ any( predicate?: string, thisArg?: any ): LoDashExplicitWrapper<boolean>; /** * @see 
_.some */ any<TObject extends {}>( predicate?: TObject ): LoDashExplicitWrapper<boolean>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.some */ any<TResult>( predicate?: ListIterator<TResult, boolean>|DictionaryIterator<TResult, boolean>|NumericDictionaryIterator<T, boolean>, thisArg?: any ): LoDashExplicitWrapper<boolean>; /** * @see _.some */ any( predicate?: string, thisArg?: any ): LoDashExplicitWrapper<boolean>; /** * @see _.some */ any<TObject extends {}>( predicate?: TObject ): LoDashExplicitWrapper<boolean>; } //_.at interface LoDashStatic { /** * Creates an array of elements corresponding to the given keys, or indexes, of collection. Keys may be * specified as individual arguments or as arrays of keys. * * @param collection The collection to iterate over. * @param props The property names or indexes of elements to pick, specified individually or in arrays. * @return Returns the new array of picked elements. */ at<T>( collection: List<T>|Dictionary<T>, ...props: (number|string|(number|string)[])[] ): T[]; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.at */ at(...props: (number|string|(number|string)[])[]): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.at */ at<T>(...props: (number|string|(number|string)[])[]): LoDashImplicitArrayWrapper<T>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.at */ at(...props: (number|string|(number|string)[])[]): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.at */ at<T>(...props: (number|string|(number|string)[])[]): LoDashExplicitArrayWrapper<T>; } //_.collect interface LoDashStatic { /** * @see _.map */ collect<T, TResult>( collection: List<T>, iteratee?: ListIterator<T, TResult>, thisArg?: any ): TResult[]; /** * @see _.map */ collect<T extends {}, TResult>( collection: Dictionary<T>, iteratee?: DictionaryIterator<T, TResult>, thisArg?: any ): TResult[]; /** * @see _.map */ collect<T, TResult>( collection: 
List<T>|Dictionary<T>, iteratee?: string ): TResult[]; /** * @see _.map */ collect<T, TObject extends {}>( collection: List<T>|Dictionary<T>, iteratee?: TObject ): boolean[]; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.map */ collect<TResult>( iteratee?: ListIterator<T, TResult>, thisArg?: any ): LoDashImplicitArrayWrapper<TResult>; /** * @see _.map */ collect<TResult>( iteratee?: string ): LoDashImplicitArrayWrapper<TResult>; /** * @see _.map */ collect<TObject extends {}>( iteratee?: TObject ): LoDashImplicitArrayWrapper<boolean>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.map */ collect<TValue, TResult>( iteratee?: ListIterator<TValue, TResult>|DictionaryIterator<TValue, TResult>, thisArg?: any ): LoDashImplicitArrayWrapper<TResult>; /** * @see _.map */ collect<TValue, TResult>( iteratee?: string ): LoDashImplicitArrayWrapper<TResult>; /** * @see _.map */ collect<TObject extends {}>( iteratee?: TObject ): LoDashImplicitArrayWrapper<boolean>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.map */ collect<TResult>( iteratee?: ListIterator<T, TResult>, thisArg?: any ): LoDashExplicitArrayWrapper<TResult>; /** * @see _.map */ collect<TResult>( iteratee?: string ): LoDashExplicitArrayWrapper<TResult>; /** * @see _.map */ collect<TObject extends {}>( iteratee?: TObject ): LoDashExplicitArrayWrapper<boolean>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.map */ collect<TValue, TResult>( iteratee?: ListIterator<TValue, TResult>|DictionaryIterator<TValue, TResult>, thisArg?: any ): LoDashExplicitArrayWrapper<TResult>; /** * @see _.map */ collect<TValue, TResult>( iteratee?: string ): LoDashExplicitArrayWrapper<TResult>; /** * @see _.map */ collect<TObject extends {}>( iteratee?: TObject ): LoDashExplicitArrayWrapper<boolean>; } //_.contains interface LoDashStatic { /** * @see _.includes */ contains<T>( collection: List<T>|Dictionary<T>, target: T, fromIndex?: number ): boolean; /** * @see _.includes */ contains( 
collection: string, target: string, fromIndex?: number ): boolean; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.includes */ contains( target: T, fromIndex?: number ): boolean; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.includes */ contains<TValue>( target: TValue, fromIndex?: number ): boolean; } interface LoDashImplicitWrapper<T> { /** * @see _.includes */ contains( target: string, fromIndex?: number ): boolean; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.includes */ contains( target: T, fromIndex?: number ): LoDashExplicitWrapper<boolean>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.includes */ contains<TValue>( target: TValue, fromIndex?: number ): LoDashExplicitWrapper<boolean>; } interface LoDashExplicitWrapper<T> { /** * @see _.includes */ contains( target: string, fromIndex?: number ): LoDashExplicitWrapper<boolean>; } //_.countBy interface LoDashStatic { /** * Creates an object composed of keys generated from the results of running each element of collection through * iteratee. The corresponding value of each key is the number of times the key was returned by iteratee. The * iteratee is bound to thisArg and invoked with three arguments: * (value, index|key, collection). * * If a property name is provided for iteratee the created _.property style callback returns the property * value of the given element. * * If a value is also provided for thisArg the created _.matchesProperty style callback returns true for * elements that have a matching property value, else false. * * If an object is provided for iteratee the created _.matches style callback returns true for elements that * have the properties of the given object, else false. * * @param collection The collection to iterate over. * @param iteratee The function invoked per iteration. * @param thisArg The this binding of iteratee. * @return Returns the composed aggregate object. 
*/ countBy<T>( collection: List<T>, iteratee?: ListIterator<T, any>, thisArg?: any ): Dictionary<number>; /** * @see _.countBy */ countBy<T>( collection: Dictionary<T>, iteratee?: DictionaryIterator<T, any>, thisArg?: any ): Dictionary<number>; /** * @see _.countBy */ countBy<T>( collection: NumericDictionary<T>, iteratee?: NumericDictionaryIterator<T, any>, thisArg?: any ): Dictionary<number>; /** * @see _.countBy */ countBy<T>( collection: List<T>|Dictionary<T>|NumericDictionary<T>, iteratee?: string, thisArg?: any ): Dictionary<number>; /** * @see _.countBy */ countBy<W, T>( collection: List<T>|Dictionary<T>|NumericDictionary<T>, iteratee?: W ): Dictionary<number>; /** * @see _.countBy */ countBy<T>( collection: List<T>|Dictionary<T>|NumericDictionary<T>, iteratee?: Object ): Dictionary<number>; } interface LoDashImplicitWrapper<T> { /** * @see _.countBy */ countBy( iteratee?: ListIterator<T, any>, thisArg?: any ): LoDashImplicitObjectWrapper<Dictionary<number>>; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.countBy */ countBy( iteratee?: ListIterator<T, any>, thisArg?: any ): LoDashImplicitObjectWrapper<Dictionary<number>>; /** * @see _.countBy */ countBy( iteratee?: string, thisArg?: any ): LoDashImplicitObjectWrapper<Dictionary<number>>; /** * @see _.countBy */ countBy<W>( iteratee?: W ): LoDashImplicitObjectWrapper<Dictionary<number>>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.countBy */ countBy<T>( iteratee?: ListIterator<T, any>|DictionaryIterator<T, any>|NumericDictionaryIterator<T, any>, thisArg?: any ): LoDashImplicitObjectWrapper<Dictionary<number>>; /** * @see _.countBy */ countBy( iteratee?: string, thisArg?: any ): LoDashImplicitObjectWrapper<Dictionary<number>>; /** * @see _.countBy */ countBy<W>( iteratee?: W ): LoDashImplicitObjectWrapper<Dictionary<number>>; } interface LoDashExplicitWrapper<T> { /** * @see _.countBy */ countBy( iteratee?: ListIterator<T, any>, thisArg?: any ): 
LoDashExplicitObjectWrapper<Dictionary<number>>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.countBy */ countBy( iteratee?: ListIterator<T, any>, thisArg?: any ): LoDashExplicitObjectWrapper<Dictionary<number>>; /** * @see _.countBy */ countBy( iteratee?: string, thisArg?: any ): LoDashExplicitObjectWrapper<Dictionary<number>>; /** * @see _.countBy */ countBy<W>( iteratee?: W ): LoDashExplicitObjectWrapper<Dictionary<number>>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.countBy */ countBy<T>( iteratee?: ListIterator<T, any>|DictionaryIterator<T, any>|NumericDictionaryIterator<T, any>, thisArg?: any ): LoDashExplicitObjectWrapper<Dictionary<number>>; /** * @see _.countBy */ countBy( iteratee?: string, thisArg?: any ): LoDashExplicitObjectWrapper<Dictionary<number>>; /** * @see _.countBy */ countBy<W>( iteratee?: W ): LoDashExplicitObjectWrapper<Dictionary<number>>; } //_.detect interface LoDashStatic { /** * @see _.find */ detect<T>( collection: List<T>, predicate?: ListIterator<T, boolean>, thisArg?: any ): T; /** * @see _.find */ detect<T>( collection: Dictionary<T>, predicate?: DictionaryIterator<T, boolean>, thisArg?: any ): T; /** * @see _.find */ detect<T>( collection: List<T>|Dictionary<T>, predicate?: string, thisArg?: any ): T; /** * @see _.find */ detect<TObject extends {}, T>( collection: List<T>|Dictionary<T>, predicate?: TObject ): T; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.find */ detect( predicate?: ListIterator<T, boolean>, thisArg?: any ): T; /** * @see _.find */ detect( predicate?: string, thisArg?: any ): T; /** * @see _.find */ detect<TObject extends {}>( predicate?: TObject ): T; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.find */ detect<TResult>( predicate?: ListIterator<TResult, boolean>|DictionaryIterator<TResult, boolean>, thisArg?: any ): TResult; /** * @see _.find */ detect<TResult>( predicate?: string, thisArg?: any ): TResult; /** * @see _.find */ detect<TObject extends {}, 
TResult>( predicate?: TObject ): TResult; } //_.each interface LoDashStatic { /** * @see _.forEach */ each<T>( collection: T[], iteratee?: ListIterator<T, any>, thisArg?: any ): T[]; /** * @see _.forEach */ each<T>( collection: List<T>, iteratee?: ListIterator<T, any>, thisArg?: any ): List<T>; /** * @see _.forEach */ each<T>( collection: Dictionary<T>, iteratee?: DictionaryIterator<T, any>, thisArg?: any ): Dictionary<T>; /** * @see _.forEach */ each<T extends {}>( collection: T, iteratee?: ObjectIterator<any, any>, thisArgs?: any ): T; /** * @see _.forEach */ each<T extends {}, TValue>( collection: T, iteratee?: ObjectIterator<TValue, any>, thisArgs?: any ): T; } interface LoDashImplicitWrapper<T> { /** * @see _.forEach */ each( iteratee: ListIterator<string, any>, thisArg?: any ): LoDashImplicitWrapper<string>; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.forEach */ each( iteratee: ListIterator<T, any>, thisArg?: any ): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.forEach */ each<TValue>( iteratee?: ListIterator<TValue, any>|DictionaryIterator<TValue, any>, thisArg?: any ): LoDashImplicitObjectWrapper<T>; } interface LoDashExplicitWrapper<T> { /** * @see _.forEach */ each( iteratee: ListIterator<string, any>, thisArg?: any ): LoDashExplicitWrapper<string>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.forEach */ each( iteratee: ListIterator<T, any>, thisArg?: any ): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.forEach */ each<TValue>( iteratee?: ListIterator<TValue, any>|DictionaryIterator<TValue, any>, thisArg?: any ): LoDashExplicitObjectWrapper<T>; } //_.eachRight interface LoDashStatic { /** * @see _.forEachRight */ eachRight<T>( collection: T[], iteratee?: ListIterator<T, any>, thisArg?: any ): T[]; /** * @see _.forEachRight */ eachRight<T>( collection: List<T>, iteratee?: ListIterator<T, any>, thisArg?: any ): List<T>; /** * @see _.forEachRight */ 
eachRight<T>( collection: Dictionary<T>, iteratee?: DictionaryIterator<T, any>, thisArg?: any ): Dictionary<T>; /** * @see _.forEachRight */ eachRight<T extends {}>( collection: T, iteratee?: ObjectIterator<any, any>, thisArgs?: any ): T; /** * @see _.forEachRight */ eachRight<T extends {}, TValue>( collection: T, iteratee?: ObjectIterator<TValue, any>, thisArgs?: any ): T; } interface LoDashImplicitWrapper<T> { /** * @see _.forEachRight */ eachRight( iteratee: ListIterator<string, any>, thisArg?: any ): LoDashImplicitWrapper<string>; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.forEachRight */ eachRight( iteratee: ListIterator<T, any>, thisArg?: any ): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.forEachRight */ eachRight<TValue>( iteratee?: ListIterator<TValue, any>|DictionaryIterator<TValue, any>, thisArg?: any ): LoDashImplicitObjectWrapper<T>; } interface LoDashExplicitWrapper<T> { /** * @see _.forEachRight */ eachRight( iteratee: ListIterator<string, any>, thisArg?: any ): LoDashExplicitWrapper<string>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.forEachRight */ eachRight( iteratee: ListIterator<T, any>, thisArg?: any ): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.forEachRight */ eachRight<TValue>( iteratee?: ListIterator<TValue, any>|DictionaryIterator<TValue, any>, thisArg?: any ): LoDashExplicitObjectWrapper<T>; } //_.every interface LoDashStatic { /** * Checks if predicate returns truthy for all elements of collection. The predicate is bound to thisArg and * invoked with three arguments: (value, index|key, collection). * * If a property name is provided for predicate the created _.property style callback returns the property * value of the given element. * * If a value is also provided for thisArg the created _.matchesProperty style callback returns true for * elements that have a matching property value, else false. 
* * If an object is provided for predicate the created _.matches style callback returns true for elements that * have the properties of the given object, else false. * * @alias _.all * * @param collection The collection to iterate over. * @param predicate The function invoked per iteration. * @param thisArg The this binding of predicate. * @return Returns true if all elements pass the predicate check, else false. */ every<T>( collection: List<T>, predicate?: ListIterator<T, boolean>, thisArg?: any ): boolean; /** * @see _.every */ every<T>( collection: Dictionary<T>, predicate?: DictionaryIterator<T, boolean>, thisArg?: any ): boolean; /** * @see _.every */ every<T>( collection: List<T>|Dictionary<T>, predicate?: string, thisArg?: any ): boolean; /** * @see _.every */ every<TObject extends {}, T>( collection: List<T>|Dictionary<T>, predicate?: TObject ): boolean; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.every */ every( predicate?: ListIterator<T, boolean>, thisArg?: any ): boolean; /** * @see _.every */ every( predicate?: string, thisArg?: any ): boolean; /** * @see _.every */ every<TObject extends {}>( predicate?: TObject ): boolean; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.every */ every<TResult>( predicate?: ListIterator<TResult, boolean>|DictionaryIterator<TResult, boolean>, thisArg?: any ): boolean; /** * @see _.every */ every( predicate?: string, thisArg?: any ): boolean; /** * @see _.every */ every<TObject extends {}>( predicate?: TObject ): boolean; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.every */ every( predicate?: ListIterator<T, boolean>, thisArg?: any ): LoDashExplicitWrapper<boolean>; /** * @see _.every */ every( predicate?: string, thisArg?: any ): LoDashExplicitWrapper<boolean>; /** * @see _.every */ every<TObject extends {}>( predicate?: TObject ): LoDashExplicitWrapper<boolean>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.every */ every<TResult>( predicate?: ListIterator<TResult, 
boolean>|DictionaryIterator<TResult, boolean>, thisArg?: any ): LoDashExplicitWrapper<boolean>; /** * @see _.every */ every( predicate?: string, thisArg?: any ): LoDashExplicitWrapper<boolean>; /** * @see _.every */ every<TObject extends {}>( predicate?: TObject ): LoDashExplicitWrapper<boolean>; } //_.filter interface LoDashStatic { /** * Iterates over elements of collection, returning an array of all elements predicate returns truthy for. The * predicate is bound to thisArg and invoked with three arguments: (value, index|key, collection). * * If a property name is provided for predicate the created _.property style callback returns the property * value of the given element. * * If a value is also provided for thisArg the created _.matchesProperty style callback returns true for * elements that have a matching property value, else false. * * If an object is provided for predicate the created _.matches style callback returns true for elements that * have the properties of the given object, else false. * * @alias _.select * * @param collection The collection to iterate over. * @param predicate The function invoked per iteration. * @param thisArg The this binding of predicate. * @return Returns the new filtered array. 
*/ filter<T>( collection: List<T>, predicate?: ListIterator<T, boolean>, thisArg?: any ): T[]; /** * @see _.filter */ filter<T>( collection: Dictionary<T>, predicate?: DictionaryIterator<T, boolean>, thisArg?: any ): T[]; /** * @see _.filter */ filter( collection: string, predicate?: StringIterator<boolean>, thisArg?: any ): string[]; /** * @see _.filter */ filter<T>( collection: List<T>|Dictionary<T>, predicate: string, thisArg?: any ): T[]; /** * @see _.filter */ filter<W extends {}, T>( collection: List<T>|Dictionary<T>, predicate: W ): T[]; } interface LoDashImplicitWrapper<T> { /** * @see _.filter */ filter( predicate?: StringIterator<boolean>, thisArg?: any ): LoDashImplicitArrayWrapper<string>; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.filter */ filter( predicate: ListIterator<T, boolean>, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.filter */ filter( predicate: string, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.filter */ filter<W>(predicate: W): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.filter */ filter<T>( predicate: ListIterator<T, boolean>|DictionaryIterator<T, boolean>, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.filter */ filter<T>( predicate: string, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.filter */ filter<W, T>(predicate: W): LoDashImplicitArrayWrapper<T>; } interface LoDashExplicitWrapper<T> { /** * @see _.filter */ filter( predicate?: StringIterator<boolean>, thisArg?: any ): LoDashExplicitArrayWrapper<string>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.filter */ filter( predicate: ListIterator<T, boolean>, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.filter */ filter( predicate: string, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.filter */ filter<W>(predicate: W): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.filter */ 
filter<T>( predicate: ListIterator<T, boolean>|DictionaryIterator<T, boolean>, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.filter */ filter<T>( predicate: string, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.filter */ filter<W, T>(predicate: W): LoDashExplicitArrayWrapper<T>; } //_.find interface LoDashStatic { /** * Iterates over elements of collection, returning the first element predicate returns truthy for. * The predicate is bound to thisArg and invoked with three arguments: (value, index|key, collection). * * If a property name is provided for predicate the created _.property style callback returns the property * value of the given element. * * If a value is also provided for thisArg the created _.matchesProperty style callback returns true for * elements that have a matching property value, else false. * * If an object is provided for predicate the created _.matches style callback returns true for elements that * have the properties of the given object, else false. * * @alias _.detect * * @param collection The collection to search. * @param predicate The function invoked per iteration. * @param thisArg The this binding of predicate. * @return Returns the matched element, else undefined. 
*/ find<T>( collection: List<T>, predicate?: ListIterator<T, boolean>, thisArg?: any ): T; /** * @see _.find */ find<T>( collection: Dictionary<T>, predicate?: DictionaryIterator<T, boolean>, thisArg?: any ): T; /** * @see _.find */ find<T>( collection: List<T>|Dictionary<T>, predicate?: string, thisArg?: any ): T; /** * @see _.find */ find<TObject extends {}, T>( collection: List<T>|Dictionary<T>, predicate?: TObject ): T; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.find */ find( predicate?: ListIterator<T, boolean>, thisArg?: any ): T; /** * @see _.find */ find( predicate?: string, thisArg?: any ): T; /** * @see _.find */ find<TObject extends {}>( predicate?: TObject ): T; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.find */ find<TResult>( predicate?: ListIterator<TResult, boolean>|DictionaryIterator<TResult, boolean>, thisArg?: any ): TResult; /** * @see _.find */ find<TResult>( predicate?: string, thisArg?: any ): TResult; /** * @see _.find */ find<TObject extends {}, TResult>( predicate?: TObject ): TResult; } //_.findWhere interface LoDashStatic { /** * @see _.find **/ findWhere<T>( collection: Array<T>, callback: ListIterator<T, boolean>, thisArg?: any): T; /** * @see _.find **/ findWhere<T>( collection: List<T>, callback: ListIterator<T, boolean>, thisArg?: any): T; /** * @see _.find **/ findWhere<T>( collection: Dictionary<T>, callback: DictionaryIterator<T, boolean>, thisArg?: any): T; /** * @see _.find * @param _.matches style callback **/ findWhere<W, T>( collection: Array<T>, whereValue: W): T; /** * @see _.find * @param _.matches style callback **/ findWhere<W, T>( collection: List<T>, whereValue: W): T; /** * @see _.find * @param _.matches style callback **/ findWhere<W, T>( collection: Dictionary<T>, whereValue: W): T; /** * @see _.find * @param _.property style callback **/ findWhere<T>( collection: Array<T>, pluckValue: string): T; /** * @see _.find * @param _.property style callback **/ findWhere<T>( collection: List<T>, 
        pluckValue: string): T;

    /**
     * @see _.find
     * @param _.property style callback
     **/
    findWhere<T>(
        collection: Dictionary<T>,
        pluckValue: string): T;
}

//_.findLast
interface LoDashStatic {
    /**
     * This method is like _.find except that it iterates over elements of a collection from
     * right to left.
     * @param collection Searches for a value in this list.
     * @param callback The function called per iteration.
     * @param thisArg The this binding of callback.
     * @return The found element, else undefined.
     **/
    findLast<T>(
        collection: Array<T>,
        callback: ListIterator<T, boolean>,
        thisArg?: any): T;

    /**
     * @see _.find
     **/
    findLast<T>(
        collection: List<T>,
        callback: ListIterator<T, boolean>,
        thisArg?: any): T;

    /**
     * @see _.find
     **/
    findLast<T>(
        collection: Dictionary<T>,
        callback: DictionaryIterator<T, boolean>,
        thisArg?: any): T;

    /**
     * @see _.find
     * @param _.matches style callback
     **/
    findLast<W, T>(
        collection: Array<T>,
        whereValue: W): T;

    /**
     * @see _.find
     * @param _.matches style callback
     **/
    findLast<W, T>(
        collection: List<T>,
        whereValue: W): T;

    /**
     * @see _.find
     * @param _.matches style callback
     **/
    findLast<W, T>(
        collection: Dictionary<T>,
        whereValue: W): T;

    /**
     * @see _.find
     * @param _.property style callback
     **/
    findLast<T>(
        collection: Array<T>,
        pluckValue: string): T;

    /**
     * @see _.find
     * @param _.property style callback
     **/
    findLast<T>(
        collection: List<T>,
        pluckValue: string): T;

    /**
     * @see _.find
     * @param _.property style callback
     **/
    findLast<T>(
        collection: Dictionary<T>,
        pluckValue: string): T;
}

interface LoDashImplicitArrayWrapper<T> {
    /**
     * @see _.findLast
     */
    findLast(
        callback: ListIterator<T, boolean>,
        thisArg?: any): T;

    /**
     * @see _.findLast
     * @param _.matches style callback
     */
    findLast<W>(
        whereValue: W): T;

    /**
     * @see _.findLast
     * @param _.property style callback
     */
    findLast(
        pluckValue: string): T;
}

//_.forEach
interface LoDashStatic {
    /**
     * Iterates over elements of collection invoking iteratee for each element.
The iteratee is bound to thisArg * and invoked with three arguments: * (value, index|key, collection). Iteratee functions may exit iteration early by explicitly returning false. * * Note: As with other "Collections" methods, objects with a "length" property are iterated like arrays. To * avoid this behavior _.forIn or _.forOwn may be used for object iteration. * * @alias _.each * * @param collection The collection to iterate over. * @param iteratee The function invoked per iteration. * @param thisArg The this binding of iteratee. */ forEach<T>( collection: T[], iteratee?: ListIterator<T, any>, thisArg?: any ): T[]; /** * @see _.forEach */ forEach<T>( collection: List<T>, iteratee?: ListIterator<T, any>, thisArg?: any ): List<T>; /** * @see _.forEach */ forEach<T>( collection: Dictionary<T>, iteratee?: DictionaryIterator<T, any>, thisArg?: any ): Dictionary<T>; /** * @see _.forEach */ forEach<T extends {}>( collection: T, iteratee?: ObjectIterator<any, any>, thisArgs?: any ): T; /** * @see _.forEach */ forEach<T extends {}, TValue>( collection: T, iteratee?: ObjectIterator<TValue, any>, thisArgs?: any ): T; } interface LoDashImplicitWrapper<T> { /** * @see _.forEach */ forEach( iteratee: ListIterator<string, any>, thisArg?: any ): LoDashImplicitWrapper<string>; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.forEach */ forEach( iteratee: ListIterator<T, any>, thisArg?: any ): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.forEach */ forEach<TValue>( iteratee?: ListIterator<TValue, any>|DictionaryIterator<TValue, any>, thisArg?: any ): LoDashImplicitObjectWrapper<T>; } interface LoDashExplicitWrapper<T> { /** * @see _.forEach */ forEach( iteratee: ListIterator<string, any>, thisArg?: any ): LoDashExplicitWrapper<string>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.forEach */ forEach( iteratee: ListIterator<T, any>, thisArg?: any ): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> 
{ /** * @see _.forEach */ forEach<TValue>( iteratee?: ListIterator<TValue, any>|DictionaryIterator<TValue, any>, thisArg?: any ): LoDashExplicitObjectWrapper<T>; } //_.forEachRight interface LoDashStatic { /** * This method is like _.forEach except that it iterates over elements of collection from right to left. * * @alias _.eachRight * * @param collection The collection to iterate over. * @param iteratee The function called per iteration. * @param thisArg The this binding of callback. */ forEachRight<T>( collection: T[], iteratee?: ListIterator<T, any>, thisArg?: any ): T[]; /** * @see _.forEachRight */ forEachRight<T>( collection: List<T>, iteratee?: ListIterator<T, any>, thisArg?: any ): List<T>; /** * @see _.forEachRight */ forEachRight<T>( collection: Dictionary<T>, iteratee?: DictionaryIterator<T, any>, thisArg?: any ): Dictionary<T>; /** * @see _.forEachRight */ forEachRight<T extends {}>( collection: T, iteratee?: ObjectIterator<any, any>, thisArgs?: any ): T; /** * @see _.forEachRight */ forEachRight<T extends {}, TValue>( collection: T, iteratee?: ObjectIterator<TValue, any>, thisArgs?: any ): T; } interface LoDashImplicitWrapper<T> { /** * @see _.forEachRight */ forEachRight( iteratee: ListIterator<string, any>, thisArg?: any ): LoDashImplicitWrapper<string>; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.forEachRight */ forEachRight( iteratee: ListIterator<T, any>, thisArg?: any ): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.forEachRight */ forEachRight<TValue>( iteratee?: ListIterator<TValue, any>|DictionaryIterator<TValue, any>, thisArg?: any ): LoDashImplicitObjectWrapper<T>; } interface LoDashExplicitWrapper<T> { /** * @see _.forEachRight */ forEachRight( iteratee: ListIterator<string, any>, thisArg?: any ): LoDashExplicitWrapper<string>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.forEachRight */ forEachRight( iteratee: ListIterator<T, any>, thisArg?: any ): 
LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.forEachRight */ forEachRight<TValue>( iteratee?: ListIterator<TValue, any>|DictionaryIterator<TValue, any>, thisArg?: any ): LoDashExplicitObjectWrapper<T>; } //_.groupBy interface LoDashStatic { /** * Creates an object composed of keys generated from the results of running each element of collection through * iteratee. The corresponding value of each key is an array of the elements responsible for generating the * key. The iteratee is bound to thisArg and invoked with three arguments: * (value, index|key, collection). * * If a property name is provided for iteratee the created _.property style callback returns the property * value of the given element. * * If a value is also provided for thisArg the created _.matchesProperty style callback returns true for * elements that have a matching property value, else false. * * If an object is provided for iteratee the created _.matches style callback returns true for elements that * have the properties of the given object, else false. * * @param collection The collection to iterate over. * @param iteratee The function invoked per iteration. * @param thisArg The this binding of iteratee. * @return Returns the composed aggregate object. 
*/ groupBy<T, TKey>( collection: List<T>, iteratee?: ListIterator<T, TKey>, thisArg?: any ): Dictionary<T[]>; /** * @see _.groupBy */ groupBy<T>( collection: List<any>, iteratee?: ListIterator<T, any>, thisArg?: any ): Dictionary<T[]>; /** * @see _.groupBy */ groupBy<T, TKey>( collection: Dictionary<T>, iteratee?: DictionaryIterator<T, TKey>, thisArg?: any ): Dictionary<T[]>; /** * @see _.groupBy */ groupBy<T>( collection: Dictionary<any>, iteratee?: DictionaryIterator<T, any>, thisArg?: any ): Dictionary<T[]>; /** * @see _.groupBy */ groupBy<T, TValue>( collection: List<T>|Dictionary<T>, iteratee?: string, thisArg?: TValue ): Dictionary<T[]>; /** * @see _.groupBy */ groupBy<T>( collection: List<T>|Dictionary<T>, iteratee?: string, thisArg?: any ): Dictionary<T[]>; /** * @see _.groupBy */ groupBy<TWhere, T>( collection: List<T>|Dictionary<T>, iteratee?: TWhere ): Dictionary<T[]>; /** * @see _.groupBy */ groupBy<T>( collection: List<T>|Dictionary<T>, iteratee?: Object ): Dictionary<T[]>; } interface LoDashImplicitWrapper<T> { /** * @see _.groupBy */ groupBy<TKey>( iteratee?: ListIterator<T, TKey>, thisArg?: any ): LoDashImplicitObjectWrapper<Dictionary<T[]>>; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.groupBy */ groupBy<TKey>( iteratee?: ListIterator<T, TKey>, thisArg?: any ): LoDashImplicitObjectWrapper<Dictionary<T[]>>; /** * @see _.groupBy */ groupBy<TValue>( iteratee?: string, thisArg?: TValue ): LoDashImplicitObjectWrapper<Dictionary<T[]>>; /** * @see _.groupBy */ groupBy<TWhere>( iteratee?: TWhere ): LoDashImplicitObjectWrapper<Dictionary<T[]>>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.groupBy */ groupBy<T, TKey>( iteratee?: ListIterator<T, TKey>|DictionaryIterator<T, TKey>, thisArg?: any ): LoDashImplicitObjectWrapper<Dictionary<T[]>>; /** * @see _.groupBy */ groupBy<T>( iteratee?: ListIterator<T, any>|DictionaryIterator<T, any>, thisArg?: any ): LoDashImplicitObjectWrapper<Dictionary<T[]>>; /** * @see _.groupBy */ groupBy<T, 
TValue>( iteratee?: string, thisArg?: TValue ): LoDashImplicitObjectWrapper<Dictionary<T[]>>; /** * @see _.groupBy */ groupBy<T>( iteratee?: string, thisArg?: any ): LoDashImplicitObjectWrapper<Dictionary<T[]>>; /** * @see _.groupBy */ groupBy<TWhere, T>( iteratee?: TWhere ): LoDashImplicitObjectWrapper<Dictionary<T[]>>; /** * @see _.groupBy */ groupBy<T>( iteratee?: Object ): LoDashImplicitObjectWrapper<Dictionary<T[]>>; } interface LoDashExplicitWrapper<T> { /** * @see _.groupBy */ groupBy<TKey>( iteratee?: ListIterator<T, TKey>, thisArg?: any ): LoDashExplicitObjectWrapper<Dictionary<T[]>>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.groupBy */ groupBy<TKey>( iteratee?: ListIterator<T, TKey>, thisArg?: any ): LoDashExplicitObjectWrapper<Dictionary<T[]>>; /** * @see _.groupBy */ groupBy<TValue>( iteratee?: string, thisArg?: TValue ): LoDashExplicitObjectWrapper<Dictionary<T[]>>; /** * @see _.groupBy */ groupBy<TWhere>( iteratee?: TWhere ): LoDashExplicitObjectWrapper<Dictionary<T[]>>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.groupBy */ groupBy<T, TKey>( iteratee?: ListIterator<T, TKey>|DictionaryIterator<T, TKey>, thisArg?: any ): LoDashExplicitObjectWrapper<Dictionary<T[]>>; /** * @see _.groupBy */ groupBy<T>( iteratee?: ListIterator<T, any>|DictionaryIterator<T, any>, thisArg?: any ): LoDashExplicitObjectWrapper<Dictionary<T[]>>; /** * @see _.groupBy */ groupBy<T, TValue>( iteratee?: string, thisArg?: TValue ): LoDashExplicitObjectWrapper<Dictionary<T[]>>; /** * @see _.groupBy */ groupBy<T>( iteratee?: string, thisArg?: any ): LoDashExplicitObjectWrapper<Dictionary<T[]>>; /** * @see _.groupBy */ groupBy<TWhere, T>( iteratee?: TWhere ): LoDashExplicitObjectWrapper<Dictionary<T[]>>; /** * @see _.groupBy */ groupBy<T>( iteratee?: Object ): LoDashExplicitObjectWrapper<Dictionary<T[]>>; } //_.include interface LoDashStatic { /** * @see _.includes */ include<T>( collection: List<T>|Dictionary<T>, target: T, fromIndex?: number ): boolean; 
/** * @see _.includes */ include( collection: string, target: string, fromIndex?: number ): boolean; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.includes */ include( target: T, fromIndex?: number ): boolean; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.includes */ include<TValue>( target: TValue, fromIndex?: number ): boolean; } interface LoDashImplicitWrapper<T> { /** * @see _.includes */ include( target: string, fromIndex?: number ): boolean; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.includes */ include( target: T, fromIndex?: number ): LoDashExplicitWrapper<boolean>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.includes */ include<TValue>( target: TValue, fromIndex?: number ): LoDashExplicitWrapper<boolean>; } interface LoDashExplicitWrapper<T> { /** * @see _.includes */ include( target: string, fromIndex?: number ): LoDashExplicitWrapper<boolean>; } //_.includes interface LoDashStatic { /** * Checks if target is in collection using SameValueZero for equality comparisons. If fromIndex is negative, * it’s used as the offset from the end of collection. * * @alias _.contains, _.include * * @param collection The collection to search. * @param target The value to search for. * @param fromIndex The index to search from. * @return True if the target element is found, else false. 
*/ includes<T>( collection: List<T>|Dictionary<T>, target: T, fromIndex?: number ): boolean; /** * @see _.includes */ includes( collection: string, target: string, fromIndex?: number ): boolean; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.includes */ includes( target: T, fromIndex?: number ): boolean; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.includes */ includes<TValue>( target: TValue, fromIndex?: number ): boolean; } interface LoDashImplicitWrapper<T> { /** * @see _.includes */ includes( target: string, fromIndex?: number ): boolean; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.includes */ includes( target: T, fromIndex?: number ): LoDashExplicitWrapper<boolean>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.includes */ includes<TValue>( target: TValue, fromIndex?: number ): LoDashExplicitWrapper<boolean>; } interface LoDashExplicitWrapper<T> { /** * @see _.includes */ includes( target: string, fromIndex?: number ): LoDashExplicitWrapper<boolean>; } //_.indexBy interface LoDashStatic { /** * Creates an object composed of keys generated from the results of running each element of collection through * iteratee. The corresponding value of each key is the last element responsible for generating the key. The * iteratee function is bound to thisArg and invoked with three arguments: * (value, index|key, collection). * * If a property name is provided for iteratee the created _.property style callback returns the property * value of the given element. * * If a value is also provided for thisArg the created _.matchesProperty style callback returns true for * elements that have a matching property value, else false. * * If an object is provided for iteratee the created _.matches style callback returns true for elements that * have the properties of the given object, else false. * * @param collection The collection to iterate over. * @param iteratee The function invoked per iteration. 
* @param thisArg The this binding of iteratee. * @return Returns the composed aggregate object. */ indexBy<T>( collection: List<T>, iteratee?: ListIterator<T, any>, thisArg?: any ): Dictionary<T>; /** * @see _.indexBy */ indexBy<T>( collection: NumericDictionary<T>, iteratee?: NumericDictionaryIterator<T, any>, thisArg?: any ): Dictionary<T>; /** * @see _.indexBy */ indexBy<T>( collection: Dictionary<T>, iteratee?: DictionaryIterator<T, any>, thisArg?: any ): Dictionary<T>; /** * @see _.indexBy */ indexBy<T>( collection: List<T>|NumericDictionary<T>|Dictionary<T>, iteratee?: string, thisArg?: any ): Dictionary<T>; /** * @see _.indexBy */ indexBy<W extends Object, T>( collection: List<T>|NumericDictionary<T>|Dictionary<T>, iteratee?: W ): Dictionary<T>; /** * @see _.indexBy */ indexBy<T>( collection: List<T>|NumericDictionary<T>|Dictionary<T>, iteratee?: Object ): Dictionary<T>; } interface LoDashImplicitWrapper<T> { /** * @see _.indexBy */ indexBy( iteratee?: ListIterator<T, any>, thisArg?: any ): LoDashImplicitObjectWrapper<Dictionary<T>>; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.indexBy */ indexBy( iteratee?: ListIterator<T, any>, thisArg?: any ): LoDashImplicitObjectWrapper<Dictionary<T>>; /** * @see _.indexBy */ indexBy( iteratee?: string, thisArg?: any ): LoDashImplicitObjectWrapper<Dictionary<T>>; /** * @see _.indexBy */ indexBy<W extends Object>( iteratee?: W ): LoDashImplicitObjectWrapper<Dictionary<T>>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.indexBy */ indexBy<T>( iteratee?: ListIterator<T, any>|NumericDictionaryIterator<T, any>|DictionaryIterator<T, any>, thisArg?: any ): LoDashImplicitObjectWrapper<Dictionary<T>>; /** * @see _.indexBy */ indexBy<T>( iteratee?: string, thisArg?: any ): LoDashImplicitObjectWrapper<Dictionary<T>>; /** * @see _.indexBy */ indexBy<W extends Object, T>( iteratee?: W ): LoDashImplicitObjectWrapper<Dictionary<T>>; /** * @see _.indexBy */ indexBy<T>( iteratee?: Object ): 
LoDashImplicitObjectWrapper<Dictionary<T>>; } interface LoDashExplicitWrapper<T> { /** * @see _.indexBy */ indexBy( iteratee?: ListIterator<T, any>, thisArg?: any ): LoDashExplicitObjectWrapper<Dictionary<T>>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.indexBy */ indexBy( iteratee?: ListIterator<T, any>, thisArg?: any ): LoDashExplicitObjectWrapper<Dictionary<T>>; /** * @see _.indexBy */ indexBy( iteratee?: string, thisArg?: any ): LoDashExplicitObjectWrapper<Dictionary<T>>; /** * @see _.indexBy */ indexBy<W extends Object>( iteratee?: W ): LoDashExplicitObjectWrapper<Dictionary<T>>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.indexBy */ indexBy<T>( iteratee?: ListIterator<T, any>|NumericDictionaryIterator<T, any>|DictionaryIterator<T, any>, thisArg?: any ): LoDashExplicitObjectWrapper<Dictionary<T>>; /** * @see _.indexBy */ indexBy<T>( iteratee?: string, thisArg?: any ): LoDashExplicitObjectWrapper<Dictionary<T>>; /** * @see _.indexBy */ indexBy<W extends Object, T>( iteratee?: W ): LoDashExplicitObjectWrapper<Dictionary<T>>; /** * @see _.indexBy */ indexBy<T>( iteratee?: Object ): LoDashExplicitObjectWrapper<Dictionary<T>>; } //_.invoke interface LoDashStatic { /** * Invokes the method named by methodName on each element in the collection returning * an array of the results of each invoked method. Additional arguments will be provided * to each invoked method. If methodName is a function it will be invoked for, and this * bound to, each element in the collection. * @param collection The collection to iterate over. * @param methodName The name of the method to invoke. * @param args Arguments to invoke the method with. 
**/ invoke<T extends {}>( collection: Array<T>, methodName: string, ...args: any[]): any; /** * @see _.invoke **/ invoke<T extends {}>( collection: List<T>, methodName: string, ...args: any[]): any; /** * @see _.invoke **/ invoke<T extends {}>( collection: Dictionary<T>, methodName: string, ...args: any[]): any; /** * @see _.invoke **/ invoke<T extends {}>( collection: Array<T>, method: Function, ...args: any[]): any; /** * @see _.invoke **/ invoke<T extends {}>( collection: List<T>, method: Function, ...args: any[]): any; /** * @see _.invoke **/ invoke<T extends {}>( collection: Dictionary<T>, method: Function, ...args: any[]): any; } //_.map interface LoDashStatic { /** * Creates an array of values by running each element in collection through iteratee. The iteratee is bound to * thisArg and invoked with three arguments: (value, index|key, collection). * * If a property name is provided for iteratee the created _.property style callback returns the property value * of the given element. * * If a value is also provided for thisArg the created _.matchesProperty style callback returns true for * elements that have a matching property value, else false. * * If an object is provided for iteratee the created _.matches style callback returns true for elements that * have the properties of the given object, else false. * * Many lodash methods are guarded to work as iteratees for methods like _.every, _.filter, _.map, _.mapValues, * _.reject, and _.some. * * The guarded methods are: * ary, callback, chunk, clone, create, curry, curryRight, drop, dropRight, every, fill, flatten, invert, max, * min, parseInt, slice, sortBy, take, takeRight, template, trim, trimLeft, trimRight, trunc, random, range, * sample, some, sum, uniq, and words * * @alias _.collect * * @param collection The collection to iterate over. * @param iteratee The function invoked per iteration. * @param thisArg The this binding of iteratee. * @return Returns the new mapped array. 
*/ map<T, TResult>( collection: List<T>, iteratee?: ListIterator<T, TResult>, thisArg?: any ): TResult[]; /** * @see _.map */ map<T extends {}, TResult>( collection: Dictionary<T>, iteratee?: DictionaryIterator<T, TResult>, thisArg?: any ): TResult[]; /** * @see _.map */ map<T, TResult>( collection: List<T>|Dictionary<T>, iteratee?: string ): TResult[]; /** * @see _.map */ map<T, TObject extends {}>( collection: List<T>|Dictionary<T>, iteratee?: TObject ): boolean[]; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.map */ map<TResult>( iteratee?: ListIterator<T, TResult>, thisArg?: any ): LoDashImplicitArrayWrapper<TResult>; /** * @see _.map */ map<TResult>( iteratee?: string ): LoDashImplicitArrayWrapper<TResult>; /** * @see _.map */ map<TObject extends {}>( iteratee?: TObject ): LoDashImplicitArrayWrapper<boolean>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.map */ map<TValue, TResult>( iteratee?: ListIterator<TValue, TResult>|DictionaryIterator<TValue, TResult>, thisArg?: any ): LoDashImplicitArrayWrapper<TResult>; /** * @see _.map */ map<TValue, TResult>( iteratee?: string ): LoDashImplicitArrayWrapper<TResult>; /** * @see _.map */ map<TObject extends {}>( iteratee?: TObject ): LoDashImplicitArrayWrapper<boolean>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.map */ map<TResult>( iteratee?: ListIterator<T, TResult>, thisArg?: any ): LoDashExplicitArrayWrapper<TResult>; /** * @see _.map */ map<TResult>( iteratee?: string ): LoDashExplicitArrayWrapper<TResult>; /** * @see _.map */ map<TObject extends {}>( iteratee?: TObject ): LoDashExplicitArrayWrapper<boolean>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.map */ map<TValue, TResult>( iteratee?: ListIterator<TValue, TResult>|DictionaryIterator<TValue, TResult>, thisArg?: any ): LoDashExplicitArrayWrapper<TResult>; /** * @see _.map */ map<TValue, TResult>( iteratee?: string ): LoDashExplicitArrayWrapper<TResult>; /** * @see _.map */ map<TObject extends {}>( iteratee?: 
TObject ): LoDashExplicitArrayWrapper<boolean>; } //_.partition interface LoDashStatic { /** * Creates an array of elements split into two groups, the first of which contains elements predicate returns truthy for, * while the second of which contains elements predicate returns falsey for. * The predicate is bound to thisArg and invoked with three arguments: (value, index|key, collection). * * If a property name is provided for predicate the created _.property style callback * returns the property value of the given element. * * If a value is also provided for thisArg the created _.matchesProperty style callback * returns true for elements that have a matching property value, else false. * * If an object is provided for predicate the created _.matches style callback returns * true for elements that have the properties of the given object, else false. * * @param collection The collection to iterate over. * @param callback The function called per iteration. * @param thisArg The this binding of predicate. * @return Returns the array of grouped elements. 
**/ partition<T>( collection: List<T>, callback: ListIterator<T, boolean>, thisArg?: any): T[][]; /** * @see _.partition **/ partition<T>( collection: Dictionary<T>, callback: DictionaryIterator<T, boolean>, thisArg?: any): T[][]; /** * @see _.partition **/ partition<W, T>( collection: List<T>, whereValue: W): T[][]; /** * @see _.partition **/ partition<W, T>( collection: Dictionary<T>, whereValue: W): T[][]; /** * @see _.partition **/ partition<T>( collection: List<T>, path: string, srcValue: any): T[][]; /** * @see _.partition **/ partition<T>( collection: Dictionary<T>, path: string, srcValue: any): T[][]; /** * @see _.partition **/ partition<T>( collection: List<T>, pluckValue: string): T[][]; /** * @see _.partition **/ partition<T>( collection: Dictionary<T>, pluckValue: string): T[][]; } interface LoDashImplicitStringWrapper { /** * @see _.partition */ partition( callback: ListIterator<string, boolean>, thisArg?: any): LoDashImplicitArrayWrapper<string[]>; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.partition */ partition( callback: ListIterator<T, boolean>, thisArg?: any): LoDashImplicitArrayWrapper<T[]>; /** * @see _.partition */ partition<W>( whereValue: W): LoDashImplicitArrayWrapper<T[]>; /** * @see _.partition */ partition( path: string, srcValue: any): LoDashImplicitArrayWrapper<T[]>; /** * @see _.partition */ partition( pluckValue: string): LoDashImplicitArrayWrapper<T[]>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.partition */ partition<TResult>( callback: ListIterator<TResult, boolean>, thisArg?: any): LoDashImplicitArrayWrapper<TResult[]>; /** * @see _.partition */ partition<TResult>( callback: DictionaryIterator<TResult, boolean>, thisArg?: any): LoDashImplicitArrayWrapper<TResult[]>; /** * @see _.partition */ partition<W, TResult>( whereValue: W): LoDashImplicitArrayWrapper<TResult[]>; /** * @see _.partition */ partition<TResult>( path: string, srcValue: any): LoDashImplicitArrayWrapper<TResult[]>; /** * @see 
_.partition */ partition<TResult>( pluckValue: string): LoDashImplicitArrayWrapper<TResult[]>; } //_.pluck interface LoDashStatic { /** * Gets the property value of path from all elements in collection. * * @param collection The collection to iterate over. * @param path The path of the property to pluck. * @return A new array of property values. */ pluck<T extends {}>( collection: List<T>|Dictionary<T>, path: StringRepresentable|StringRepresentable[] ): any[]; /** * @see _.pluck */ pluck<T extends {}, TResult>( collection: List<T>|Dictionary<T>, path: StringRepresentable|StringRepresentable[] ): TResult[]; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.pluck */ pluck<TResult>(path: StringRepresentable|StringRepresentable[]): LoDashImplicitArrayWrapper<TResult>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.pluck */ pluck<TResult>(path: StringRepresentable|StringRepresentable[]): LoDashImplicitArrayWrapper<TResult>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.pluck */ pluck<TResult>(path: StringRepresentable|StringRepresentable[]): LoDashExplicitArrayWrapper<TResult>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.pluck */ pluck<TResult>(path: StringRepresentable|StringRepresentable[]): LoDashExplicitArrayWrapper<TResult>; } //_.reduce interface LoDashStatic { /** * Reduces a collection to a value which is the accumulated result of running each * element in the collection through the callback, where each successive callback execution * consumes the return value of the previous execution. If accumulator is not provided the * first element of the collection will be used as the initial accumulator value. The callback * is bound to thisArg and invoked with four arguments; (accumulator, value, index|key, collection). * @param collection The collection to iterate over. * @param callback The function called per iteration. * @param accumulator Initial value of the accumulator. * @param thisArg The this binding of callback. 
* @return Returns the accumulated value. **/ reduce<T, TResult>( collection: Array<T>, callback: MemoIterator<T, TResult>, accumulator: TResult, thisArg?: any): TResult; /** * @see _.reduce **/ reduce<T, TResult>( collection: List<T>, callback: MemoIterator<T, TResult>, accumulator: TResult, thisArg?: any): TResult; /** * @see _.reduce **/ reduce<T, TResult>( collection: Dictionary<T>, callback: MemoIterator<T, TResult>, accumulator: TResult, thisArg?: any): TResult; /** * @see _.reduce **/ reduce<T, TResult>( collection: Array<T>, callback: MemoIterator<T, TResult>, thisArg?: any): TResult; /** * @see _.reduce **/ reduce<T, TResult>( collection: List<T>, callback: MemoIterator<T, TResult>, thisArg?: any): TResult; /** * @see _.reduce **/ reduce<T, TResult>( collection: Dictionary<T>, callback: MemoIterator<T, TResult>, thisArg?: any): TResult; /** * @see _.reduce **/ inject<T, TResult>( collection: Array<T>, callback: MemoIterator<T, TResult>, accumulator: TResult, thisArg?: any): TResult; /** * @see _.reduce **/ inject<T, TResult>( collection: List<T>, callback: MemoIterator<T, TResult>, accumulator: TResult, thisArg?: any): TResult; /** * @see _.reduce **/ inject<T, TResult>( collection: Dictionary<T>, callback: MemoIterator<T, TResult>, accumulator: TResult, thisArg?: any): TResult; /** * @see _.reduce **/ inject<T, TResult>( collection: Array<T>, callback: MemoIterator<T, TResult>, thisArg?: any): TResult; /** * @see _.reduce **/ inject<T, TResult>( collection: List<T>, callback: MemoIterator<T, TResult>, thisArg?: any): TResult; /** * @see _.reduce **/ inject<T, TResult>( collection: Dictionary<T>, callback: MemoIterator<T, TResult>, thisArg?: any): TResult; /** * @see _.reduce **/ foldl<T, TResult>( collection: Array<T>, callback: MemoIterator<T, TResult>, accumulator: TResult, thisArg?: any): TResult; /** * @see _.reduce **/ foldl<T, TResult>( collection: List<T>, callback: MemoIterator<T, TResult>, accumulator: TResult, thisArg?: any): TResult; /** * @see 
_.reduce **/ foldl<T, TResult>( collection: Dictionary<T>, callback: MemoIterator<T, TResult>, accumulator: TResult, thisArg?: any): TResult; /** * @see _.reduce **/ foldl<T, TResult>( collection: Array<T>, callback: MemoIterator<T, TResult>, thisArg?: any): TResult; /** * @see _.reduce **/ foldl<T, TResult>( collection: List<T>, callback: MemoIterator<T, TResult>, thisArg?: any): TResult; /** * @see _.reduce **/ foldl<T, TResult>( collection: Dictionary<T>, callback: MemoIterator<T, TResult>, thisArg?: any): TResult; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.reduce **/ reduce<TResult>( callback: MemoIterator<T, TResult>, accumulator: TResult, thisArg?: any): TResult; /** * @see _.reduce **/ reduce<TResult>( callback: MemoIterator<T, TResult>, thisArg?: any): TResult; /** * @see _.reduce **/ inject<TResult>( callback: MemoIterator<T, TResult>, accumulator: TResult, thisArg?: any): TResult; /** * @see _.reduce **/ inject<TResult>( callback: MemoIterator<T, TResult>, thisArg?: any): TResult; /** * @see _.reduce **/ foldl<TResult>( callback: MemoIterator<T, TResult>, accumulator: TResult, thisArg?: any): TResult; /** * @see _.reduce **/ foldl<TResult>( callback: MemoIterator<T, TResult>, thisArg?: any): TResult; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.reduce **/ reduce<TValue, TResult>( callback: MemoIterator<TValue, TResult>, accumulator: TResult, thisArg?: any): TResult; /** * @see _.reduce **/ reduce<TValue, TResult>( callback: MemoIterator<TValue, TResult>, thisArg?: any): TResult; /** * @see _.reduce **/ inject<TValue, TResult>( callback: MemoIterator<TValue, TResult>, accumulator: TResult, thisArg?: any): TResult; /** * @see _.reduce **/ inject<TValue, TResult>( callback: MemoIterator<TValue, TResult>, thisArg?: any): TResult; /** * @see _.reduce **/ foldl<TValue, TResult>( callback: MemoIterator<TValue, TResult>, accumulator: TResult, thisArg?: any): TResult; /** * @see _.reduce **/ foldl<TValue, TResult>( callback: 
MemoIterator<TValue, TResult>, thisArg?: any): TResult; } //_.reduceRight interface LoDashStatic { /** * This method is like _.reduce except that it iterates over elements of a collection from * right to left. * @param collection The collection to iterate over. * @param callback The function called per iteration. * @param accumulator Initial value of the accumulator. * @param thisArg The this binding of callback. * @return The accumulated value. **/ reduceRight<T, TResult>( collection: Array<T>, callback: MemoIterator<T, TResult>, accumulator: TResult, thisArg?: any): TResult; /** * @see _.reduceRight **/ reduceRight<T, TResult>( collection: List<T>, callback: MemoIterator<T, TResult>, accumulator: TResult, thisArg?: any): TResult; /** * @see _.reduceRight **/ reduceRight<T, TResult>( collection: Dictionary<T>, callback: MemoIterator<T, TResult>, accumulator: TResult, thisArg?: any): TResult; /** * @see _.reduceRight **/ reduceRight<T, TResult>( collection: Array<T>, callback: MemoIterator<T, TResult>, thisArg?: any): TResult; /** * @see _.reduceRight **/ reduceRight<T, TResult>( collection: List<T>, callback: MemoIterator<T, TResult>, thisArg?: any): TResult; /** * @see _.reduceRight **/ reduceRight<T, TResult>( collection: Dictionary<T>, callback: MemoIterator<T, TResult>, thisArg?: any): TResult; /** * @see _.reduceRight **/ foldr<T, TResult>( collection: Array<T>, callback: MemoIterator<T, TResult>, accumulator: TResult, thisArg?: any): TResult; /** * @see _.reduceRight **/ foldr<T, TResult>( collection: List<T>, callback: MemoIterator<T, TResult>, accumulator: TResult, thisArg?: any): TResult; /** * @see _.reduceRight **/ foldr<T, TResult>( collection: Dictionary<T>, callback: MemoIterator<T, TResult>, accumulator: TResult, thisArg?: any): TResult; /** * @see _.reduceRight **/ foldr<T, TResult>( collection: Array<T>, callback: MemoIterator<T, TResult>, thisArg?: any): TResult; /** * @see _.reduceRight **/ foldr<T, TResult>( collection: List<T>, callback: 
MemoIterator<T, TResult>, thisArg?: any): TResult; /** * @see _.reduceRight **/ foldr<T, TResult>( collection: Dictionary<T>, callback: MemoIterator<T, TResult>, thisArg?: any): TResult; } //_.reject interface LoDashStatic { /** * The opposite of _.filter; this method returns the elements of collection that predicate does not return * truthy for. * * @param collection The collection to iterate over. * @param predicate The function invoked per iteration. * @param thisArg The this binding of predicate. * @return Returns the new filtered array. */ reject<T>( collection: List<T>, predicate?: ListIterator<T, boolean>, thisArg?: any ): T[]; /** * @see _.reject */ reject<T>( collection: Dictionary<T>, predicate?: DictionaryIterator<T, boolean>, thisArg?: any ): T[]; /** * @see _.reject */ reject( collection: string, predicate?: StringIterator<boolean>, thisArg?: any ): string[]; /** * @see _.reject */ reject<T>( collection: List<T>|Dictionary<T>, predicate: string, thisArg?: any ): T[]; /** * @see _.reject */ reject<W extends {}, T>( collection: List<T>|Dictionary<T>, predicate: W ): T[]; } interface LoDashImplicitWrapper<T> { /** * @see _.reject */ reject( predicate?: StringIterator<boolean>, thisArg?: any ): LoDashImplicitArrayWrapper<string>; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.reject */ reject( predicate: ListIterator<T, boolean>, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.reject */ reject( predicate: string, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.reject */ reject<W>(predicate: W): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.reject */ reject<T>( predicate: ListIterator<T, boolean>|DictionaryIterator<T, boolean>, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.reject */ reject<T>( predicate: string, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.reject */ reject<W, T>(predicate: W): LoDashImplicitArrayWrapper<T>; } interface 
LoDashExplicitWrapper<T> { /** * @see _.reject */ reject( predicate?: StringIterator<boolean>, thisArg?: any ): LoDashExplicitArrayWrapper<string>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.reject */ reject( predicate: ListIterator<T, boolean>, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.reject */ reject( predicate: string, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.reject */ reject<W>(predicate: W): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.reject */ reject<T>( predicate: ListIterator<T, boolean>|DictionaryIterator<T, boolean>, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.reject */ reject<T>( predicate: string, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.reject */ reject<W, T>(predicate: W): LoDashExplicitArrayWrapper<T>; } //_.sample interface LoDashStatic { /** * Retrieves a random element or n random elements from a collection. * @param collection The collection to sample. * @return Returns the random sample(s) of collection. **/ sample<T>(collection: Array<T>): T; /** * @see _.sample **/ sample<T>(collection: List<T>): T; /** * @see _.sample **/ sample<T>(collection: Dictionary<T>): T; /** * @see _.sample * @param n The number of elements to sample. **/ sample<T>(collection: Array<T>, n: number): T[]; /** * @see _.sample * @param n The number of elements to sample. **/ sample<T>(collection: List<T>, n: number): T[]; /** * @see _.sample * @param n The number of elements to sample. 
**/ sample<T>(collection: Dictionary<T>, n: number): T[]; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.sample **/ sample(n: number): LoDashImplicitArrayWrapper<T>; /** * @see _.sample **/ sample(): LoDashImplicitWrapper<T>; } //_.select interface LoDashStatic { /** * @see _.filter */ select<T>( collection: List<T>, predicate?: ListIterator<T, boolean>, thisArg?: any ): T[]; /** * @see _.filter */ select<T>( collection: Dictionary<T>, predicate?: DictionaryIterator<T, boolean>, thisArg?: any ): T[]; /** * @see _.filter */ select( collection: string, predicate?: StringIterator<boolean>, thisArg?: any ): string[]; /** * @see _.filter */ select<T>( collection: List<T>|Dictionary<T>, predicate: string, thisArg?: any ): T[]; /** * @see _.filter */ select<W extends {}, T>( collection: List<T>|Dictionary<T>, predicate: W ): T[]; } interface LoDashImplicitWrapper<T> { /** * @see _.filter */ select( predicate?: StringIterator<boolean>, thisArg?: any ): LoDashImplicitArrayWrapper<string>; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.filter */ select( predicate: ListIterator<T, boolean>, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.filter */ select( predicate: string, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.filter */ select<W>(predicate: W): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.filter */ select<T>( predicate: ListIterator<T, boolean>|DictionaryIterator<T, boolean>, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.filter */ select<T>( predicate: string, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.filter */ select<W, T>(predicate: W): LoDashImplicitArrayWrapper<T>; } interface LoDashExplicitWrapper<T> { /** * @see _.filter */ select( predicate?: StringIterator<boolean>, thisArg?: any ): LoDashExplicitArrayWrapper<string>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.filter */ select( predicate: ListIterator<T, boolean>, 
thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.filter */ select( predicate: string, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.filter */ select<W>(predicate: W): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.filter */ select<T>( predicate: ListIterator<T, boolean>|DictionaryIterator<T, boolean>, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.filter */ select<T>( predicate: string, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.filter */ select<W, T>(predicate: W): LoDashExplicitArrayWrapper<T>; } //_.shuffle interface LoDashStatic { /** * Creates an array of shuffled values, using a version of the Fisher-Yates shuffle. * * @param collection The collection to shuffle. * @return Returns the new shuffled array. */ shuffle<T>(collection: List<T>|Dictionary<T>): T[]; /** * @see _.shuffle */ shuffle(collection: string): string[]; } interface LoDashImplicitWrapper<T> { /** * @see _.shuffle */ shuffle(): LoDashImplicitArrayWrapper<string>; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.shuffle */ shuffle(): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.shuffle */ shuffle<T>(): LoDashImplicitArrayWrapper<T>; } interface LoDashExplicitWrapper<T> { /** * @see _.shuffle */ shuffle(): LoDashExplicitArrayWrapper<string>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.shuffle */ shuffle(): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.shuffle */ shuffle<T>(): LoDashExplicitArrayWrapper<T>; } //_.size interface LoDashStatic { /** * Gets the size of collection by returning its length for array-like values or the number of own enumerable * properties for objects. * * @param collection The collection to inspect. * @return Returns the size of collection. 
*/ size<T>(collection: List<T>|Dictionary<T>): number; /** * @see _.size */ size(collection: string): number; } interface LoDashImplicitWrapper<T> { /** * @see _.size */ size(): number; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.size */ size(): number; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.size */ size(): number; } interface LoDashExplicitWrapper<T> { /** * @see _.size */ size(): LoDashExplicitWrapper<number>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.size */ size(): LoDashExplicitWrapper<number>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.size */ size(): LoDashExplicitWrapper<number>; } //_.some interface LoDashStatic { /** * Checks if predicate returns truthy for any element of collection. The function returns as soon as it finds * a passing value and does not iterate over the entire collection. The predicate is bound to thisArg and * invoked with three arguments: (value, index|key, collection). * * If a property name is provided for predicate the created _.property style callback returns the property * value of the given element. * * If a value is also provided for thisArg the created _.matchesProperty style callback returns true for * elements that have a matching property value, else false. * * If an object is provided for predicate the created _.matches style callback returns true for elements that * have the properties of the given object, else false. * * @alias _.any * * @param collection The collection to iterate over. * @param predicate The function invoked per iteration. * @param thisArg The this binding of predicate. * @return Returns true if any element passes the predicate check, else false. 
*/ some<T>( collection: List<T>, predicate?: ListIterator<T, boolean>, thisArg?: any ): boolean; /** * @see _.some */ some<T>( collection: Dictionary<T>, predicate?: DictionaryIterator<T, boolean>, thisArg?: any ): boolean; /** * @see _.some */ some<T>( collection: NumericDictionary<T>, predicate?: NumericDictionaryIterator<T, boolean>, thisArg?: any ): boolean; /** * @see _.some */ some<T>( collection: List<T>|Dictionary<T>|NumericDictionary<T>, predicate?: string, thisArg?: any ): boolean; /** * @see _.some */ some<TObject extends {}, T>( collection: List<T>|Dictionary<T>|NumericDictionary<T>, predicate?: TObject ): boolean; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.some */ some( predicate?: ListIterator<T, boolean>|NumericDictionaryIterator<T, boolean>, thisArg?: any ): boolean; /** * @see _.some */ some( predicate?: string, thisArg?: any ): boolean; /** * @see _.some */ some<TObject extends {}>( predicate?: TObject ): boolean; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.some */ some<TResult>( predicate?: ListIterator<TResult, boolean>|DictionaryIterator<TResult, boolean>|NumericDictionaryIterator<T, boolean>, thisArg?: any ): boolean; /** * @see _.some */ some( predicate?: string, thisArg?: any ): boolean; /** * @see _.some */ some<TObject extends {}>( predicate?: TObject ): boolean; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.some */ some( predicate?: ListIterator<T, boolean>|NumericDictionaryIterator<T, boolean>, thisArg?: any ): LoDashExplicitWrapper<boolean>; /** * @see _.some */ some( predicate?: string, thisArg?: any ): LoDashExplicitWrapper<boolean>; /** * @see _.some */ some<TObject extends {}>( predicate?: TObject ): LoDashExplicitWrapper<boolean>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.some */ some<TResult>( predicate?: ListIterator<TResult, boolean>|DictionaryIterator<TResult, boolean>|NumericDictionaryIterator<T, boolean>, thisArg?: any ): LoDashExplicitWrapper<boolean>; /** * @see _.some 
*/ some( predicate?: string, thisArg?: any ): LoDashExplicitWrapper<boolean>; /** * @see _.some */ some<TObject extends {}>( predicate?: TObject ): LoDashExplicitWrapper<boolean>; } //_.sortBy interface LoDashStatic { /** * Creates an array of elements, sorted in ascending order by the results of running each element in a * collection through iteratee. This method performs a stable sort, that is, it preserves the original sort * order of equal elements. The iteratee is bound to thisArg and invoked with three arguments: * (value, index|key, collection). * * If a property name is provided for iteratee the created _.property style callback returns the property * valueof the given element. * * If a value is also provided for thisArg the created _.matchesProperty style callback returns true for * elements that have a matching property value, else false. * * If an object is provided for iteratee the created _.matches style callback returns true for elements that * have the properties of the given object, else false. * * @param collection The collection to iterate over. * @param iteratee The function invoked per iteration. * @param thisArg The this binding of iteratee. * @return Returns the new sorted array. 
*/ sortBy<T, TSort>( collection: List<T>, iteratee?: ListIterator<T, TSort>, thisArg?: any ): T[]; /** * @see _.sortBy */ sortBy<T, TSort>( collection: Dictionary<T>, iteratee?: DictionaryIterator<T, TSort>, thisArg?: any ): T[]; /** * @see _.sortBy */ sortBy<T>( collection: List<T>|Dictionary<T>, iteratee: string ): T[]; /** * @see _.sortBy */ sortBy<W extends {}, T>( collection: List<T>|Dictionary<T>, whereValue: W ): T[]; /** * @see _.sortBy */ sortBy<T>( collection: List<T>|Dictionary<T> ): T[]; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.sortBy */ sortBy<TSort>( iteratee?: ListIterator<T, TSort>, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.sortBy */ sortBy(iteratee: string): LoDashImplicitArrayWrapper<T>; /** * @see _.sortBy */ sortBy<W extends {}>(whereValue: W): LoDashImplicitArrayWrapper<T>; /** * @see _.sortBy */ sortBy(): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.sortBy */ sortBy<T, TSort>( iteratee?: ListIterator<T, TSort>|DictionaryIterator<T, TSort>, thisArg?: any ): LoDashImplicitArrayWrapper<T>; /** * @see _.sortBy */ sortBy<T>(iteratee: string): LoDashImplicitArrayWrapper<T>; /** * @see _.sortBy */ sortBy<W extends {}, T>(whereValue: W): LoDashImplicitArrayWrapper<T>; /** * @see _.sortBy */ sortBy<T>(): LoDashImplicitArrayWrapper<T>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.sortBy */ sortBy<TSort>( iteratee?: ListIterator<T, TSort>, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.sortBy */ sortBy(iteratee: string): LoDashExplicitArrayWrapper<T>; /** * @see _.sortBy */ sortBy<W extends {}>(whereValue: W): LoDashExplicitArrayWrapper<T>; /** * @see _.sortBy */ sortBy(): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.sortBy */ sortBy<T, TSort>( iteratee?: ListIterator<T, TSort>|DictionaryIterator<T, TSort>, thisArg?: any ): LoDashExplicitArrayWrapper<T>; /** * @see _.sortBy */ sortBy<T>(iteratee: string): 
LoDashExplicitArrayWrapper<T>; /** * @see _.sortBy */ sortBy<W extends {}, T>(whereValue: W): LoDashExplicitArrayWrapper<T>; /** * @see _.sortBy */ sortBy<T>(): LoDashExplicitArrayWrapper<T>; } //_.sortByAll interface LoDashStatic { /** * This method is like "_.sortBy" except that it can sort by multiple iteratees or * property names. * * If a property name is provided for an iteratee the created "_.property" style callback * returns the property value of the given element. * * If a value is also provided for thisArg the created "_.matchesProperty" style callback * returns true for elements that have a matching property value, else false. * * If an object is provided for an iteratee the created "_.matches" style callback returns * true for elements that have the properties of the given object, else false. * * @param collection The collection to iterate over. * @param callback The function called per iteration. * @param thisArg The this binding of callback. * @return A new array of sorted elements. 
**/ sortByAll<T>( collection: Array<T>, iteratees: (ListIterator<T, any>|string|Object)[]): T[]; /** * @see _.sortByAll **/ sortByAll<T>( collection: List<T>, iteratees: (ListIterator<T, any>|string|Object)[]): T[]; /** * @see _.sortByAll **/ sortByAll<T>( collection: Array<T>, ...iteratees: (ListIterator<T, any>|string|Object)[]): T[]; /** * @see _.sortByAll **/ sortByAll<T>( collection: List<T>, ...iteratees: (ListIterator<T, any>|string|Object)[]): T[]; /** * Sorts by all the given arguments, using either ListIterator, pluckValue, or whereValue foramts * @param args The rules by which to sort */ sortByAll<T>( collection: (Array<T>|List<T>), ...args: (ListIterator<T, boolean>|Object|string)[] ): T[]; } interface LoDashImplicitArrayWrapper<T> { /** * Sorts by all the given arguments, using either ListIterator, pluckValue, or whereValue foramts * @param args The rules by which to sort */ sortByAll(...args: (ListIterator<T, boolean>|Object|string)[]): LoDashImplicitArrayWrapper<T>; /** * @see _.sortByAll **/ sortByAll( iteratees: (ListIterator<T, any>|string|Object)[]): LoDashImplicitArrayWrapper<T>; /** * @see _.sortByAll **/ sortByAll( ...iteratees: (ListIterator<T, any>|string|Object)[]): LoDashImplicitArrayWrapper<T>; } //_.sortByOrder interface LoDashStatic { /** * This method is like _.sortByAll except that it allows specifying the sort orders of the iteratees to sort * by. If orders is unspecified, all values are sorted in ascending order. Otherwise, a value is sorted in * ascending order if its corresponding order is "asc", and descending if "desc". * * If a property name is provided for an iteratee the created _.property style callback returns the property * value of the given element. * * If an object is provided for an iteratee the created _.matches style callback returns true for elements * that have the properties of the given object, else false. * * @param collection The collection to iterate over. * @param iteratees The iteratees to sort by. 
* @param orders The sort orders of iteratees. * @return Returns the new sorted array. */ sortByOrder<W extends Object, T>( collection: List<T>, iteratees: ListIterator<T, any>|string|W|(ListIterator<T, any>|string|W)[], orders?: boolean|string|(boolean|string)[] ): T[]; /** * @see _.sortByOrder */ sortByOrder<T>( collection: List<T>, iteratees: ListIterator<T, any>|string|Object|(ListIterator<T, any>|string|Object)[], orders?: boolean|string|(boolean|string)[] ): T[]; /** * @see _.sortByOrder */ sortByOrder<W extends Object, T>( collection: NumericDictionary<T>, iteratees: NumericDictionaryIterator<T, any>|string|W|(NumericDictionaryIterator<T, any>|string|W)[], orders?: boolean|string|(boolean|string)[] ): T[]; /** * @see _.sortByOrder */ sortByOrder<T>( collection: NumericDictionary<T>, iteratees: NumericDictionaryIterator<T, any>|string|Object|(NumericDictionaryIterator<T, any>|string|Object)[], orders?: boolean|string|(boolean|string)[] ): T[]; /** * @see _.sortByOrder */ sortByOrder<W extends Object, T>( collection: Dictionary<T>, iteratees: DictionaryIterator<T, any>|string|W|(DictionaryIterator<T, any>|string|W)[], orders?: boolean|string|(boolean|string)[] ): T[]; /** * @see _.sortByOrder */ sortByOrder<T>( collection: Dictionary<T>, iteratees: DictionaryIterator<T, any>|string|Object|(DictionaryIterator<T, any>|string|Object)[], orders?: boolean|string|(boolean|string)[] ): T[]; } interface LoDashImplicitWrapper<T> { /** * @see _.sortByOrder */ sortByOrder( iteratees: ListIterator<T, any>|string|(ListIterator<T, any>|string)[], orders?: boolean|string|(boolean|string)[] ): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.sortByOrder */ sortByOrder<W extends Object>( iteratees: ListIterator<T, any>|string|W|(ListIterator<T, any>|string|W)[], orders?: boolean|string|(boolean|string)[] ): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.sortByOrder */ sortByOrder<W extends Object, 
T>( iteratees: ListIterator<T, any>|string|W|(ListIterator<T, any>|string|W)[], orders?: boolean|string|(boolean|string)[] ): LoDashImplicitArrayWrapper<T>; /** * @see _.sortByOrder */ sortByOrder<T>( iteratees: ListIterator<T, any>|string|Object|(ListIterator<T, any>|string|Object)[], orders?: boolean|string|(boolean|string)[] ): LoDashImplicitArrayWrapper<T>; /** * @see _.sortByOrder */ sortByOrder<W extends Object, T>( iteratees: NumericDictionaryIterator<T, any>|string|W|(NumericDictionaryIterator<T, any>|string|W)[], orders?: boolean|string|(boolean|string)[] ): LoDashImplicitArrayWrapper<T>; /** * @see _.sortByOrder */ sortByOrder<T>( iteratees: NumericDictionaryIterator<T, any>|string|Object|(NumericDictionaryIterator<T, any>|string|Object)[], orders?: boolean|string|(boolean|string)[] ): LoDashImplicitArrayWrapper<T>; /** * @see _.sortByOrder */ sortByOrder<W extends Object, T>( iteratees: DictionaryIterator<T, any>|string|W|(DictionaryIterator<T, any>|string|W)[], orders?: boolean|string|(boolean|string)[] ): LoDashImplicitArrayWrapper<T>; /** * @see _.sortByOrder */ sortByOrder<T>( iteratees: DictionaryIterator<T, any>|string|Object|(DictionaryIterator<T, any>|string|Object)[], orders?: boolean|string|(boolean|string)[] ): LoDashImplicitArrayWrapper<T>; } interface LoDashExplicitWrapper<T> { /** * @see _.sortByOrder */ sortByOrder( iteratees: ListIterator<T, any>|string|(ListIterator<T, any>|string)[], orders?: boolean|string|(boolean|string)[] ): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.sortByOrder */ sortByOrder<W extends Object>( iteratees: ListIterator<T, any>|string|W|(ListIterator<T, any>|string|W)[], orders?: boolean|string|(boolean|string)[] ): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.sortByOrder */ sortByOrder<W extends Object, T>( iteratees: ListIterator<T, any>|string|W|(ListIterator<T, any>|string|W)[], orders?: boolean|string|(boolean|string)[] ): 
LoDashExplicitArrayWrapper<T>; /** * @see _.sortByOrder */ sortByOrder<T>( iteratees: ListIterator<T, any>|string|Object|(ListIterator<T, any>|string|Object)[], orders?: boolean|string|(boolean|string)[] ): LoDashExplicitArrayWrapper<T>; /** * @see _.sortByOrder */ sortByOrder<W extends Object, T>( iteratees: NumericDictionaryIterator<T, any>|string|W|(NumericDictionaryIterator<T, any>|string|W)[], orders?: boolean|string|(boolean|string)[] ): LoDashExplicitArrayWrapper<T>; /** * @see _.sortByOrder */ sortByOrder<T>( iteratees: NumericDictionaryIterator<T, any>|string|Object|(NumericDictionaryIterator<T, any>|string|Object)[], orders?: boolean|string|(boolean|string)[] ): LoDashExplicitArrayWrapper<T>; /** * @see _.sortByOrder */ sortByOrder<W extends Object, T>( iteratees: DictionaryIterator<T, any>|string|W|(DictionaryIterator<T, any>|string|W)[], orders?: boolean|string|(boolean|string)[] ): LoDashExplicitArrayWrapper<T>; /** * @see _.sortByOrder */ sortByOrder<T>( iteratees: DictionaryIterator<T, any>|string|Object|(DictionaryIterator<T, any>|string|Object)[], orders?: boolean|string|(boolean|string)[] ): LoDashExplicitArrayWrapper<T>; } //_.where interface LoDashStatic { /** * Performs a deep comparison of each element in a collection to the given properties * object, returning an array of all elements that have equivalent property values. * @param collection The collection to iterate over. * @param properties The object of property values to filter by. * @return A new array of elements that have the given properties. 
**/ where<T, U extends {}>( list: Array<T>, properties: U): T[]; /** * @see _.where **/ where<T, U extends {}>( list: List<T>, properties: U): T[]; /** * @see _.where **/ where<T, U extends {}>( list: Dictionary<T>, properties: U): T[]; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.where **/ where<U extends {}>(properties: U): LoDashImplicitArrayWrapper<T>; } /******** * Date * ********/ //_.now interface LoDashStatic { /** * Gets the number of milliseconds that have elapsed since the Unix epoch (1 January 1970 00:00:00 UTC). * * @return The number of milliseconds. */ now(): number; } interface LoDashImplicitWrapperBase<T, TWrapper> { /** * @see _.now */ now(): number; } interface LoDashExplicitWrapperBase<T, TWrapper> { /** * @see _.now */ now(): LoDashExplicitWrapper<number>; } /************* * Functions * *************/ //_.after interface LoDashStatic { /** * The opposite of _.before; this method creates a function that invokes func once it’s called n or more times. * * @param n The number of calls before func is invoked. * @param func The function to restrict. * @return Returns the new restricted function. */ after<TFunc extends Function>( n: number, func: TFunc ): TFunc; } interface LoDashImplicitWrapper<T> { /** * @see _.after **/ after<TFunc extends Function>(func: TFunc): LoDashImplicitObjectWrapper<TFunc>; } interface LoDashExplicitWrapper<T> { /** * @see _.after **/ after<TFunc extends Function>(func: TFunc): LoDashExplicitObjectWrapper<TFunc>; } //_.ary interface LoDashStatic { /** * Creates a function that accepts up to n arguments ignoring any additional arguments. * * @param func The function to cap arguments for. * @param n The arity cap. * @returns Returns the new function. 
*/ ary<TResult extends Function>( func: Function, n?: number ): TResult; ary<T extends Function, TResult extends Function>( func: T, n?: number ): TResult; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.ary */ ary<TResult extends Function>(n?: number): LoDashImplicitObjectWrapper<TResult>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.ary */ ary<TResult extends Function>(n?: number): LoDashExplicitObjectWrapper<TResult>; } //_.backflow interface LoDashStatic { /** * @see _.flowRight */ backflow<TResult extends Function>(...funcs: Function[]): TResult; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.flowRight */ backflow<TResult extends Function>(...funcs: Function[]): LoDashImplicitObjectWrapper<TResult>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.flowRight */ backflow<TResult extends Function>(...funcs: Function[]): LoDashExplicitObjectWrapper<TResult>; } //_.before interface LoDashStatic { /** * Creates a function that invokes func, with the this binding and arguments of the created function, while * it’s called less than n times. Subsequent calls to the created function return the result of the last func * invocation. * * @param n The number of calls at which func is no longer invoked. * @param func The function to restrict. * @return Returns the new restricted function. 
*/ before<TFunc extends Function>( n: number, func: TFunc ): TFunc; } interface LoDashImplicitWrapper<T> { /** * @see _.before **/ before<TFunc extends Function>(func: TFunc): LoDashImplicitObjectWrapper<TFunc>; } interface LoDashExplicitWrapper<T> { /** * @see _.before **/ before<TFunc extends Function>(func: TFunc): LoDashExplicitObjectWrapper<TFunc>; } //_.bind interface FunctionBind { placeholder: any; <T extends Function, TResult extends Function>( func: T, thisArg: any, ...partials: any[] ): TResult; <TResult extends Function>( func: Function, thisArg: any, ...partials: any[] ): TResult; } interface LoDashStatic { /** * Creates a function that invokes func with the this binding of thisArg and prepends any additional _.bind * arguments to those provided to the bound function. * * The _.bind.placeholder value, which defaults to _ in monolithic builds, may be used as a placeholder for * partially applied arguments. * * Note: Unlike native Function#bind this method does not set the "length" property of bound functions. * * @param func The function to bind. * @param thisArg The this binding of func. * @param partials The arguments to be partially applied. * @return Returns the new bound function. */ bind: FunctionBind; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.bind */ bind<TResult extends Function>( thisArg: any, ...partials: any[] ): LoDashImplicitObjectWrapper<TResult>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.bind */ bind<TResult extends Function>( thisArg: any, ...partials: any[] ): LoDashExplicitObjectWrapper<TResult>; } //_.bindAll interface LoDashStatic { /** * Binds methods of an object to the object itself, overwriting the existing method. Method names may be * specified as individual arguments or as arrays of method names. If no method names are provided all * enumerable function properties, own and inherited, of object are bound. * * Note: This method does not set the "length" property of bound functions. 
* * @param object The object to bind and assign the bound methods to. * @param methodNames The object method names to bind, specified as individual method names or arrays of * method names. * @return Returns object. */ bindAll<T>( object: T, ...methodNames: (string|string[])[] ): T; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.bindAll */ bindAll(...methodNames: (string|string[])[]): LoDashImplicitObjectWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.bindAll */ bindAll(...methodNames: (string|string[])[]): LoDashExplicitObjectWrapper<T>; } //_.bindKey interface FunctionBindKey { placeholder: any; <T extends Object, TResult extends Function>( object: T, key: any, ...partials: any[] ): TResult; <TResult extends Function>( object: Object, key: any, ...partials: any[] ): TResult; } interface LoDashStatic { /** * Creates a function that invokes the method at object[key] and prepends any additional _.bindKey arguments * to those provided to the bound function. * * This method differs from _.bind by allowing bound functions to reference methods that may be redefined * or don’t yet exist. See Peter Michaux’s article for more details. * * The _.bindKey.placeholder value, which defaults to _ in monolithic builds, may be used as a placeholder * for partially applied arguments. * * @param object The object the method belongs to. * @param key The key of the method. * @param partials The arguments to be partially applied. * @return Returns the new bound function. 
*/ bindKey: FunctionBindKey; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.bindKey */ bindKey<TResult extends Function>( key: any, ...partials: any[] ): LoDashImplicitObjectWrapper<TResult>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.bindKey */ bindKey<TResult extends Function>( key: any, ...partials: any[] ): LoDashExplicitObjectWrapper<TResult>; } //_.compose interface LoDashStatic { /** * @see _.flowRight */ compose<TResult extends Function>(...funcs: Function[]): TResult; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.flowRight */ compose<TResult extends Function>(...funcs: Function[]): LoDashImplicitObjectWrapper<TResult>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.flowRight */ compose<TResult extends Function>(...funcs: Function[]): LoDashExplicitObjectWrapper<TResult>; } //_.createCallback interface LoDashStatic { /** * Produces a callback bound to an optional thisArg. If func is a property name the created * callback will return the property value for a given element. If func is an object the created * callback will return true for elements that contain the equivalent object properties, * otherwise it will return false. * @param func The value to convert to a callback. * @param thisArg The this binding of the created callback. * @param argCount The number of arguments the callback accepts. * @return A callback function. 
**/ createCallback( func: string, thisArg?: any, argCount?: number): () => any; /** * @see _.createCallback **/ createCallback( func: Dictionary<any>, thisArg?: any, argCount?: number): () => boolean; } interface LoDashImplicitWrapper<T> { /** * @see _.createCallback **/ createCallback( thisArg?: any, argCount?: number): LoDashImplicitObjectWrapper<() => any>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.createCallback **/ createCallback( thisArg?: any, argCount?: number): LoDashImplicitObjectWrapper<() => any>; } //_.curry interface LoDashStatic { /** * Creates a function that accepts one or more arguments of func that when called either invokes func returning * its result, if all func arguments have been provided, or returns a function that accepts one or more of the * remaining func arguments, and so on. The arity of func may be specified if func.length is not sufficient. * @param func The function to curry. * @return Returns the new curried function. */ curry<T1, R>(func: (t1: T1) => R): CurriedFunction1<T1, R>; /** * Creates a function that accepts one or more arguments of func that when called either invokes func returning * its result, if all func arguments have been provided, or returns a function that accepts one or more of the * remaining func arguments, and so on. The arity of func may be specified if func.length is not sufficient. * @param func The function to curry. * @return Returns the new curried function. */ curry<T1, T2, R>(func: (t1: T1, t2: T2) => R): CurriedFunction2<T1, T2, R>; /** * Creates a function that accepts one or more arguments of func that when called either invokes func returning * its result, if all func arguments have been provided, or returns a function that accepts one or more of the * remaining func arguments, and so on. The arity of func may be specified if func.length is not sufficient. * @param func The function to curry. * @return Returns the new curried function. 
*/ curry<T1, T2, T3, R>(func: (t1: T1, t2: T2, t3: T3) => R): CurriedFunction3<T1, T2, T3, R>; /** * Creates a function that accepts one or more arguments of func that when called either invokes func returning * its result, if all func arguments have been provided, or returns a function that accepts one or more of the * remaining func arguments, and so on. The arity of func may be specified if func.length is not sufficient. * @param func The function to curry. * @return Returns the new curried function. */ curry<T1, T2, T3, T4, R>(func: (t1: T1, t2: T2, t3: T3, t4: T4) => R): CurriedFunction4<T1, T2, T3, T4, R>; /** * Creates a function that accepts one or more arguments of func that when called either invokes func returning * its result, if all func arguments have been provided, or returns a function that accepts one or more of the * remaining func arguments, and so on. The arity of func may be specified if func.length is not sufficient. * @param func The function to curry. * @return Returns the new curried function. */ curry<T1, T2, T3, T4, T5, R>(func: (t1: T1, t2: T2, t3: T3, t4: T4, t5: T5) => R): CurriedFunction5<T1, T2, T3, T4, T5, R>; /** * Creates a function that accepts one or more arguments of func that when called either invokes func returning * its result, if all func arguments have been provided, or returns a function that accepts one or more of the * remaining func arguments, and so on. The arity of func may be specified if func.length is not sufficient. * @param func The function to curry. * @param arity The arity of func. * @return Returns the new curried function. 
*/ curry<TResult extends Function>( func: Function, arity?: number): TResult; } interface CurriedFunction1<T1, R> { (): CurriedFunction1<T1, R>; (t1: T1): R; } interface CurriedFunction2<T1, T2, R> { (): CurriedFunction2<T1, T2, R>; (t1: T1): CurriedFunction1<T2, R>; (t1: T1, t2: T2): R; } interface CurriedFunction3<T1, T2, T3, R> { (): CurriedFunction3<T1, T2, T3, R>; (t1: T1): CurriedFunction2<T2, T3, R>; (t1: T1, t2: T2): CurriedFunction1<T3, R>; (t1: T1, t2: T2, t3: T3): R; } interface CurriedFunction4<T1, T2, T3, T4, R> { (): CurriedFunction4<T1, T2, T3, T4, R>; (t1: T1): CurriedFunction3<T2, T3, T4, R>; (t1: T1, t2: T2): CurriedFunction2<T3, T4, R>; (t1: T1, t2: T2, t3: T3): CurriedFunction1<T4, R>; (t1: T1, t2: T2, t3: T3, t4: T4): R; } interface CurriedFunction5<T1, T2, T3, T4, T5, R> { (): CurriedFunction5<T1, T2, T3, T4, T5, R>; (t1: T1): CurriedFunction4<T2, T3, T4, T5, R>; (t1: T1, t2: T2): CurriedFunction3<T3, T4, T5, R>; (t1: T1, t2: T2, t3: T3): CurriedFunction2<T4, T5, R>; (t1: T1, t2: T2, t3: T3, t4: T4): CurriedFunction1<T5, R>; (t1: T1, t2: T2, t3: T3, t4: T4, t5: T5): R; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.curry **/ curry<TResult extends Function>(arity?: number): LoDashImplicitObjectWrapper<TResult>; } //_.curryRight interface LoDashStatic { /** * This method is like _.curry except that arguments are applied to func in the manner of _.partialRight * instead of _.partial. * @param func The function to curry. * @return Returns the new curried function. */ curryRight<T1, R>(func: (t1: T1) => R): CurriedFunction1<T1, R>; /** * This method is like _.curry except that arguments are applied to func in the manner of _.partialRight * instead of _.partial. * @param func The function to curry. * @return Returns the new curried function. 
//_.debounce
interface DebounceSettings {
    /**
     * Specify invoking on the leading edge of the timeout.
     */
    leading?: boolean;

    /**
     * The maximum time func is allowed to be delayed before it’s invoked.
     */
    maxWait?: number;

    /**
     * Specify invoking on the trailing edge of the timeout.
     */
    trailing?: boolean;
}

interface LoDashStatic {
    /**
     * Creates a debounced function that delays invoking func until after wait milliseconds have elapsed since
     * the last time the debounced function was invoked. The debounced function comes with a cancel method to
     * cancel delayed invocations. Provide an options object to indicate that func should be invoked on the
     * leading and/or trailing edge of the wait timeout. Subsequent calls to the debounced function return the
     * result of the last func invocation.
     *
     * Note: If leading and trailing options are true, func is invoked on the trailing edge of the timeout only
     * if the debounced function is invoked more than once during the wait timeout.
     *
     * See David Corbacho’s article for details over the differences between _.debounce and _.throttle.
     *
     * @param func The function to debounce.
     * @param wait The number of milliseconds to delay.
     * @param options The options object.
     * @param options.leading Specify invoking on the leading edge of the timeout.
     * @param options.maxWait The maximum time func is allowed to be delayed before it’s invoked.
     * @param options.trailing Specify invoking on the trailing edge of the timeout.
     * @return Returns the new debounced function.
     */
    debounce<T extends Function>(
        func: T,
        wait?: number,
        options?: DebounceSettings
    ): T & Cancelable;
}

interface LoDashImplicitObjectWrapper<T> {
    /**
     * @see _.debounce
     */
    debounce(
        wait?: number,
        options?: DebounceSettings
    ): LoDashImplicitObjectWrapper<T & Cancelable>;
}

interface LoDashExplicitObjectWrapper<T> {
    /**
     * @see _.debounce
     */
    debounce(
        wait?: number,
        options?: DebounceSettings
    ): LoDashExplicitObjectWrapper<T & Cancelable>;
}
*/ defer<T extends Function>( func: T, ...args: any[] ): number; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.defer */ defer(...args: any[]): LoDashImplicitWrapper<number>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.defer */ defer(...args: any[]): LoDashExplicitWrapper<number>; } //_.delay interface LoDashStatic { /** * Invokes func after wait milliseconds. Any additional arguments are provided to func when it’s invoked. * * @param func The function to delay. * @param wait The number of milliseconds to delay invocation. * @param args The arguments to invoke the function with. * @return Returns the timer id. */ delay<T extends Function>( func: T, wait: number, ...args: any[] ): number; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.delay */ delay( wait: number, ...args: any[] ): LoDashImplicitWrapper<number>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.delay */ delay( wait: number, ...args: any[] ): LoDashExplicitWrapper<number>; } //_.flow interface LoDashStatic { /** * Creates a function that returns the result of invoking the provided functions with the this binding of the * created function, where each successive invocation is supplied the return value of the previous. * * @param funcs Functions to invoke. * @return Returns the new function. */ flow<TResult extends Function>(...funcs: Function[]): TResult; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.flow */ flow<TResult extends Function>(...funcs: Function[]): LoDashImplicitObjectWrapper<TResult>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.flow */ flow<TResult extends Function>(...funcs: Function[]): LoDashExplicitObjectWrapper<TResult>; } //_.flowRight interface LoDashStatic { /** * This method is like _.flow except that it creates a function that invokes the provided functions from right * to left. * * @alias _.backflow, _.compose * * @param funcs Functions to invoke. * @return Returns the new function. 
//_.memoize
interface MemoizedFunction extends Function {
    // Backing cache of previously computed results, keyed by the resolver's
    // return value (or the stringified first argument by default). Exposed so
    // callers can inspect or clear memoized entries.
    cache: MapCache;
}

interface LoDashStatic {
    /**
     * Creates a function that memoizes the result of func. If resolver is provided it determines the cache key for
     * storing the result based on the arguments provided to the memoized function. By default, the first argument
     * provided to the memoized function is coerced to a string and used as the cache key. The func is invoked with
     * the this binding of the memoized function.
     *
     * @param func The function to have its output memoized.
     * @param resolver The function to resolve the cache key.
     * @return Returns the new memoizing function.
     */
    memoize: {
        <T extends Function>(func: T, resolver?: Function): T & MemoizedFunction;
        // NOTE(review): lodash documents _.memoize.Cache as the replaceable cache
        // *constructor* (e.g. assignable to WeakMap), but it is typed here as a
        // MapCache instance — confirm against the lodash API before relying on it.
        Cache: MapCache;
    }
}

interface LoDashImplicitObjectWrapper<T> {
    /**
     * @see _.memoize
     */
    memoize(resolver?: Function): LoDashImplicitObjectWrapper<T & MemoizedFunction>;
}
*/ modArgs<T extends Function, TResult extends Function>( func: T, ...transforms: Function[] ): TResult; /** * @see _.modArgs */ modArgs<T extends Function, TResult extends Function>( func: T, transforms: Function[] ): TResult; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.modArgs */ modArgs<TResult extends Function>(...transforms: Function[]): LoDashImplicitObjectWrapper<TResult>; /** * @see _.modArgs */ modArgs<TResult extends Function>(transforms: Function[]): LoDashImplicitObjectWrapper<TResult>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.modArgs */ modArgs<TResult extends Function>(...transforms: Function[]): LoDashExplicitObjectWrapper<TResult>; /** * @see _.modArgs */ modArgs<TResult extends Function>(transforms: Function[]): LoDashExplicitObjectWrapper<TResult>; } //_.negate interface LoDashStatic { /** * Creates a function that negates the result of the predicate func. The func predicate is invoked with * the this binding and arguments of the created function. * * @param predicate The predicate to negate. * @return Returns the new function. */ negate<T extends Function>(predicate: T): (...args: any[]) => boolean; /** * @see _.negate */ negate<T extends Function, TResult extends Function>(predicate: T): TResult; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.negate */ negate(): LoDashImplicitObjectWrapper<(...args: any[]) => boolean>; /** * @see _.negate */ negate<TResult extends Function>(): LoDashImplicitObjectWrapper<TResult>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.negate */ negate(): LoDashExplicitObjectWrapper<(...args: any[]) => boolean>; /** * @see _.negate */ negate<TResult extends Function>(): LoDashExplicitObjectWrapper<TResult>; } //_.once interface LoDashStatic { /** * Creates a function that is restricted to invoking func once. Repeat calls to the function return the value * of the first call. The func is invoked with the this binding and arguments of the created function. 
* * @param func The function to restrict. * @return Returns the new restricted function. */ once<T extends Function>(func: T): T; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.once */ once(): LoDashImplicitObjectWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.once */ once(): LoDashExplicitObjectWrapper<T>; } //_.partial interface LoDashStatic { /** * Creates a function that, when called, invokes func with any additional partial arguments * prepended to those provided to the new function. This method is similar to _.bind except * it does not alter the this binding. * @param func The function to partially apply arguments to. * @param args Arguments to be partially applied. * @return The new partially applied function. **/ partial: Partial; } type PH = LoDashStatic; interface Function0<R> { (): R; } interface Function1<T1, R> { (t1: T1): R; } interface Function2<T1, T2, R> { (t1: T1, t2: T2): R; } interface Function3<T1, T2, T3, R> { (t1: T1, t2: T2, t3: T3): R; } interface Function4<T1, T2, T3, T4, R> { (t1: T1, t2: T2, t3: T3, t4: T4): R; } interface Partial { // arity 0 <R>(func: Function0<R>): Function0<R>; // arity 1 <T1, R>(func: Function1<T1, R>): Function1<T1, R>; <T1, R>(func: Function1<T1, R>, arg1: T1): Function0<R>; // arity 2 <T1, T2, R>(func: Function2<T1, T2, R>): Function2<T1, T2, R>; <T1, T2, R>(func: Function2<T1, T2, R>, arg1: T1): Function1< T2, R>; <T1, T2, R>(func: Function2<T1, T2, R>, plc1: PH, arg2: T2): Function1<T1, R>; <T1, T2, R>(func: Function2<T1, T2, R>, arg1: T1, arg2: T2): Function0< R>; // arity 3 <T1, T2, T3, R>(func: Function3<T1, T2, T3, R>): Function3<T1, T2, T3, R>; <T1, T2, T3, R>(func: Function3<T1, T2, T3, R>, arg1: T1): Function2< T2, T3, R>; <T1, T2, T3, R>(func: Function3<T1, T2, T3, R>, plc1: PH, arg2: T2): Function2<T1, T3, R>; <T1, T2, T3, R>(func: Function3<T1, T2, T3, R>, arg1: T1, arg2: T2): Function1< T3, R>; <T1, T2, T3, R>(func: Function3<T1, T2, T3, R>, plc1: PH, plc2: PH, arg3: 
T3): Function2<T1, T2, R>; <T1, T2, T3, R>(func: Function3<T1, T2, T3, R>, arg1: T1, plc2: PH, arg3: T3): Function1< T2, R>; <T1, T2, T3, R>(func: Function3<T1, T2, T3, R>, plc1: PH, arg2: T2, arg3: T3): Function1<T1, R>; <T1, T2, T3, R>(func: Function3<T1, T2, T3, R>, arg1: T1, arg2: T2, arg3: T3): Function0< R>; // arity 4 <T1, T2, T3, T4, R>(func: Function4<T1, T2, T3, T4, R>): Function4<T1, T2, T3, T4, R>; <T1, T2, T3, T4, R>(func: Function4<T1, T2, T3, T4, R>, arg1: T1): Function3< T2, T3, T4, R>; <T1, T2, T3, T4, R>(func: Function4<T1, T2, T3, T4, R>, plc1: PH, arg2: T2): Function3<T1, T3, T4, R>; <T1, T2, T3, T4, R>(func: Function4<T1, T2, T3, T4, R>, arg1: T1, arg2: T2): Function2< T3, T4, R>; <T1, T2, T3, T4, R>(func: Function4<T1, T2, T3, T4, R>, plc1: PH, plc2: PH, arg3: T3): Function3<T1, T2, T4, R>; <T1, T2, T3, T4, R>(func: Function4<T1, T2, T3, T4, R>, arg1: T1, plc2: PH, arg3: T3): Function2< T2, T4, R>; <T1, T2, T3, T4, R>(func: Function4<T1, T2, T3, T4, R>, plc1: PH, arg2: T2, arg3: T3): Function2<T1, T4, R>; <T1, T2, T3, T4, R>(func: Function4<T1, T2, T3, T4, R>, arg1: T1, arg2: T2, arg3: T3): Function1< T4, R>; <T1, T2, T3, T4, R>(func: Function4<T1, T2, T3, T4, R>, plc1: PH, plc2: PH, plc3: PH, arg4: T4): Function3<T1, T2, T3, R>; <T1, T2, T3, T4, R>(func: Function4<T1, T2, T3, T4, R>, arg1: T1, plc2: PH, plc3: PH, arg4: T4): Function2< T2, T3, R>; <T1, T2, T3, T4, R>(func: Function4<T1, T2, T3, T4, R>, plc1: PH, arg2: T2, plc3: PH, arg4: T4): Function2<T1, T3, R>; <T1, T2, T3, T4, R>(func: Function4<T1, T2, T3, T4, R>, arg1: T1, arg2: T2, plc3: PH, arg4: T4): Function1< T3, R>; <T1, T2, T3, T4, R>(func: Function4<T1, T2, T3, T4, R>, plc1: PH, plc2: PH, arg3: T3, arg4: T4): Function2<T1, T2, R>; <T1, T2, T3, T4, R>(func: Function4<T1, T2, T3, T4, R>, arg1: T1, plc2: PH, arg3: T3, arg4: T4): Function1< T2, R>; <T1, T2, T3, T4, R>(func: Function4<T1, T2, T3, T4, R>, plc1: PH, arg2: T2, arg3: T3, arg4: T4): Function1<T1, R>; <T1, T2, T3, T4, 
R>(func: Function4<T1, T2, T3, T4, R>, arg1: T1, arg2: T2, arg3: T3, arg4: T4): Function0< R>; // catch-all (func: Function, ...args: any[]): Function; } //_.partialRight interface LoDashStatic { /** * This method is like _.partial except that partial arguments are appended to those provided * to the new function. * @param func The function to partially apply arguments to. * @param args Arguments to be partially applied. * @return The new partially applied function. **/ partialRight: PartialRight } interface PartialRight { // arity 0 <R>(func: Function0<R>): Function0<R>; // arity 1 <T1, R>(func: Function1<T1, R>): Function1<T1, R>; <T1, R>(func: Function1<T1, R>, arg1: T1): Function0<R>; // arity 2 <T1, T2, R>(func: Function2<T1, T2, R>): Function2<T1, T2, R>; <T1, T2, R>(func: Function2<T1, T2, R>, arg1: T1, plc2: PH): Function1< T2, R>; <T1, T2, R>(func: Function2<T1, T2, R>, arg2: T2): Function1<T1, R>; <T1, T2, R>(func: Function2<T1, T2, R>, arg1: T1, arg2: T2): Function0< R>; // arity 3 <T1, T2, T3, R>(func: Function3<T1, T2, T3, R>): Function3<T1, T2, T3, R>; <T1, T2, T3, R>(func: Function3<T1, T2, T3, R>, arg1: T1, plc2: PH, plc3: PH): Function2< T2, T3, R>; <T1, T2, T3, R>(func: Function3<T1, T2, T3, R>, arg2: T2, plc3: PH): Function2<T1, T3, R>; <T1, T2, T3, R>(func: Function3<T1, T2, T3, R>, arg1: T1, arg2: T2, plc3: PH): Function1< T3, R>; <T1, T2, T3, R>(func: Function3<T1, T2, T3, R>, arg3: T3): Function2<T1, T2, R>; <T1, T2, T3, R>(func: Function3<T1, T2, T3, R>, arg1: T1, plc2: PH, arg3: T3): Function1< T2, R>; <T1, T2, T3, R>(func: Function3<T1, T2, T3, R>, arg2: T2, arg3: T3): Function1<T1, R>; <T1, T2, T3, R>(func: Function3<T1, T2, T3, R>, arg1: T1, arg2: T2, arg3: T3): Function0< R>; // arity 4 <T1, T2, T3, T4, R>(func: Function4<T1, T2, T3, T4, R>): Function4<T1, T2, T3, T4, R>; <T1, T2, T3, T4, R>(func: Function4<T1, T2, T3, T4, R>, arg1: T1, plc2: PH, plc3: PH, plc4: PH): Function3< T2, T3, T4, R>; <T1, T2, T3, T4, R>(func: Function4<T1, T2, 
T3, T4, R>, arg2: T2, plc3: PH, plc4: PH): Function3<T1, T3, T4, R>; <T1, T2, T3, T4, R>(func: Function4<T1, T2, T3, T4, R>, arg1: T1, arg2: T2, plc3: PH, plc4: PH): Function2< T3, T4, R>; <T1, T2, T3, T4, R>(func: Function4<T1, T2, T3, T4, R>, arg3: T3, plc4: PH): Function3<T1, T2, T4, R>; <T1, T2, T3, T4, R>(func: Function4<T1, T2, T3, T4, R>, arg1: T1, plc2: PH, arg3: T3, plc4: PH): Function2< T2, T4, R>; <T1, T2, T3, T4, R>(func: Function4<T1, T2, T3, T4, R>, arg2: T2, arg3: T3, plc4: PH): Function2<T1, T4, R>; <T1, T2, T3, T4, R>(func: Function4<T1, T2, T3, T4, R>, arg1: T1, arg2: T2, arg3: T3, plc4: PH): Function1< T4, R>; <T1, T2, T3, T4, R>(func: Function4<T1, T2, T3, T4, R>, arg4: T4): Function3<T1, T2, T3, R>; <T1, T2, T3, T4, R>(func: Function4<T1, T2, T3, T4, R>, arg1: T1, plc2: PH, plc3: PH, arg4: T4): Function2< T2, T3, R>; <T1, T2, T3, T4, R>(func: Function4<T1, T2, T3, T4, R>, arg2: T2, plc3: PH, arg4: T4): Function2<T1, T3, R>; <T1, T2, T3, T4, R>(func: Function4<T1, T2, T3, T4, R>, arg1: T1, arg2: T2, plc3: PH, arg4: T4): Function1< T3, R>; <T1, T2, T3, T4, R>(func: Function4<T1, T2, T3, T4, R>, arg3: T3, arg4: T4): Function2<T1, T2, R>; <T1, T2, T3, T4, R>(func: Function4<T1, T2, T3, T4, R>, arg1: T1, plc2: PH, arg3: T3, arg4: T4): Function1< T2, R>; <T1, T2, T3, T4, R>(func: Function4<T1, T2, T3, T4, R>, arg2: T2, arg3: T3, arg4: T4): Function1<T1, R>; <T1, T2, T3, T4, R>(func: Function4<T1, T2, T3, T4, R>, arg1: T1, arg2: T2, arg3: T3, arg4: T4): Function0< R>; // catch-all (func: Function, ...args: any[]): Function; } //_.rearg interface LoDashStatic { /** * Creates a function that invokes func with arguments arranged according to the specified indexes where the * argument value at the first index is provided as the first argument, the argument value at the second index * is provided as the second argument, and so on. * @param func The function to rearrange arguments for. 
* @param indexes The arranged argument indexes, specified as individual indexes or arrays of indexes. * @return Returns the new function. */ rearg<TResult extends Function>(func: Function, indexes: number[]): TResult; /** * @see _.rearg */ rearg<TResult extends Function>(func: Function, ...indexes: number[]): TResult; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.rearg */ rearg<TResult extends Function>(indexes: number[]): LoDashImplicitObjectWrapper<TResult>; /** * @see _.rearg */ rearg<TResult extends Function>(...indexes: number[]): LoDashImplicitObjectWrapper<TResult>; } //_.restParam interface LoDashStatic { /** * Creates a function that invokes func with the this binding of the created function and arguments from start * and beyond provided as an array. * * Note: This method is based on the rest parameter. * * @param func The function to apply a rest parameter to. * @param start The start position of the rest parameter. * @return Returns the new function. */ restParam<TResult extends Function>( func: Function, start?: number ): TResult; /** * @see _.restParam */ restParam<TResult extends Function, TFunc extends Function>( func: TFunc, start?: number ): TResult; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.restParam */ restParam<TResult extends Function>(start?: number): LoDashImplicitObjectWrapper<TResult>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.restParam */ restParam<TResult extends Function>(start?: number): LoDashExplicitObjectWrapper<TResult>; } //_.spread interface LoDashStatic { /** * Creates a function that invokes func with the this binding of the created function and an array of arguments * much like Function#apply. * * Note: This method is based on the spread operator. * * @param func The function to spread arguments over. * @return Returns the new function. 
//_.throttle
interface ThrottleSettings {
    /**
     * Specify invoking on the leading edge of the timeout (pass false to
     * disable the leading-edge call).
     */
    leading?: boolean;

    /**
     * Specify invoking on the trailing edge of the timeout (pass false to
     * disable the trailing-edge call).
     */
    trailing?: boolean;
}

interface LoDashStatic {
    /**
     * Creates a throttled function that only invokes func at most once per every wait milliseconds. The throttled
     * function comes with a cancel method to cancel delayed invocations. Provide an options object to indicate
     * that func should be invoked on the leading and/or trailing edge of the wait timeout. Subsequent calls to
     * the throttled function return the result of the last func call.
     *
     * Note: If leading and trailing options are true, func is invoked on the trailing edge of the timeout only if
     * the throttled function is invoked more than once during the wait timeout.
     *
     * @param func The function to throttle.
     * @param wait The number of milliseconds to throttle invocations to.
     * @param options The options object.
     * @param options.leading Specify invoking on the leading edge of the timeout.
     * @param options.trailing Specify invoking on the trailing edge of the timeout.
     * @return Returns the new throttled function.
     */
    throttle<T extends Function>(
        func: T,
        wait?: number,
        options?: ThrottleSettings
    ): T & Cancelable;
}

interface LoDashImplicitObjectWrapper<T> {
    /**
     * @see _.throttle
     */
    throttle(
        wait?: number,
        options?: ThrottleSettings
    ): LoDashImplicitObjectWrapper<T & Cancelable>;
}

interface LoDashExplicitObjectWrapper<T> {
    /**
     * @see _.throttle
     */
    throttle(
        wait?: number,
        options?: ThrottleSettings
    ): LoDashExplicitObjectWrapper<T & Cancelable>;
}
LoDashExplicitObjectWrapper<R>; /** * @see _.wrap */ wrap<R extends Function>(wrapper: Function): LoDashExplicitObjectWrapper<R>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.wrap */ wrap<W extends Function, R extends Function>(wrapper: W): LoDashExplicitObjectWrapper<R>; /** * @see _.wrap */ wrap<R extends Function>(wrapper: Function): LoDashExplicitObjectWrapper<R>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.wrap */ wrap<W extends Function, R extends Function>(wrapper: W): LoDashExplicitObjectWrapper<R>; /** * @see _.wrap */ wrap<R extends Function>(wrapper: Function): LoDashExplicitObjectWrapper<R>; } /******** * Lang * ********/ //_.clone interface CloneCustomizer<TValue, TResult> { (value: TValue): TResult; } interface LoDashStatic { /** * Creates a clone of value. If isDeep is true nested objects are cloned, otherwise they are assigned by * reference. If customizer is provided it's invoked to produce the cloned values. If customizer returns * undefined cloning is handled by the method instead. The customizer is bound to thisArg and invoked with * up to three argument; (value [, index|key, object]). * * Note: This method is loosely based on the structured clone algorithm. The enumerable properties of * arguments objects and objects created by constructors other than Object are cloned to plain Object * objects. An empty object is returned for uncloneable values such as functions, DOM nodes, Maps, Sets, * and WeakMaps. * * @param value The value to clone. * @param isDeep Specify a deep clone. * @param customizer The function to customize cloning values. * @param thisArg The this binding of customizer. * @return Returns the cloned value. 
*/ clone<TResult>( value: any, isDeep: boolean, customizer: CloneCustomizer<any, TResult>, thisArg?: any ): TResult; /** * @see _.clone */ clone<T, TResult>( value: T, isDeep: boolean, customizer: CloneCustomizer<T, TResult>, thisArg?: any ): TResult; /** * @see _.clone */ clone<TResult>( value: any, customizer: CloneCustomizer<any, TResult>, thisArg?: any ): TResult; /** * @see _.clone */ clone<T, TResult>( value: T, customizer: CloneCustomizer<T, TResult>, thisArg?: any ): TResult; /** * @see _.clone */ clone<T>( value: T, isDeep?: boolean ): T; } interface LoDashImplicitWrapper<T> { /** * @see _.clone */ clone<TResult>( isDeep: boolean, customizer: CloneCustomizer<T, TResult>, thisArg?: any ): TResult; /** * @see _.clone */ clone<TResult>( customizer: CloneCustomizer<T, TResult>, thisArg?: any ): TResult; /** * @see _.clone */ clone( isDeep?: boolean ): T; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.clone */ clone<TResult>( isDeep: boolean, customizer: CloneCustomizer<T[], TResult>, thisArg?: any ): TResult; /** * @see _.clone */ clone<TResult>( customizer: CloneCustomizer<T[], TResult>, thisArg?: any ): TResult; /** * @see _.clone */ clone( isDeep?: boolean ): T[]; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.clone */ clone<TResult>( isDeep: boolean, customizer: CloneCustomizer<T, TResult>, thisArg?: any ): TResult; /** * @see _.clone */ clone<TResult>( customizer: CloneCustomizer<T, TResult>, thisArg?: any ): TResult; /** * @see _.clone */ clone( isDeep?: boolean ): T; } interface LoDashExplicitWrapper<T> { /** * @see _.clone */ clone<TResult extends (number|string|boolean)>( isDeep: boolean, customizer: CloneCustomizer<T, TResult>, thisArg?: any ): LoDashExplicitWrapper<TResult>; /** * @see _.clone */ clone<TResult>( isDeep: boolean, customizer: CloneCustomizer<T, TResult[]>, thisArg?: any ): LoDashExplicitArrayWrapper<TResult>; /** * @see _.clone */ clone<TResult extends Object>( isDeep: boolean, customizer: CloneCustomizer<T, 
TResult>, thisArg?: any ): LoDashExplicitObjectWrapper<TResult>; /** * @see _.clone */ clone<TResult extends (number|string|boolean)>( customizer: CloneCustomizer<T, TResult>, thisArg?: any ): LoDashExplicitWrapper<TResult>; /** * @see _.clone */ clone<TResult>( customizer: CloneCustomizer<T, TResult[]>, thisArg?: any ): LoDashExplicitArrayWrapper<TResult>; /** * @see _.clone */ clone<TResult extends Object>( customizer: CloneCustomizer<T, TResult>, thisArg?: any ): LoDashExplicitObjectWrapper<TResult>; /** * @see _.clone */ clone( isDeep?: boolean ): LoDashExplicitWrapper<T>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.clone */ clone<TResult extends (number|string|boolean)>( isDeep: boolean, customizer: CloneCustomizer<T[], TResult>, thisArg?: any ): LoDashExplicitWrapper<TResult>; /** * @see _.clone */ clone<TResult>( isDeep: boolean, customizer: CloneCustomizer<T[], TResult[]>, thisArg?: any ): LoDashExplicitArrayWrapper<TResult>; /** * @see _.clone */ clone<TResult extends Object>( isDeep: boolean, customizer: CloneCustomizer<T[], TResult>, thisArg?: any ): LoDashExplicitObjectWrapper<TResult>; /** * @see _.clone */ clone<TResult extends (number|string|boolean)>( customizer: CloneCustomizer<T[], TResult>, thisArg?: any ): LoDashExplicitWrapper<TResult>; /** * @see _.clone */ clone<TResult>( customizer: CloneCustomizer<T[], TResult[]>, thisArg?: any ): LoDashExplicitArrayWrapper<TResult>; /** * @see _.clone */ clone<TResult extends Object>( customizer: CloneCustomizer<T[], TResult>, thisArg?: any ): LoDashExplicitObjectWrapper<TResult>; /** * @see _.clone */ clone( isDeep?: boolean ): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.clone */ clone<TResult extends (number|string|boolean)>( isDeep: boolean, customizer: CloneCustomizer<T, TResult>, thisArg?: any ): LoDashExplicitWrapper<TResult>; /** * @see _.clone */ clone<TResult>( isDeep: boolean, customizer: CloneCustomizer<T, TResult[]>, thisArg?: any ): 
LoDashExplicitArrayWrapper<TResult>; /** * @see _.clone */ clone<TResult extends Object>( isDeep: boolean, customizer: CloneCustomizer<T, TResult>, thisArg?: any ): LoDashExplicitObjectWrapper<TResult>; /** * @see _.clone */ clone<TResult extends (number|string|boolean)>( customizer: CloneCustomizer<T, TResult>, thisArg?: any ): LoDashExplicitWrapper<TResult>; /** * @see _.clone */ clone<TResult>( customizer: CloneCustomizer<T, TResult[]>, thisArg?: any ): LoDashExplicitArrayWrapper<TResult>; /** * @see _.clone */ clone<TResult extends Object>( customizer: CloneCustomizer<T, TResult>, thisArg?: any ): LoDashExplicitObjectWrapper<TResult>; /** * @see _.clone */ clone( isDeep?: boolean ): LoDashExplicitObjectWrapper<T>; } //_.cloneDeep interface CloneDeepCustomizer<TValue, TResult> { (value: TValue): TResult; } interface LoDashStatic { /** * Creates a deep clone of value. If customizer is provided it's invoked to produce the cloned values. * If customizer returns undefined cloning is handled by the method instead. The customizer is bound to * thisArg and invoked with up to three argument; (value [, index|key, object]). * * Note: This method is loosely based on the structured clone algorithm. The enumerable properties of * arguments objects and objects created by constructors other than Object are cloned to plain Object objects. * An empty object is returned for uncloneable values such as functions, DOM nodes, Maps, Sets, and WeakMaps. * * @param value The value to deep clone. * @param customizer The function to customize cloning values. * @param thisArg The this binding of customizer. * @return Returns the deep cloned value. 
*/ cloneDeep<TResult>( value: any, customizer: CloneDeepCustomizer<any, TResult>, thisArg?: any ): TResult; /** * @see _.cloneDeep */ cloneDeep<T, TResult>( value: T, customizer: CloneDeepCustomizer<T, TResult>, thisArg?: any ): TResult; /** * @see _.cloneDeep */ cloneDeep<T>( value: T ): T; } interface LoDashImplicitWrapper<T> { /** * @see _.cloneDeep */ cloneDeep<TResult>( customizer: CloneDeepCustomizer<T, TResult>, thisArg?: any ): TResult; /** * @see _.cloneDeep */ cloneDeep(): T; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.cloneDeep */ cloneDeep<TResult>( customizer: CloneDeepCustomizer<T[], TResult>, thisArg?: any ): TResult; /** * @see _.cloneDeep */ cloneDeep(): T[]; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.cloneDeep */ cloneDeep<TResult>( customizer: CloneDeepCustomizer<T, TResult>, thisArg?: any ): TResult; /** * @see _.cloneDeep */ cloneDeep(): T; } interface LoDashExplicitWrapper<T> { /** * @see _.cloneDeep */ cloneDeep<TResult extends (number|string|boolean)>( customizer: CloneDeepCustomizer<T, TResult>, thisArg?: any ): LoDashExplicitWrapper<TResult>; /** * @see _.cloneDeep */ cloneDeep<TResult>( customizer: CloneDeepCustomizer<T, TResult[]>, thisArg?: any ): LoDashExplicitArrayWrapper<TResult>; /** * @see _.cloneDeep */ cloneDeep<TResult extends Object>( customizer: CloneDeepCustomizer<T, TResult>, thisArg?: any ): LoDashExplicitObjectWrapper<TResult>; /** * @see _.cloneDeep */ cloneDeep(): LoDashExplicitWrapper<T>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.cloneDeep */ cloneDeep<TResult extends (number|string|boolean)>( customizer: CloneDeepCustomizer<T[], TResult>, thisArg?: any ): LoDashExplicitWrapper<TResult>; /** * @see _.cloneDeep */ cloneDeep<TResult>( customizer: CloneDeepCustomizer<T[], TResult[]>, thisArg?: any ): LoDashExplicitArrayWrapper<TResult>; /** * @see _.cloneDeep */ cloneDeep<TResult extends Object>( customizer: CloneDeepCustomizer<T[], TResult>, thisArg?: any ): 
LoDashExplicitObjectWrapper<TResult>; /** * @see _.cloneDeep */ cloneDeep(): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.cloneDeep */ cloneDeep<TResult extends (number|string|boolean)>( customizer: CloneDeepCustomizer<T, TResult>, thisArg?: any ): LoDashExplicitWrapper<TResult>; /** * @see _.cloneDeep */ cloneDeep<TResult>( customizer: CloneDeepCustomizer<T, TResult[]>, thisArg?: any ): LoDashExplicitArrayWrapper<TResult>; /** * @see _.cloneDeep */ cloneDeep<TResult extends Object>( customizer: CloneDeepCustomizer<T, TResult>, thisArg?: any ): LoDashExplicitObjectWrapper<TResult>; /** * @see _.cloneDeep */ cloneDeep(): LoDashExplicitObjectWrapper<T>; } //_.eq interface LoDashStatic { /** * @see _.isEqual */ eq( value: any, other: any, customizer?: IsEqualCustomizer, thisArg?: any ): boolean; } interface LoDashImplicitWrapperBase<T, TWrapper> { /** * @see _.isEqual */ eq( other: any, customizer?: IsEqualCustomizer, thisArg?: any ): boolean; } interface LoDashExplicitWrapperBase<T, TWrapper> { /** * @see _.isEqual */ eq( other: any, customizer?: IsEqualCustomizer, thisArg?: any ): LoDashExplicitWrapper<boolean>; } //_.gt interface LoDashStatic { /** * Checks if value is greater than other. * * @param value The value to compare. * @param other The other value to compare. * @return Returns true if value is greater than other, else false. */ gt( value: any, other: any ): boolean; } interface LoDashImplicitWrapperBase<T, TWrapper> { /** * @see _.gt */ gt(other: any): boolean; } interface LoDashExplicitWrapperBase<T, TWrapper> { /** * @see _.gt */ gt(other: any): LoDashExplicitWrapper<boolean>; } //_.gte interface LoDashStatic { /** * Checks if value is greater than or equal to other. * * @param value The value to compare. * @param other The other value to compare. * @return Returns true if value is greater than or equal to other, else false. 
*/ gte( value: any, other: any ): boolean; } interface LoDashImplicitWrapperBase<T, TWrapper> { /** * @see _.gte */ gte(other: any): boolean; } interface LoDashExplicitWrapperBase<T, TWrapper> { /** * @see _.gte */ gte(other: any): LoDashExplicitWrapper<boolean>; } //_.isArguments interface LoDashStatic { /** * Checks if value is classified as an arguments object. * * @param value The value to check. * @return Returns true if value is correctly classified, else false. */ isArguments(value?: any): value is IArguments; } interface LoDashImplicitWrapperBase<T, TWrapper> { /** * @see _.isArguments */ isArguments(): boolean; } interface LoDashExplicitWrapperBase<T, TWrapper> { /** * @see _.isArguments */ isArguments(): LoDashExplicitWrapper<boolean>; } //_.isArray interface LoDashStatic { /** * Checks if value is classified as an Array object. * @param value The value to check. * * @return Returns true if value is correctly classified, else false. */ isArray<T>(value?: any): value is T[]; } interface LoDashImplicitWrapperBase<T,TWrapper> { /** * @see _.isArray */ isArray(): boolean; } interface LoDashExplicitWrapperBase<T,TWrapper> { /** * @see _.isArray */ isArray(): LoDashExplicitWrapper<boolean>; } //_.isBoolean interface LoDashStatic { /** * Checks if value is classified as a boolean primitive or object. * * @param value The value to check. * @return Returns true if value is correctly classified, else false. */ isBoolean(value?: any): value is boolean; } interface LoDashImplicitWrapperBase<T, TWrapper> { /** * @see _.isBoolean */ isBoolean(): boolean; } interface LoDashExplicitWrapperBase<T, TWrapper> { /** * @see _.isBoolean */ isBoolean(): LoDashExplicitWrapper<boolean>; } //_.isDate interface LoDashStatic { /** * Checks if value is classified as a Date object. * @param value The value to check. * * @return Returns true if value is correctly classified, else false. 
*/ isDate(value?: any): value is Date; } interface LoDashImplicitWrapperBase<T, TWrapper> { /** * @see _.isDate */ isDate(): boolean; } interface LoDashExplicitWrapperBase<T, TWrapper> { /** * @see _.isDate */ isDate(): LoDashExplicitWrapper<boolean>; } //_.isElement interface LoDashStatic { /** * Checks if value is a DOM element. * * @param value The value to check. * @return Returns true if value is a DOM element, else false. */ isElement(value?: any): boolean; } interface LoDashImplicitWrapperBase<T, TWrapper> { /** * @see _.isElement */ isElement(): boolean; } interface LoDashExplicitWrapperBase<T, TWrapper> { /** * @see _.isElement */ isElement(): LoDashExplicitWrapper<boolean>; } //_.isEmpty interface LoDashStatic { /** * Checks if value is empty. A value is considered empty unless it’s an arguments object, array, string, or * jQuery-like collection with a length greater than 0 or an object with own enumerable properties. * * @param value The value to inspect. * @return Returns true if value is empty, else false. */ isEmpty(value?: any): boolean; } interface LoDashImplicitWrapperBase<T, TWrapper> { /** * @see _.isEmpty */ isEmpty(): boolean; } interface LoDashExplicitWrapperBase<T, TWrapper> { /** * @see _.isEmpty */ isEmpty(): LoDashExplicitWrapper<boolean>; } //_.isEqual interface IsEqualCustomizer { (value: any, other: any, indexOrKey?: number|string): boolean; } interface LoDashStatic { /** * Performs a deep comparison between two values to determine if they are equivalent. If customizer is * provided it’s invoked to compare values. If customizer returns undefined comparisons are handled by the * method instead. The customizer is bound to thisArg and invoked with up to three arguments: (value, other * [, index|key]). * * Note: This method supports comparing arrays, booleans, Date objects, numbers, Object objects, regexes, * and strings. Objects are compared by their own, not inherited, enumerable properties. Functions and DOM * nodes are not supported. 
Provide a customizer function to extend support for comparing other values. * * @alias _.eq * * @param value The value to compare. * @param other The other value to compare. * @param customizer The function to customize value comparisons. * @param thisArg The this binding of customizer. * @return Returns true if the values are equivalent, else false. */ isEqual( value: any, other: any, customizer?: IsEqualCustomizer, thisArg?: any ): boolean; } interface LoDashImplicitWrapperBase<T, TWrapper> { /** * @see _.isEqual */ isEqual( other: any, customizer?: IsEqualCustomizer, thisArg?: any ): boolean; } interface LoDashExplicitWrapperBase<T, TWrapper> { /** * @see _.isEqual */ isEqual( other: any, customizer?: IsEqualCustomizer, thisArg?: any ): LoDashExplicitWrapper<boolean>; } //_.isError interface LoDashStatic { /** * Checks if value is an Error, EvalError, RangeError, ReferenceError, SyntaxError, TypeError, or URIError * object. * * @param value The value to check. * @return Returns true if value is an error object, else false. */ isError(value: any): value is Error; } interface LoDashImplicitWrapperBase<T, TWrapper> { /** * @see _.isError */ isError(): boolean; } interface LoDashExplicitWrapperBase<T, TWrapper> { /** * @see _.isError */ isError(): LoDashExplicitWrapper<boolean>; } //_.isFinite interface LoDashStatic { /** * Checks if value is a finite primitive number. * * Note: This method is based on Number.isFinite. * * @param value The value to check. * @return Returns true if value is a finite number, else false. */ isFinite(value?: any): boolean; } interface LoDashImplicitWrapperBase<T, TWrapper> { /** * @see _.isFinite */ isFinite(): boolean; } interface LoDashExplicitWrapperBase<T, TWrapper> { /** * @see _.isFinite */ isFinite(): LoDashExplicitWrapper<boolean>; } //_.isFunction interface LoDashStatic { /** * Checks if value is classified as a Function object. * * @param value The value to check. 
* @return Returns true if value is correctly classified, else false. */ isFunction(value?: any): value is Function; } interface LoDashImplicitWrapperBase<T, TWrapper> { /** * @see _.isFunction */ isFunction(): boolean; } interface LoDashExplicitWrapperBase<T, TWrapper> { /** * @see _.isFunction */ isFunction(): LoDashExplicitWrapper<boolean>; } //_.isMatch interface isMatchCustomizer { (value: any, other: any, indexOrKey?: number|string): boolean; } interface LoDashStatic { /** * Performs a deep comparison between object and source to determine if object contains equivalent property * values. If customizer is provided it’s invoked to compare values. If customizer returns undefined * comparisons are handled by the method instead. The customizer is bound to thisArg and invoked with three * arguments: (value, other, index|key). * @param object The object to inspect. * @param source The object of property values to match. * @param customizer The function to customize value comparisons. * @param thisArg The this binding of customizer. * @return Returns true if object is a match, else false. */ isMatch(object: Object, source: Object, customizer?: isMatchCustomizer, thisArg?: any): boolean; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.isMatch */ isMatch(source: Object, customizer?: isMatchCustomizer, thisArg?: any): boolean; } //_.isNaN interface LoDashStatic { /** * Checks if value is NaN. * * Note: This method is not the same as isNaN which returns true for undefined and other non-numeric values. * * @param value The value to check. * @return Returns true if value is NaN, else false. */ isNaN(value?: any): boolean; } interface LoDashImplicitWrapperBase<T, TWrapper> { /** * @see _.isNaN */ isNaN(): boolean; } interface LoDashExplicitWrapperBase<T, TWrapper> { /** * @see _.isNaN */ isNaN(): LoDashExplicitWrapper<boolean>; } //_.isNative interface LoDashStatic { /** * Checks if value is a native function. * @param value The value to check. 
     *
     * @return Returns true if value is a native function, else false.
     */
    isNative(value: any): value is Function;
}

interface LoDashImplicitWrapperBase<T, TWrapper> {
    /**
     * @see _.isNative
     */
    isNative(): boolean;
}

interface LoDashExplicitWrapperBase<T, TWrapper> {
    /**
     * @see _.isNative
     */
    isNative(): LoDashExplicitWrapper<boolean>;
}

//_.isNull
interface LoDashStatic {
    /**
     * Checks if value is null.
     *
     * @param value The value to check.
     * @return Returns true if value is null, else false.
     */
    isNull(value?: any): boolean;
}

interface LoDashImplicitWrapperBase<T, TWrapper> {
    /**
     * @see _.isNull
     */
    isNull(): boolean;
}

interface LoDashExplicitWrapperBase<T, TWrapper> {
    /**
     * @see _.isNull
     */
    isNull(): LoDashExplicitWrapper<boolean>;
}

//_.isNumber
interface LoDashStatic {
    /**
     * Checks if value is classified as a Number primitive or object.
     *
     * Note: To exclude Infinity, -Infinity, and NaN, which are classified as numbers, use the _.isFinite method.
     *
     * @param value The value to check.
     * @return Returns true if value is correctly classified, else false.
     */
    isNumber(value?: any): value is number;
}

interface LoDashImplicitWrapperBase<T, TWrapper> {
    /**
     * @see _.isNumber
     */
    isNumber(): boolean;
}

interface LoDashExplicitWrapperBase<T, TWrapper> {
    /**
     * @see _.isNumber
     */
    isNumber(): LoDashExplicitWrapper<boolean>;
}

//_.isObject
interface LoDashStatic {
    /**
     * Checks if value is the language type of Object. (e.g. arrays, functions, objects, regexes, new Number(0),
     * and new String(''))
     *
     * @param value The value to check.
     * @return Returns true if value is an object, else false.
     */
    isObject(value?: any): boolean;
}

interface LoDashImplicitWrapperBase<T, TWrapper> {
    /**
     * @see _.isObject
     */
    isObject(): boolean;
}

interface LoDashExplicitWrapperBase<T, TWrapper> {
    /**
     * @see _.isObject
     */
    isObject(): LoDashExplicitWrapper<boolean>;
}

//_.isPlainObject
interface LoDashStatic {
    /**
     * Checks if value is a plain object, that is, an object created by the Object constructor or one with a
     * [[Prototype]] of null.
     *
     * Note: This method assumes objects created by the Object constructor have no inherited enumerable properties.
     *
     * @param value The value to check.
     * @return Returns true if value is a plain object, else false.
     */
    isPlainObject(value?: any): boolean;
}

interface LoDashImplicitWrapperBase<T, TWrapper> {
    /**
     * @see _.isPlainObject
     */
    isPlainObject(): boolean;
}

interface LoDashExplicitWrapperBase<T, TWrapper> {
    /**
     * @see _.isPlainObject
     */
    isPlainObject(): LoDashExplicitWrapper<boolean>;
}

//_.isRegExp
interface LoDashStatic {
    /**
     * Checks if value is classified as a RegExp object.
     *
     * @param value The value to check.
     * @return Returns true if value is correctly classified, else false.
     */
    isRegExp(value?: any): value is RegExp;
}

interface LoDashImplicitWrapperBase<T, TWrapper> {
    /**
     * @see _.isRegExp
     */
    isRegExp(): boolean;
}

interface LoDashExplicitWrapperBase<T, TWrapper> {
    /**
     * @see _.isRegExp
     */
    isRegExp(): LoDashExplicitWrapper<boolean>;
}

//_.isString
interface LoDashStatic {
    /**
     * Checks if value is classified as a String primitive or object.
     *
     * @param value The value to check.
     * @return Returns true if value is correctly classified, else false.
     */
    isString(value?: any): value is string;
}

interface LoDashImplicitWrapperBase<T, TWrapper> {
    /**
     * @see _.isString
     */
    isString(): boolean;
}

interface LoDashExplicitWrapperBase<T, TWrapper> {
    /**
     * @see _.isString
     */
    isString(): LoDashExplicitWrapper<boolean>;
}

//_.isTypedArray
interface LoDashStatic {
    /**
     * Checks if value is classified as a typed array.
     *
     * @param value The value to check.
* @return Returns true if value is correctly classified, else false. */ isTypedArray(value: any): boolean; } interface LoDashImplicitWrapperBase<T, TWrapper> { /** * see _.isTypedArray */ isTypedArray(): boolean; } interface LoDashExplicitWrapperBase<T, TWrapper> { /** * see _.isTypedArray */<|fim▁hole|> isTypedArray(): LoDashExplicitWrapper<boolean>; } //_.isUndefined interface LoDashStatic { /** * Checks if value is undefined. * * @param value The value to check. * @return Returns true if value is undefined, else false. */ isUndefined(value: any): boolean; } interface LoDashImplicitWrapperBase<T, TWrapper> { /** * see _.isUndefined */ isUndefined(): boolean; } interface LoDashExplicitWrapperBase<T, TWrapper> { /** * see _.isUndefined */ isUndefined(): LoDashExplicitWrapper<boolean>; } //_.lt interface LoDashStatic { /** * Checks if value is less than other. * * @param value The value to compare. * @param other The other value to compare. * @return Returns true if value is less than other, else false. */ lt( value: any, other: any ): boolean; } interface LoDashImplicitWrapperBase<T, TWrapper> { /** * @see _.lt */ lt(other: any): boolean; } interface LoDashExplicitWrapperBase<T, TWrapper> { /** * @see _.lt */ lt(other: any): LoDashExplicitWrapper<boolean>; } //_.lte interface LoDashStatic { /** * Checks if value is less than or equal to other. * * @param value The value to compare. * @param other The other value to compare. * @return Returns true if value is less than or equal to other, else false. */ lte( value: any, other: any ): boolean; } interface LoDashImplicitWrapperBase<T, TWrapper> { /** * @see _.lte */ lte(other: any): boolean; } interface LoDashExplicitWrapperBase<T, TWrapper> { /** * @see _.lte */ lte(other: any): LoDashExplicitWrapper<boolean>; } //_.toArray interface LoDashStatic { /** * Converts value to an array. * * @param value The value to convert. * @return Returns the converted array. 
*/ toArray<T>(value: List<T>|Dictionary<T>|NumericDictionary<T>): T[]; /** * @see _.toArray */ toArray<TValue, TResult>(value: TValue): TResult[]; /** * @see _.toArray */ toArray<TResult>(value?: any): TResult[]; } interface LoDashImplicitWrapper<T> { /** * @see _.toArray */ toArray<TResult>(): LoDashImplicitArrayWrapper<TResult>; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.toArray */ toArray(): LoDashImplicitArrayWrapper<T>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.toArray */ toArray<TResult>(): LoDashImplicitArrayWrapper<TResult>; } interface LoDashExplicitWrapper<T> { /** * @see _.toArray */ toArray<TResult>(): LoDashExplicitArrayWrapper<TResult>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.toArray */ toArray(): LoDashExplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.toArray */ toArray<TResult>(): LoDashExplicitArrayWrapper<TResult>; } //_.toPlainObject interface LoDashStatic { /** * Converts value to a plain object flattening inherited enumerable properties of value to own properties * of the plain object. * * @param value The value to convert. * @return Returns the converted plain object. */ toPlainObject<TResult extends {}>(value?: any): TResult; } interface LoDashImplicitWrapperBase<T, TWrapper> { /** * @see _.toPlainObject */ toPlainObject<TResult extends {}>(): LoDashImplicitObjectWrapper<TResult>; } /******** * Math * ********/ //_.add interface LoDashStatic { /** * Adds two numbers. * * @param augend The first number to add. * @param addend The second number to add. * @return Returns the sum. */ add( augend: number, addend: number ): number; } interface LoDashImplicitWrapper<T> { /** * @see _.add */ add(addend: number): number; } interface LoDashExplicitWrapper<T> { /** * @see _.add */ add(addend: number): LoDashExplicitWrapper<number>; } //_.ceil interface LoDashStatic { /** * Calculates n rounded up to precision. * * @param n The number to round up. 
* @param precision The precision to round up to. * @return Returns the rounded up number. */ ceil( n: number, precision?: number ): number; } interface LoDashImplicitWrapper<T> { /** * @see _.ceil */ ceil(precision?: number): number; } interface LoDashExplicitWrapper<T> { /** * @see _.ceil */ ceil(precision?: number): LoDashExplicitWrapper<number>; } //_.floor interface LoDashStatic { /** * Calculates n rounded down to precision. * * @param n The number to round down. * @param precision The precision to round down to. * @return Returns the rounded down number. */ floor( n: number, precision?: number ): number; } interface LoDashImplicitWrapper<T> { /** * @see _.floor */ floor(precision?: number): number; } interface LoDashExplicitWrapper<T> { /** * @see _.floor */ floor(precision?: number): LoDashExplicitWrapper<number>; } //_.max interface LoDashStatic { /** * Gets the maximum value of collection. If collection is empty or falsey -Infinity is returned. If an iteratee * function is provided it’s invoked for each value in collection to generate the criterion by which the value * is ranked. The iteratee is bound to thisArg and invoked with three arguments: (value, index, collection). * * If a property name is provided for iteratee the created _.property style callback returns the property value * of the given element. * * If a value is also provided for thisArg the created _.matchesProperty style callback returns true for * elements that have a matching property value, else false. * * If an object is provided for iteratee the created _.matches style callback returns true for elements that * have the properties of the given object, else false. * * @param collection The collection to iterate over. * @param iteratee The function invoked per iteration. * @param thisArg The this binding of iteratee. * @return Returns the maximum value. 
*/ max<T>( collection: List<T>, iteratee?: ListIterator<T, any>, thisArg?: any ): T; /** * @see _.max */ max<T>( collection: Dictionary<T>, iteratee?: DictionaryIterator<T, any>, thisArg?: any ): T; /** * @see _.max */ max<T>( collection: List<T>|Dictionary<T>, iteratee?: string, thisArg?: any ): T; /** * @see _.max */ max<TObject extends {}, T>( collection: List<T>|Dictionary<T>, whereValue?: TObject ): T; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.max */ max( iteratee?: ListIterator<T, any>, thisArg?: any ): T; /** * @see _.max */ max( iteratee?: string, thisArg?: any ): T; /** * @see _.max */ max<TObject extends {}>( whereValue?: TObject ): T; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.max */ max<T>( iteratee?: ListIterator<T, any>|DictionaryIterator<T, any>, thisArg?: any ): T; /** * @see _.max */ max<T>( iteratee?: string, thisArg?: any ): T; /** * @see _.max */ max<TObject extends {}, T>( whereValue?: TObject ): T; } //_.min interface LoDashStatic { /** * Gets the minimum value of collection. If collection is empty or falsey Infinity is returned. If an iteratee * function is provided it’s invoked for each value in collection to generate the criterion by which the value * is ranked. The iteratee is bound to thisArg and invoked with three arguments: (value, index, collection). * * If a property name is provided for iteratee the created _.property style callback returns the property value * of the given element. * * If a value is also provided for thisArg the created _.matchesProperty style callback returns true for * elements that have a matching property value, else false. * * If an object is provided for iteratee the created _.matches style callback returns true for elements that * have the properties of the given object, else false. * * @param collection The collection to iterate over. * @param iteratee The function invoked per iteration. * @param thisArg The this binding of iteratee. * @return Returns the minimum value. 
*/ min<T>( collection: List<T>, iteratee?: ListIterator<T, any>, thisArg?: any ): T; /** * @see _.min */ min<T>( collection: Dictionary<T>, iteratee?: DictionaryIterator<T, any>, thisArg?: any ): T; /** * @see _.min */ min<T>( collection: List<T>|Dictionary<T>, iteratee?: string, thisArg?: any ): T; /** * @see _.min */ min<TObject extends {}, T>( collection: List<T>|Dictionary<T>, whereValue?: TObject ): T; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.min */ min( iteratee?: ListIterator<T, any>, thisArg?: any ): T; /** * @see _.min */ min( iteratee?: string, thisArg?: any ): T; /** * @see _.min */ min<TObject extends {}>( whereValue?: TObject ): T; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.min */ min<T>( iteratee?: ListIterator<T, any>|DictionaryIterator<T, any>, thisArg?: any ): T; /** * @see _.min */ min<T>( iteratee?: string, thisArg?: any ): T; /** * @see _.min */ min<TObject extends {}, T>( whereValue?: TObject ): T; } //_.round interface LoDashStatic { /** * Calculates n rounded to precision. * * @param n The number to round. * @param precision The precision to round to. * @return Returns the rounded number. */ round( n: number, precision?: number ): number; } interface LoDashImplicitWrapper<T> { /** * @see _.round */ round(precision?: number): number; } interface LoDashExplicitWrapper<T> { /** * @see _.round */ round(precision?: number): LoDashExplicitWrapper<number>; } //_.sum interface LoDashStatic { /** * Gets the sum of the values in collection. * * @param collection The collection to iterate over. * @param iteratee The function invoked per iteration. * @param thisArg The this binding of iteratee. * @return Returns the sum. 
*/ sum<T>( collection: List<T>, iteratee: ListIterator<T, number>, thisArg?: any ): number; /** * @see _.sum **/ sum<T>( collection: Dictionary<T>, iteratee: DictionaryIterator<T, number>, thisArg?: any ): number; /** * @see _.sum */ sum<T>( collection: List<number>|Dictionary<number>, iteratee: string ): number; /** * @see _.sum */ sum<T>(collection: List<T>|Dictionary<T>): number; /** * @see _.sum */ sum(collection: List<number>|Dictionary<number>): number; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.sum */ sum( iteratee: ListIterator<T, number>, thisArg?: any ): number; /** * @see _.sum */ sum(iteratee: string): number; /** * @see _.sum */ sum(): number; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.sum **/ sum<TValue>( iteratee: ListIterator<TValue, number>|DictionaryIterator<TValue, number>, thisArg?: any ): number; /** * @see _.sum */ sum(iteratee: string): number; /** * @see _.sum */ sum(): number; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.sum */ sum( iteratee: ListIterator<T, number>, thisArg?: any ): LoDashExplicitWrapper<number>; /** * @see _.sum */ sum(iteratee: string): LoDashExplicitWrapper<number>; /** * @see _.sum */ sum(): LoDashExplicitWrapper<number>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.sum */ sum<TValue>( iteratee: ListIterator<TValue, number>|DictionaryIterator<TValue, number>, thisArg?: any ): LoDashExplicitWrapper<number>; /** * @see _.sum */ sum(iteratee: string): LoDashExplicitWrapper<number>; /** * @see _.sum */ sum(): LoDashExplicitWrapper<number>; } /********** * Number * **********/ //_.inRange interface LoDashStatic { /** * Checks if n is between start and up to but not including, end. If end is not specified it’s set to start * with start then set to 0. * * @param n The number to check. * @param start The start of the range. * @param end The end of the range. * @return Returns true if n is in the range, else false. 
*/ inRange( n: number, start: number, end: number ): boolean; /** * @see _.inRange */ inRange( n: number, end: number ): boolean; } interface LoDashImplicitWrapper<T> { /** * @see _.inRange */ inRange( start: number, end: number ): boolean; /** * @see _.inRange */ inRange(end: number): boolean; } interface LoDashExplicitWrapper<T> { /** * @see _.inRange */ inRange( start: number, end: number ): LoDashExplicitWrapper<boolean>; /** * @see _.inRange */ inRange(end: number): LoDashExplicitWrapper<boolean>; } //_.random interface LoDashStatic { /** * Produces a random number between min and max (inclusive). If only one argument is provided a number between * 0 and the given number is returned. If floating is true, or either min or max are floats, a floating-point * number is returned instead of an integer. * * @param min The minimum possible value. * @param max The maximum possible value. * @param floating Specify returning a floating-point number. * @return Returns the random number. */ random( min?: number, max?: number, floating?: boolean ): number; /** * @see _.random */ random( min?: number, floating?: boolean ): number; /** * @see _.random */ random(floating?: boolean): number; } interface LoDashImplicitWrapper<T> { /** * @see _.random */ random( max?: number, floating?: boolean ): number; /** * @see _.random */ random(floating?: boolean): number; } interface LoDashExplicitWrapper<T> { /** * @see _.random */ random( max?: number, floating?: boolean ): LoDashExplicitWrapper<number>; /** * @see _.random */ random(floating?: boolean): LoDashExplicitWrapper<number>; } /********** * Object * **********/ //_.assign interface AssignCustomizer { (objectValue: any, sourceValue: any, key?: string, object?: {}, source?: {}): any; } interface LoDashStatic { /** * Assigns own enumerable properties of source object(s) to the destination object. Subsequent sources * overwrite property assignments of previous sources. 
If customizer is provided it’s invoked to produce the * assigned values. The customizer is bound to thisArg and invoked with five arguments: * (objectValue, sourceValue, key, object, source). * * Note: This method mutates object and is based on Object.assign. * * @alias _.extend * * @param object The destination object. * @param source The source objects. * @param customizer The function to customize assigned values. * @param thisArg The this binding of callback. * @return The destination object. */ assign<TObject extends {}, TSource extends {}, TResult extends {}>( object: TObject, source: TSource, customizer?: AssignCustomizer, thisArg?: any ): TResult; /** * @see assign */ assign<TObject extends {}, TSource1 extends {}, TSource2 extends {}, TResult extends {}>( object: TObject, source1: TSource1, source2: TSource2, customizer?: AssignCustomizer, thisArg?: any ): TResult; /** * @see assign */ assign<TObject extends {}, TSource1 extends {}, TSource2 extends {}, TSource3 extends {}, TResult extends {}>( object: TObject, source1: TSource1, source2: TSource2, source3: TSource3, customizer?: AssignCustomizer, thisArg?: any ): TResult; /** * @see assign */ assign<TObject extends {}, TSource1 extends {}, TSource2 extends {}, TSource3 extends {}, TSource4 extends {}, TResult extends {}> ( object: TObject, source1: TSource1, source2: TSource2, source3: TSource3, source4: TSource4, customizer?: AssignCustomizer, thisArg?: any ): TResult; /** * @see _.assign */ assign<TObject extends {}>(object: TObject): TObject; /** * @see _.assign */ assign<TObject extends {}, TResult extends {}>( object: TObject, ...otherArgs: any[] ): TResult; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.assign */ assign<TSource extends {}, TResult extends {}>( source: TSource, customizer?: AssignCustomizer, thisArg?: any ): LoDashImplicitObjectWrapper<TResult>; /** * @see assign */ assign<TSource1 extends {}, TSource2 extends {}, TResult extends {}>( source1: TSource1, source2: TSource2, 
customizer?: AssignCustomizer, thisArg?: any ): LoDashImplicitObjectWrapper<TResult>; /** * @see assign */ assign<TSource1 extends {}, TSource2 extends {}, TSource3 extends {}, TResult extends {}>( source1: TSource1, source2: TSource2, source3: TSource3, customizer?: AssignCustomizer, thisArg?: any ): LoDashImplicitObjectWrapper<TResult>; /** * @see assign */ assign<TSource1 extends {}, TSource2 extends {}, TSource3 extends {}, TSource4 extends {}, TResult extends {}>( source1: TSource1, source2: TSource2, source3: TSource3, source4: TSource4, customizer?: AssignCustomizer, thisArg?: any ): LoDashImplicitObjectWrapper<TResult>; /** * @see _.assign */ assign(): LoDashImplicitObjectWrapper<T>; /** * @see _.assign */ assign<TResult extends {}>(...otherArgs: any[]): LoDashImplicitObjectWrapper<TResult>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.assign */ assign<TSource extends {}, TResult extends {}>( source: TSource, customizer?: AssignCustomizer, thisArg?: any ): LoDashExplicitObjectWrapper<TResult>; /** * @see assign */ assign<TSource1 extends {}, TSource2 extends {}, TResult extends {}>( source1: TSource1, source2: TSource2, customizer?: AssignCustomizer, thisArg?: any ): LoDashExplicitObjectWrapper<TResult>; /** * @see assign */ assign<TSource1 extends {}, TSource2 extends {}, TSource3 extends {}, TResult extends {}>( source1: TSource1, source2: TSource2, source3: TSource3, customizer?: AssignCustomizer, thisArg?: any ): LoDashExplicitObjectWrapper<TResult>; /** * @see assign */ assign<TSource1 extends {}, TSource2 extends {}, TSource3 extends {}, TSource4 extends {}, TResult extends {}>( source1: TSource1, source2: TSource2, source3: TSource3, source4: TSource4, customizer?: AssignCustomizer, thisArg?: any ): LoDashExplicitObjectWrapper<TResult>; /** * @see _.assign */ assign(): LoDashExplicitObjectWrapper<T>; /** * @see _.assign */ assign<TResult extends {}>(...otherArgs: any[]): LoDashExplicitObjectWrapper<TResult>; } //_.create interface 
LoDashStatic { /** * Creates an object that inherits from the given prototype object. If a properties object is provided its own * enumerable properties are assigned to the created object. * * @param prototype The object to inherit from. * @param properties The properties to assign to the object. * @return Returns the new object. */ create<T extends Object, U extends Object>( prototype: T, properties?: U ): T & U; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.create */ create<U extends Object>(properties?: U): LoDashImplicitObjectWrapper<T & U>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.create */ create<U extends Object>(properties?: U): LoDashExplicitObjectWrapper<T & U>; } //_.defaults interface LoDashStatic { /** * Assigns own enumerable properties of source object(s) to the destination object for all destination * properties that resolve to undefined. Once a property is set, additional values of the same property are * ignored. * * Note: This method mutates object. * * @param object The destination object. * @param sources The source objects. * @return The destination object. 
*/ defaults<Obj extends {}, TResult extends {}>( object: Obj, ...sources: {}[] ): TResult; /** * @see _.defaults */ defaults<Obj extends {}, S1 extends {}, TResult extends {}>( object: Obj, source1: S1, ...sources: {}[] ): TResult; /** * @see _.defaults */ defaults<Obj extends {}, S1 extends {}, S2 extends {}, TResult extends {}>( object: Obj, source1: S1, source2: S2, ...sources: {}[] ): TResult; /** * @see _.defaults */ defaults<Obj extends {}, S1 extends {}, S2 extends {}, S3 extends {}, TResult extends {}>( object: Obj, source1: S1, source2: S2, source3: S3, ...sources: {}[] ): TResult; /** * @see _.defaults */ defaults<Obj extends {}, S1 extends {}, S2 extends {}, S3 extends {}, S4 extends {}, TResult extends {}>( object: Obj, source1: S1, source2: S2, source3: S3, source4: S4, ...sources: {}[] ): TResult; /** * @see _.defaults */ defaults<TResult extends {}>( object: {}, ...sources: {}[] ): TResult; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.defaults */ defaults<S1 extends {}, TResult extends {}>( source1: S1, ...sources: {}[] ): LoDashImplicitObjectWrapper<TResult>; /** * @see _.defaults */ defaults<S1 extends {}, S2 extends {}, TResult extends {}>( source1: S1, source2: S2, ...sources: {}[] ): LoDashImplicitObjectWrapper<TResult>; /** * @see _.defaults */ defaults<S1 extends {}, S2 extends {}, S3 extends {}, TResult extends {}>( source1: S1, source2: S2, source3: S3, ...sources: {}[] ): LoDashImplicitObjectWrapper<TResult>; /** * @see _.defaults */ defaults<S1 extends {}, S2 extends {}, S3 extends {}, S4 extends {}, TResult extends {}>( source1: S1, source2: S2, source3: S3, source4: S4, ...sources: {}[] ): LoDashImplicitObjectWrapper<TResult>; /** * @see _.defaults */ defaults(): LoDashImplicitObjectWrapper<T>; /** * @see _.defaults */ defaults<TResult>(...sources: {}[]): LoDashImplicitObjectWrapper<TResult>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.defaults */ defaults<S1 extends {}, TResult extends {}>( source1: S1, 
...sources: {}[] ): LoDashExplicitObjectWrapper<TResult>; /** * @see _.defaults */ defaults<S1 extends {}, S2 extends {}, TResult extends {}>( source1: S1, source2: S2, ...sources: {}[] ): LoDashExplicitObjectWrapper<TResult>; /** * @see _.defaults */ defaults<S1 extends {}, S2 extends {}, S3 extends {}, TResult extends {}>( source1: S1, source2: S2, source3: S3, ...sources: {}[] ): LoDashExplicitObjectWrapper<TResult>; /** * @see _.defaults */ defaults<S1 extends {}, S2 extends {}, S3 extends {}, S4 extends {}, TResult extends {}>( source1: S1, source2: S2, source3: S3, source4: S4, ...sources: {}[] ): LoDashExplicitObjectWrapper<TResult>; /** * @see _.defaults */ defaults(): LoDashExplicitObjectWrapper<T>; /** * @see _.defaults */ defaults<TResult>(...sources: {}[]): LoDashExplicitObjectWrapper<TResult>; } //_.defaultsDeep interface LoDashStatic { /** * This method is like _.defaults except that it recursively assigns default properties. * @param object The destination object. * @param sources The source objects. * @return Returns object. 
**/ defaultsDeep<T, TResult>( object: T, ...sources: any[]): TResult; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.defaultsDeep **/ defaultsDeep<TResult>(...sources: any[]): LoDashImplicitObjectWrapper<TResult> } //_.extend interface LoDashStatic { /** * @see assign */ extend<TObject extends {}, TSource extends {}, TResult extends {}>( object: TObject, source: TSource, customizer?: AssignCustomizer, thisArg?: any ): TResult; /** * @see assign */ extend<TObject extends {}, TSource1 extends {}, TSource2 extends {}, TResult extends {}>( object: TObject, source1: TSource1, source2: TSource2, customizer?: AssignCustomizer, thisArg?: any ): TResult; /** * @see assign */ extend<TObject extends {}, TSource1 extends {}, TSource2 extends {}, TSource3 extends {}, TResult extends {}>( object: TObject, source1: TSource1, source2: TSource2, source3: TSource3, customizer?: AssignCustomizer, thisArg?: any ): TResult; /** * @see assign */ extend<TObject extends {}, TSource1 extends {}, TSource2 extends {}, TSource3 extends {}, TSource4 extends {}, TResult extends {}> ( object: TObject, source1: TSource1, source2: TSource2, source3: TSource3, source4: TSource4, customizer?: AssignCustomizer, thisArg?: any ): TResult; /** * @see _.assign */ extend<TObject extends {}>(object: TObject): TObject; /** * @see _.assign */ extend<TObject extends {}, TResult extends {}>( object: TObject, ...otherArgs: any[] ): TResult; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.assign */ extend<TSource extends {}, TResult extends {}>( source: TSource, customizer?: AssignCustomizer, thisArg?: any ): LoDashImplicitObjectWrapper<TResult>; /** * @see assign */ extend<TSource1 extends {}, TSource2 extends {}, TResult extends {}>( source1: TSource1, source2: TSource2, customizer?: AssignCustomizer, thisArg?: any ): LoDashImplicitObjectWrapper<TResult>; /** * @see assign */ extend<TSource1 extends {}, TSource2 extends {}, TSource3 extends {}, TResult extends {}>( source1: TSource1, 
source2: TSource2, source3: TSource3, customizer?: AssignCustomizer, thisArg?: any ): LoDashImplicitObjectWrapper<TResult>; /** * @see assign */ extend<TSource1 extends {}, TSource2 extends {}, TSource3 extends {}, TSource4 extends {}, TResult extends {}>( source1: TSource1, source2: TSource2, source3: TSource3, source4: TSource4, customizer?: AssignCustomizer, thisArg?: any ): LoDashImplicitObjectWrapper<TResult>; /** * @see _.assign */ extend(): LoDashImplicitObjectWrapper<T>; /** * @see _.assign */ extend<TResult extends {}>(...otherArgs: any[]): LoDashImplicitObjectWrapper<TResult>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.assign */ extend<TSource extends {}, TResult extends {}>( source: TSource, customizer?: AssignCustomizer, thisArg?: any ): LoDashExplicitObjectWrapper<TResult>; /** * @see assign */ extend<TSource1 extends {}, TSource2 extends {}, TResult extends {}>( source1: TSource1, source2: TSource2, customizer?: AssignCustomizer, thisArg?: any ): LoDashExplicitObjectWrapper<TResult>; /** * @see assign */ extend<TSource1 extends {}, TSource2 extends {}, TSource3 extends {}, TResult extends {}>( source1: TSource1, source2: TSource2, source3: TSource3, customizer?: AssignCustomizer, thisArg?: any ): LoDashExplicitObjectWrapper<TResult>; /** * @see assign */ extend<TSource1 extends {}, TSource2 extends {}, TSource3 extends {}, TSource4 extends {}, TResult extends {}>( source1: TSource1, source2: TSource2, source3: TSource3, source4: TSource4, customizer?: AssignCustomizer, thisArg?: any ): LoDashExplicitObjectWrapper<TResult>; /** * @see _.assign */ extend(): LoDashExplicitObjectWrapper<T>; /** * @see _.assign */ extend<TResult extends {}>(...otherArgs: any[]): LoDashExplicitObjectWrapper<TResult>; } //_.findKey interface LoDashStatic { /** * This method is like _.find except that it returns the key of the first element predicate returns truthy for * instead of the element itself. 
* * If a property name is provided for predicate the created _.property style callback returns the property * value of the given element. * * If a value is also provided for thisArg the created _.matchesProperty style callback returns true for * elements that have a matching property value, else false. * * If an object is provided for predicate the created _.matches style callback returns true for elements that * have the properties of the given object, else false. * * @param object The object to search. * @param predicate The function invoked per iteration. * @param thisArg The this binding of predicate. * @return Returns the key of the matched element, else undefined. */ findKey<TValues, TObject>( object: TObject, predicate?: DictionaryIterator<TValues, boolean>, thisArg?: any ): string; /** * @see _.findKey */ findKey<TObject>( object: TObject, predicate?: ObjectIterator<any, boolean>, thisArg?: any ): string; /** * @see _.findKey */ findKey<TObject>( object: TObject, predicate?: string, thisArg?: any ): string; /** * @see _.findKey */ findKey<TWhere extends Dictionary<any>, TObject>( object: TObject, predicate?: TWhere ): string; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.findKey */ findKey<TValues>( predicate?: DictionaryIterator<TValues, boolean>, thisArg?: any ): string; /** * @see _.findKey */ findKey( predicate?: ObjectIterator<any, boolean>, thisArg?: any ): string; /** * @see _.findKey */ findKey( predicate?: string, thisArg?: any ): string; /** * @see _.findKey */ findKey<TWhere extends Dictionary<any>>( predicate?: TWhere ): string; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.findKey */ findKey<TValues>( predicate?: DictionaryIterator<TValues, boolean>, thisArg?: any ): LoDashExplicitWrapper<string>; /** * @see _.findKey */ findKey( predicate?: ObjectIterator<any, boolean>, thisArg?: any ): LoDashExplicitWrapper<string>; /** * @see _.findKey */ findKey( predicate?: string, thisArg?: any ): LoDashExplicitWrapper<string>; /** 
* @see _.findKey */ findKey<TWhere extends Dictionary<any>>( predicate?: TWhere ): LoDashExplicitWrapper<string>; } //_.findLastKey interface LoDashStatic { /** * This method is like _.findKey except that it iterates over elements of a collection in the opposite order. * * If a property name is provided for predicate the created _.property style callback returns the property * value of the given element. * * If a value is also provided for thisArg the created _.matchesProperty style callback returns true for * elements that have a matching property value, else false. * * If an object is provided for predicate the created _.matches style callback returns true for elements that * have the properties of the given object, else false. * * @param object The object to search. * @param predicate The function invoked per iteration. * @param thisArg The this binding of predicate. * @return Returns the key of the matched element, else undefined. */ findLastKey<TValues, TObject>( object: TObject, predicate?: DictionaryIterator<TValues, boolean>, thisArg?: any ): string; /** * @see _.findLastKey */ findLastKey<TObject>( object: TObject, predicate?: ObjectIterator<any, boolean>, thisArg?: any ): string; /** * @see _.findLastKey */ findLastKey<TObject>( object: TObject, predicate?: string, thisArg?: any ): string; /** * @see _.findLastKey */ findLastKey<TWhere extends Dictionary<any>, TObject>( object: TObject, predicate?: TWhere ): string; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.findLastKey */ findLastKey<TValues>( predicate?: DictionaryIterator<TValues, boolean>, thisArg?: any ): string; /** * @see _.findLastKey */ findLastKey( predicate?: ObjectIterator<any, boolean>, thisArg?: any ): string; /** * @see _.findLastKey */ findLastKey( predicate?: string, thisArg?: any ): string; /** * @see _.findLastKey */ findLastKey<TWhere extends Dictionary<any>>( predicate?: TWhere ): string; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.findLastKey */ 
findLastKey<TValues>( predicate?: DictionaryIterator<TValues, boolean>, thisArg?: any ): LoDashExplicitWrapper<string>; /** * @see _.findLastKey */ findLastKey( predicate?: ObjectIterator<any, boolean>, thisArg?: any ): LoDashExplicitWrapper<string>; /** * @see _.findLastKey */ findLastKey( predicate?: string, thisArg?: any ): LoDashExplicitWrapper<string>; /** * @see _.findLastKey */ findLastKey<TWhere extends Dictionary<any>>( predicate?: TWhere ): LoDashExplicitWrapper<string>; } //_.forIn interface LoDashStatic { /** * Iterates over own and inherited enumerable properties of an object invoking iteratee for each property. The * iteratee is bound to thisArg and invoked with three arguments: (value, key, object). Iteratee functions may * exit iteration early by explicitly returning false. * * @param object The object to iterate over. * @param iteratee The function invoked per iteration. * @param thisArg The this binding of iteratee. * @return Returns object. */ forIn<T>( object: Dictionary<T>, iteratee?: DictionaryIterator<T, any>, thisArg?: any ): Dictionary<T>; /** * @see _.forIn */ forIn<T extends {}>( object: T, iteratee?: ObjectIterator<any, any>, thisArg?: any ): T; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.forIn */ forIn<TValue>( iteratee?: DictionaryIterator<TValue, any>, thisArg?: any ): _.LoDashImplicitObjectWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.forIn */ forIn<TValue>( iteratee?: DictionaryIterator<TValue, any>, thisArg?: any ): _.LoDashExplicitObjectWrapper<T>; } //_.forInRight interface LoDashStatic { /** * This method is like _.forIn except that it iterates over properties of object in the opposite order. * * @param object The object to iterate over. * @param iteratee The function invoked per iteration. * @param thisArg The this binding of iteratee. * @return Returns object. 
*/ forInRight<T>( object: Dictionary<T>, iteratee?: DictionaryIterator<T, any>, thisArg?: any ): Dictionary<T>; /** * @see _.forInRight */ forInRight<T extends {}>( object: T, iteratee?: ObjectIterator<any, any>, thisArg?: any ): T; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.forInRight */ forInRight<TValue>( iteratee?: DictionaryIterator<TValue, any>, thisArg?: any ): _.LoDashImplicitObjectWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.forInRight */ forInRight<TValue>( iteratee?: DictionaryIterator<TValue, any>, thisArg?: any ): _.LoDashExplicitObjectWrapper<T>; } //_.forOwn interface LoDashStatic { /** * Iterates over own enumerable properties of an object invoking iteratee for each property. The iteratee is * bound to thisArg and invoked with three arguments: (value, key, object). Iteratee functions may exit * iteration early by explicitly returning false. * * @param object The object to iterate over. * @param iteratee The function invoked per iteration. * @param thisArg The this binding of iteratee. * @return Returns object. */ forOwn<T>( object: Dictionary<T>, iteratee?: DictionaryIterator<T, any>, thisArg?: any ): Dictionary<T>; /** * @see _.forOwn */ forOwn<T extends {}>( object: T, iteratee?: ObjectIterator<any, any>, thisArg?: any ): T; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.forOwn */ forOwn<TValue>( iteratee?: DictionaryIterator<TValue, any>, thisArg?: any ): _.LoDashImplicitObjectWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.forOwn */ forOwn<TValue>( iteratee?: DictionaryIterator<TValue, any>, thisArg?: any ): _.LoDashExplicitObjectWrapper<T>; } //_.forOwnRight interface LoDashStatic { /** * This method is like _.forOwn except that it iterates over properties of object in the opposite order. * * @param object The object to iterate over. * @param iteratee The function invoked per iteration. * @param thisArg The this binding of iteratee. * @return Returns object. 
*/ forOwnRight<T>( object: Dictionary<T>, iteratee?: DictionaryIterator<T, any>, thisArg?: any ): Dictionary<T>; /** * @see _.forOwnRight */ forOwnRight<T extends {}>( object: T, iteratee?: ObjectIterator<any, any>, thisArg?: any ): T; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.forOwnRight */ forOwnRight<TValue>( iteratee?: DictionaryIterator<TValue, any>, thisArg?: any ): _.LoDashImplicitObjectWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.forOwnRight */ forOwnRight<TValue>( iteratee?: DictionaryIterator<TValue, any>, thisArg?: any ): _.LoDashExplicitObjectWrapper<T>; } //_.functions interface LoDashStatic { /** * Creates an array of function property names from all enumerable properties, own and inherited, of object. * * @alias _.methods * * @param object The object to inspect. * @return Returns the new array of property names. */ functions<T extends {}>(object: any): string[]; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.functions */ functions(): _.LoDashImplicitArrayWrapper<string>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.functions */ functions(): _.LoDashExplicitArrayWrapper<string>; } //_.get interface LoDashStatic { /** * Gets the property value at path of object. If the resolved * value is undefined the defaultValue is used in its place. * @param object The object to query. * @param path The path of the property to get. * @param defaultValue The value returned if the resolved value is undefined. * @return Returns the resolved value. **/ get<TResult>(object: Object, path: string|number|boolean|Array<string|number|boolean>, defaultValue?:TResult ): TResult; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.get **/ get<TResult>(path: string|number|boolean|Array<string|number|boolean>, defaultValue?: TResult ): TResult; } //_.has interface LoDashStatic { /** * Checks if path is a direct property. * * @param object The object to query. * @param path The path to check. 
* @return Returns true if path is a direct property, else false. */ has<T extends {}>( object: T, path: StringRepresentable|StringRepresentable[] ): boolean; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.has */ has(path: StringRepresentable|StringRepresentable[]): boolean; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.has */ has(path: StringRepresentable|StringRepresentable[]): LoDashExplicitWrapper<boolean>; } //_.invert interface LoDashStatic { /** * Creates an object composed of the inverted keys and values of object. If object contains duplicate values, * subsequent values overwrite property assignments of previous values unless multiValue is true. * * @param object The object to invert. * @param multiValue Allow multiple values per key. * @return Returns the new inverted object. */ invert<T extends {}, TResult extends {}>( object: T, multiValue?: boolean ): TResult; /** * @see _.invert */ invert<TResult extends {}>( object: Object, multiValue?: boolean ): TResult; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.invert */ invert<TResult extends {}>(multiValue?: boolean): LoDashImplicitObjectWrapper<TResult>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.invert */ invert<TResult extends {}>(multiValue?: boolean): LoDashExplicitObjectWrapper<TResult>; } //_.keys interface LoDashStatic { /** * Creates an array of the own enumerable property names of object. * * Note: Non-object values are coerced to objects. See the ES spec for more details. * * @param object The object to query. * @return Returns the array of property names. */ keys(object?: any): string[]; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.keys */ keys(): LoDashImplicitArrayWrapper<string>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.keys */ keys(): LoDashExplicitArrayWrapper<string>; } //_.keysIn interface LoDashStatic { /** * Creates an array of the own and inherited enumerable property names of object. 
* * Note: Non-object values are coerced to objects. * * @param object The object to query. * @return An array of property names. */ keysIn(object?: any): string[]; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.keysIn */ keysIn(): LoDashImplicitArrayWrapper<string>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.keysIn */ keysIn(): LoDashExplicitArrayWrapper<string>; } //_.mapKeys interface LoDashStatic { /** * The opposite of _.mapValues; this method creates an object with the same values as object and keys generated * by running each own enumerable property of object through iteratee. * * @param object The object to iterate over. * @param iteratee The function invoked per iteration. * @param thisArg The this binding of iteratee. * @return Returns the new mapped object. */ mapKeys<T, TKey>( object: List<T>, iteratee?: ListIterator<T, TKey>, thisArg?: any ): Dictionary<T>; /** * @see _.mapKeys */ mapKeys<T, TKey>( object: Dictionary<T>, iteratee?: DictionaryIterator<T, TKey>, thisArg?: any ): Dictionary<T>; /** * @see _.mapKeys */ mapKeys<T, TObject extends {}>( object: List<T>|Dictionary<T>, iteratee?: TObject ): Dictionary<T>; /** * @see _.mapKeys */ mapKeys<T>( object: List<T>|Dictionary<T>, iteratee?: string, thisArg?: any ): Dictionary<T>; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.mapKeys */ mapKeys<TKey>( iteratee?: ListIterator<T, TKey>, thisArg?: any ): LoDashImplicitObjectWrapper<Dictionary<T>>; /** * @see _.mapKeys */ mapKeys<TObject extends {}>( iteratee?: TObject ): LoDashImplicitObjectWrapper<Dictionary<T>>; /** * @see _.mapKeys */ mapKeys( iteratee?: string, thisArg?: any ): LoDashImplicitObjectWrapper<Dictionary<T>>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.mapKeys */ mapKeys<TResult, TKey>( iteratee?: ListIterator<TResult, TKey>|DictionaryIterator<TResult, TKey>, thisArg?: any ): LoDashImplicitObjectWrapper<Dictionary<TResult>>; /** * @see _.mapKeys */ mapKeys<TResult, TObject extends {}>( 
            iteratee?: TObject
        ): LoDashImplicitObjectWrapper<Dictionary<TResult>>;

        /**
         * @see _.mapKeys
         */
        mapKeys<TResult>(
            iteratee?: string,
            thisArg?: any
        ): LoDashImplicitObjectWrapper<Dictionary<TResult>>;
    }

    interface LoDashExplicitArrayWrapper<T> {
        /**
         * @see _.mapKeys
         */
        mapKeys<TKey>(
            iteratee?: ListIterator<T, TKey>,
            thisArg?: any
        ): LoDashExplicitObjectWrapper<Dictionary<T>>;

        /**
         * @see _.mapKeys
         */
        mapKeys<TObject extends {}>(
            iteratee?: TObject
        ): LoDashExplicitObjectWrapper<Dictionary<T>>;

        /**
         * @see _.mapKeys
         */
        mapKeys(
            iteratee?: string,
            thisArg?: any
        ): LoDashExplicitObjectWrapper<Dictionary<T>>;
    }

    interface LoDashExplicitObjectWrapper<T> {
        /**
         * @see _.mapKeys
         */
        mapKeys<TResult, TKey>(
            iteratee?: ListIterator<TResult, TKey>|DictionaryIterator<TResult, TKey>,
            thisArg?: any
        ): LoDashExplicitObjectWrapper<Dictionary<TResult>>;

        /**
         * @see _.mapKeys
         */
        mapKeys<TResult, TObject extends {}>(
            iteratee?: TObject
        ): LoDashExplicitObjectWrapper<Dictionary<TResult>>;

        /**
         * @see _.mapKeys
         */
        mapKeys<TResult>(
            iteratee?: string,
            thisArg?: any
        ): LoDashExplicitObjectWrapper<Dictionary<TResult>>;
    }

    //_.mapValues
    interface LoDashStatic {
        /**
         * Creates an object with the same keys as object and values generated by running each own
         * enumerable property of object through iteratee. The iteratee function is bound to thisArg
         * and invoked with three arguments: (value, key, object).
         *
         * If a property name is provided for iteratee the created "_.property" style callback returns
         * the property value of the given element.
         *
         * If a value is also provided for thisArg the created "_.matchesProperty" style callback returns
         * true for elements that have a matching property value, else false.
         *
         * If an object is provided for iteratee the created "_.matches" style callback returns true
         * for elements that have the properties of the given object, else false.
         *
         * @param {Object} object The object to iterate over.
* @param {Function|Object|string} [iteratee=_.identity] The function invoked per iteration. * @param {Object} [thisArg] The `this` binding of `iteratee`. * @return {Object} Returns the new mapped object. */ mapValues<T, TResult>(obj: Dictionary<T>, callback: ObjectIterator<T, TResult>, thisArg?: any): Dictionary<TResult>; mapValues<T>(obj: Dictionary<T>, where: Dictionary<T>): Dictionary<boolean>; mapValues<T, TMapped>(obj: T, pluck: string): TMapped; mapValues<T>(obj: T, callback: ObjectIterator<any, any>, thisArg?: any): T; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.mapValues * TValue is the type of the property values of T. * TResult is the type output by the ObjectIterator function */ mapValues<TValue, TResult>(callback: ObjectIterator<TValue, TResult>, thisArg?: any): LoDashImplicitObjectWrapper<Dictionary<TResult>>; /** * @see _.mapValues * TResult is the type of the property specified by pluck. * T should be a Dictionary<Dictionary<TResult>> */ mapValues<TResult>(pluck: string): LoDashImplicitObjectWrapper<Dictionary<TResult>>; /** * @see _.mapValues * TResult is the type of the properties on the object specified by pluck. * T should be a Dictionary<Dictionary<Dictionary<TResult>>> */ mapValues<TResult>(pluck: string, where: Dictionary<TResult>): LoDashImplicitArrayWrapper<Dictionary<boolean>>; /** * @see _.mapValues * TResult is the type of the properties of each object in the values of T * T should be a Dictionary<Dictionary<TResult>> */ mapValues<TResult>(where: Dictionary<TResult>): LoDashImplicitArrayWrapper<boolean>; } //_.merge interface MergeCustomizer { (value: any, srcValue: any, key?: string, object?: Object, source?: Object): any; } interface LoDashStatic { /** * Recursively merges own enumerable properties of the source object(s), that don’t resolve to undefined into * the destination object. Subsequent sources overwrite property assignments of previous sources. 
If customizer * is provided it’s invoked to produce the merged values of the destination and source properties. If * customizer returns undefined merging is handled by the method instead. The customizer is bound to thisArg * and invoked with five arguments: (objectValue, sourceValue, key, object, source). * * @param object The destination object. * @param source The source objects. * @param customizer The function to customize assigned values. * @param thisArg The this binding of customizer. * @return Returns object. */ merge<TObject, TSource>( object: TObject, source: TSource, customizer?: MergeCustomizer, thisArg?: any ): TObject & TSource; /** * @see _.merge */ merge<TObject, TSource1, TSource2>( object: TObject, source1: TSource1, source2: TSource2, customizer?: MergeCustomizer, thisArg?: any ): TObject & TSource1 & TSource2; /** * @see _.merge */ merge<TObject, TSource1, TSource2, TSource3>( object: TObject, source1: TSource1, source2: TSource2, source3: TSource3, customizer?: MergeCustomizer, thisArg?: any ): TObject & TSource1 & TSource2 & TSource3; /** * @see _.merge */ merge<TObject, TSource1, TSource2, TSource3, TSource4>( object: TObject, source1: TSource1, source2: TSource2, source3: TSource3, source4: TSource4, customizer?: MergeCustomizer, thisArg?: any ): TObject & TSource1 & TSource2 & TSource3 & TSource4; /** * @see _.merge */ merge<TResult>( object: any, ...otherArgs: any[] ): TResult; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.merge */ merge<TSource>( source: TSource, customizer?: MergeCustomizer, thisArg?: any ): LoDashImplicitObjectWrapper<T & TSource>; /** * @see _.merge */ merge<TSource1, TSource2>( source1: TSource1, source2: TSource2, customizer?: MergeCustomizer, thisArg?: any ): LoDashImplicitObjectWrapper<T & TSource1 & TSource2>; /** * @see _.merge */ merge<TSource1, TSource2, TSource3>( source1: TSource1, source2: TSource2, source3: TSource3, customizer?: MergeCustomizer, thisArg?: any ): LoDashImplicitObjectWrapper<T & 
TSource1 & TSource2 & TSource3>; /** * @see _.merge */ merge<TSource1, TSource2, TSource3, TSource4>( source1: TSource1, source2: TSource2, source3: TSource3, source4: TSource4, customizer?: MergeCustomizer, thisArg?: any ): LoDashImplicitObjectWrapper<T & TSource1 & TSource2 & TSource3 & TSource4>; /** * @see _.merge */ merge<TResult>( ...otherArgs: any[] ): LoDashImplicitObjectWrapper<TResult>; } //_.methods interface LoDashStatic { /** * @see _.functions */ methods<T extends {}>(object: any): string[]; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.functions */ methods(): _.LoDashImplicitArrayWrapper<string>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.functions */ methods(): _.LoDashExplicitArrayWrapper<string>; } //_.omit interface LoDashStatic { /** * The opposite of _.pick; this method creates an object composed of the own and inherited enumerable * properties of object that are not omitted. * * @param object The source object. * @param predicate The function invoked per iteration or property names to omit, specified as individual * property names or arrays of property names. * @param thisArg The this binding of predicate. * @return Returns the new object. 
*/ omit<TResult extends {}, T extends {}>( object: T, predicate: ObjectIterator<any, boolean>, thisArg?: any ): TResult; /** * @see _.omit */ omit<TResult extends {}, T extends {}>( object: T, ...predicate: (StringRepresentable|StringRepresentable[])[] ): TResult; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.omit */ omit<TResult extends {}>( predicate: ObjectIterator<any, boolean>, thisArg?: any ): LoDashImplicitObjectWrapper<TResult>; /** * @see _.omit */ omit<TResult extends {}>( ...predicate: (StringRepresentable|StringRepresentable[])[] ): LoDashImplicitObjectWrapper<TResult>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.omit */ omit<TResult extends {}>( predicate: ObjectIterator<any, boolean>, thisArg?: any ): LoDashExplicitObjectWrapper<TResult>; /** * @see _.omit */ omit<TResult extends {}>( ...predicate: (StringRepresentable|StringRepresentable[])[] ): LoDashExplicitObjectWrapper<TResult>; } //_.pairs interface LoDashStatic { /** * Creates a two dimensional array of the key-value pairs for object, e.g. [[key1, value1], [key2, value2]]. * * @param object The object to query. * @return Returns the new array of key-value pairs. */ pairs<T extends {}>(object?: T): any[][]; pairs<T extends {}, TResult>(object?: T): TResult[][]; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.pairs */ pairs<TResult>(): LoDashImplicitArrayWrapper<TResult[]>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.pairs */ pairs<TResult>(): LoDashExplicitArrayWrapper<TResult[]>; } //_.pick interface LoDashStatic { /** * Creates an object composed of the picked object properties. Property names may be specified as individual * arguments or as arrays of property names. If predicate is provided it’s invoked for each property of object * picking the properties predicate returns truthy for. The predicate is bound to thisArg and invoked with * three arguments: (value, key, object). * * @param object The source object. 
* @param predicate The function invoked per iteration or property names to pick, specified as individual * property names or arrays of property names. * @param thisArg The this binding of predicate. * @return Returns the new object. */ pick<TResult extends {}, T extends {}>( object: T, predicate: ObjectIterator<any, boolean>, thisArg?: any ): TResult; /** * @see _.pick */ pick<TResult extends {}, T extends {}>( object: T, ...predicate: (StringRepresentable|StringRepresentable[])[] ): TResult; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.pick */ pick<TResult extends {}>( predicate: ObjectIterator<any, boolean>, thisArg?: any ): LoDashImplicitObjectWrapper<TResult>; /** * @see _.pick */ pick<TResult extends {}>( ...predicate: (StringRepresentable|StringRepresentable[])[] ): LoDashImplicitObjectWrapper<TResult>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.pick */ pick<TResult extends {}>( predicate: ObjectIterator<any, boolean>, thisArg?: any ): LoDashExplicitObjectWrapper<TResult>; /** * @see _.pick */ pick<TResult extends {}>( ...predicate: (StringRepresentable|StringRepresentable[])[] ): LoDashExplicitObjectWrapper<TResult>; } //_.result interface LoDashStatic { /** * This method is like _.get except that if the resolved value is a function it’s invoked with the this binding * of its parent object and its result is returned. * * @param object The object to query. * @param path The path of the property to resolve. * @param defaultValue The value returned if the resolved value is undefined. * @return Returns the resolved value. */ result<TObject, TResult>( object: TObject, path: number|string|boolean|Array<number|string|boolean>, defaultValue?: TResult ): TResult; } interface LoDashImplicitWrapperBase<T, TWrapper> { /** * @see _.result */ result<TResult>( path: number|string|boolean|Array<number|string|boolean>, defaultValue?: TResult ): TResult; } //_.set interface LoDashStatic { /** * Sets the property value of path on object. 
If a portion of path does not exist it’s created. * * @param object The object to augment. * @param path The path of the property to set. * @param value The value to set. * @return Returns object. */ set<TResult>( object: Object, path: StringRepresentable|StringRepresentable[], value: any ): TResult; /** * @see _.set */ set<V, TResult>( object: Object, path: StringRepresentable|StringRepresentable[], value: V ): TResult; /** * @see _.set */ set<O, V, TResult>( object: O, path: StringRepresentable|StringRepresentable[], value: V ): TResult; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.set */ set<TResult>( path: StringRepresentable|StringRepresentable[], value: any ): LoDashImplicitObjectWrapper<TResult>; /** * @see _.set */ set<V, TResult>( path: StringRepresentable|StringRepresentable[], value: V ): LoDashImplicitObjectWrapper<TResult>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.set */ set<TResult>( path: StringRepresentable|StringRepresentable[], value: any ): LoDashExplicitObjectWrapper<TResult>; /** * @see _.set */ set<V, TResult>( path: StringRepresentable|StringRepresentable[], value: V ): LoDashExplicitObjectWrapper<TResult>; } //_.transform interface LoDashStatic { /** * An alternative to _.reduce; this method transforms object to a new accumulator object which is the result of * running each of its own enumerable properties through iteratee, with each invocation potentially mutating * the accumulator object. The iteratee is bound to thisArg and invoked with four arguments: (accumulator, * value, key, object). Iteratee functions may exit iteration early by explicitly returning false. * * @param object The object to iterate over. * @param iteratee The function invoked per iteration. * @param accumulator The custom accumulator value. * @param thisArg The this binding of iteratee. * @return Returns the accumulated value. 
*/ transform<T, TResult>( object: T[], iteratee?: MemoVoidArrayIterator<T, TResult[]>, accumulator?: TResult[], thisArg?: any ): TResult[]; /** * @see _.transform */ transform<T, TResult>( object: T[], iteratee?: MemoVoidArrayIterator<T, Dictionary<TResult>>, accumulator?: Dictionary<TResult>, thisArg?: any ): Dictionary<TResult>; /** * @see _.transform */ transform<T, TResult>( object: Dictionary<T>, iteratee?: MemoVoidDictionaryIterator<T, Dictionary<TResult>>, accumulator?: Dictionary<TResult>, thisArg?: any ): Dictionary<TResult>; /** * @see _.transform */ transform<T, TResult>( object: Dictionary<T>, iteratee?: MemoVoidDictionaryIterator<T, TResult[]>, accumulator?: TResult[], thisArg?: any ): TResult[]; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.transform */ transform<TResult>( iteratee?: MemoVoidArrayIterator<T, TResult[]>, accumulator?: TResult[], thisArg?: any ): LoDashImplicitArrayWrapper<TResult>; /** * @see _.transform */ transform<TResult>( iteratee?: MemoVoidArrayIterator<T, Dictionary<TResult>>, accumulator?: Dictionary<TResult>, thisArg?: any ): LoDashImplicitObjectWrapper<Dictionary<TResult>>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.transform */ transform<T, TResult>( iteratee?: MemoVoidDictionaryIterator<T, Dictionary<TResult>>, accumulator?: Dictionary<TResult>, thisArg?: any ): LoDashImplicitObjectWrapper<Dictionary<TResult>>; /** * @see _.transform */ transform<T, TResult>( iteratee?: MemoVoidDictionaryIterator<T, TResult[]>, accumulator?: TResult[], thisArg?: any ): LoDashImplicitArrayWrapper<TResult>; } //_.values interface LoDashStatic { /** * Creates an array of the own enumerable property values of object. * * @param object The object to query. * @return Returns an array of property values. 
*/ values<T>(object?: any): T[]; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.values */ values<T>(): LoDashImplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.values */ values<T>(): LoDashExplicitArrayWrapper<T>; } //_.valuesIn interface LoDashStatic { /** * Creates an array of the own and inherited enumerable property values of object. * * @param object The object to query. * @return Returns the array of property values. */ valuesIn<T>(object?: any): T[]; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.valuesIn */ valuesIn<T>(): LoDashImplicitArrayWrapper<T>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.valuesIn */ valuesIn<T>(): LoDashExplicitArrayWrapper<T>; } /********** * String * **********/ //_.camelCase interface LoDashStatic { /** * Converts string to camel case. * * @param string The string to convert. * @return Returns the camel cased string. */ camelCase(string?: string): string; } interface LoDashImplicitWrapper<T> { /** * @see _.camelCase */ camelCase(): string; } interface LoDashExplicitWrapper<T> { /** * @see _.camelCase */ camelCase(): LoDashExplicitWrapper<string>; } //_.capitalize interface LoDashStatic { capitalize(string?: string): string; } interface LoDashImplicitWrapper<T> { /** * @see _.capitalize */ capitalize(): string; } interface LoDashExplicitWrapper<T> { /** * @see _.capitalize */ capitalize(): LoDashExplicitWrapper<string>; } //_.deburr interface LoDashStatic { /** * Deburrs string by converting latin-1 supplementary letters to basic latin letters and removing combining * diacritical marks. * * @param string The string to deburr. * @return Returns the deburred string. 
*/ deburr(string?: string): string; } interface LoDashImplicitWrapper<T> { /** * @see _.deburr */ deburr(): string; } interface LoDashExplicitWrapper<T> { /** * @see _.deburr */ deburr(): LoDashExplicitWrapper<string>; } //_.endsWith interface LoDashStatic { /** * Checks if string ends with the given target string. * * @param string The string to search. * @param target The string to search for. * @param position The position to search from. * @return Returns true if string ends with target, else false. */ endsWith( string?: string, target?: string, position?: number ): boolean; } interface LoDashImplicitWrapper<T> { /** * @see _.endsWith */ endsWith( target?: string, position?: number ): boolean; } interface LoDashExplicitWrapper<T> { /** * @see _.endsWith */ endsWith( target?: string, position?: number ): LoDashExplicitWrapper<boolean>; } // _.escape interface LoDashStatic { /** * Converts the characters "&", "<", ">", '"', "'", and "`", in string to their corresponding HTML entities. * * Note: No other characters are escaped. To escape additional characters use a third-party library like he. * * Though the ">" character is escaped for symmetry, characters like ">" and "/" don’t need escaping in HTML * and have no special meaning unless they're part of a tag or unquoted attribute value. See Mathias Bynens’s * article (under "semi-related fun fact") for more details. * * Backticks are escaped because in Internet Explorer < 9, they can break out of attribute values or HTML * comments. See #59, #102, #108, and #133 of the HTML5 Security Cheatsheet for more details. * * When working with HTML you should always quote attribute values to reduce XSS vectors. * * @param string The string to escape. * @return Returns the escaped string. 
*/ escape(string?: string): string; } interface LoDashImplicitWrapper<T> { /** * @see _.escape */ escape(): string; } interface LoDashExplicitWrapper<T> { /** * @see _.escape */ escape(): LoDashExplicitWrapper<string>; } // _.escapeRegExp interface LoDashStatic { /** * Escapes the RegExp special characters "\", "/", "^", "$", ".", "|", "?", "*", "+", "(", ")", "[", "]", * "{" and "}" in string. * * @param string The string to escape. * @return Returns the escaped string. */ escapeRegExp(string?: string): string; } interface LoDashImplicitWrapper<T> { /** * @see _.escapeRegExp */ escapeRegExp(): string; } interface LoDashExplicitWrapper<T> { /** * @see _.escapeRegExp */ escapeRegExp(): LoDashExplicitWrapper<string>; } //_.kebabCase interface LoDashStatic { /** * Converts string to kebab case. * * @param string The string to convert. * @return Returns the kebab cased string. */ kebabCase(string?: string): string; } interface LoDashImplicitWrapper<T> { /** * @see _.kebabCase */ kebabCase(): string; } interface LoDashExplicitWrapper<T> { /** * @see _.kebabCase */ kebabCase(): LoDashExplicitWrapper<string>; } //_.pad interface LoDashStatic { /** * Pads string on the left and right sides if it’s shorter than length. Padding characters are truncated if * they can’t be evenly divided by length. * * @param string The string to pad. * @param length The padding length. * @param chars The string used as padding. * @return Returns the padded string. */ pad( string?: string, length?: number, chars?: string ): string; } interface LoDashImplicitWrapper<T> { /** * @see _.pad */ pad( length?: number, chars?: string ): string; } interface LoDashExplicitWrapper<T> { /** * @see _.pad */ pad( length?: number, chars?: string ): LoDashExplicitWrapper<string>; } //_.padLeft interface LoDashStatic { /** * Pads string on the left side if it’s shorter than length. Padding characters are truncated if they exceed * length. * * @param string The string to pad. * @param length The padding length. 
* @param chars The string used as padding. * @return Returns the padded string. */ padLeft( string?: string, length?: number, chars?: string ): string; } interface LoDashImplicitWrapper<T> { /** * @see _.padLeft */ padLeft( length?: number, chars?: string ): string; } interface LoDashExplicitWrapper<T> { /** * @see _.padLeft */ padLeft( length?: number, chars?: string ): LoDashExplicitWrapper<string>; } //_.padRight interface LoDashStatic { /** * Pads string on the right side if it’s shorter than length. Padding characters are truncated if they exceed * length. * * @param string The string to pad. * @param length The padding length. * @param chars The string used as padding. * @return Returns the padded string. */ padRight( string?: string, length?: number, chars?: string ): string; } interface LoDashImplicitWrapper<T> { /** * @see _.padRight */ padRight( length?: number, chars?: string ): string; } interface LoDashExplicitWrapper<T> { /** * @see _.padRight */ padRight( length?: number, chars?: string ): LoDashExplicitWrapper<string>; } //_.parseInt interface LoDashStatic { /** * Converts string to an integer of the specified radix. If radix is undefined or 0, a radix of 10 is used * unless value is a hexadecimal, in which case a radix of 16 is used. * * Note: This method aligns with the ES5 implementation of parseInt. * * @param string The string to convert. * @param radix The radix to interpret value by. * @return Returns the converted integer. */ parseInt( string: string, radix?: number ): number; } interface LoDashImplicitWrapper<T> { /** * @see _.parseInt */ parseInt(radix?: number): number; } interface LoDashExplicitWrapper<T> { /** * @see _.parseInt */ parseInt(radix?: number): LoDashExplicitWrapper<number>; } //_.repeat interface LoDashStatic { /** * Repeats the given string n times. * * @param string The string to repeat. * @param n The number of times to repeat the string. * @return Returns the repeated string. 
*/ repeat( string?: string, n?: number ): string; } interface LoDashImplicitWrapper<T> { /** * @see _.repeat */ repeat(n?: number): string; } interface LoDashExplicitWrapper<T> { /** * @see _.repeat */ repeat(n?: number): LoDashExplicitWrapper<string>; } //_.snakeCase interface LoDashStatic { /** * Converts string to snake case. * * @param string The string to convert. * @return Returns the snake cased string. */ snakeCase(string?: string): string; } interface LoDashImplicitWrapper<T> { /** * @see _.snakeCase */ snakeCase(): string; } interface LoDashExplicitWrapper<T> { /** * @see _.snakeCase */ snakeCase(): LoDashExplicitWrapper<string>; } //_.startCase interface LoDashStatic { /** * Converts string to start case. * * @param string The string to convert. * @return Returns the start cased string. */ startCase(string?: string): string; } interface LoDashImplicitWrapper<T> { /** * @see _.startCase */ startCase(): string; } interface LoDashExplicitWrapper<T> { /** * @see _.startCase */ startCase(): LoDashExplicitWrapper<string>; } //_.startsWith interface LoDashStatic { /** * Checks if string starts with the given target string. * * @param string The string to search. * @param target The string to search for. * @param position The position to search from. * @return Returns true if string starts with target, else false. */ startsWith( string?: string, target?: string, position?: number ): boolean; } interface LoDashImplicitWrapper<T> { /** * @see _.startsWith */ startsWith( target?: string, position?: number ): boolean; } interface LoDashExplicitWrapper<T> { /** * @see _.startsWith */ startsWith( target?: string, position?: number ): LoDashExplicitWrapper<boolean>; } //_.template interface TemplateOptions extends TemplateSettings { /** * The sourceURL of the template's compiled source. 
*/ sourceURL?: string; } interface TemplateExecutor { (data?: Object): string; source: string; } interface LoDashStatic { /** * Creates a compiled template function that can interpolate data properties in "interpolate" delimiters, * HTML-escape interpolated data properties in "escape" delimiters, and execute JavaScript in "evaluate" * delimiters. Data properties may be accessed as free variables in the template. If a setting object is * provided it takes precedence over _.templateSettings values. * * Note: In the development build _.template utilizes * [sourceURLs](http://www.html5rocks.com/en/tutorials/developertools/sourcemaps/#toc-sourceurl) for easier * debugging. * * For more information on precompiling templates see * [lodash's custom builds documentation](https://lodash.com/custom-builds). * * For more information on Chrome extension sandboxes see * [Chrome's extensions documentation](https://developer.chrome.com/extensions/sandboxingEval). * * @param string The template string. * @param options The options object. * @param options.escape The HTML "escape" delimiter. * @param options.evaluate The "evaluate" delimiter. * @param options.imports An object to import into the template as free variables. * @param options.interpolate The "interpolate" delimiter. * @param options.sourceURL The sourceURL of the template's compiled source. * @param options.variable The data object variable name. * @return Returns the compiled template function. */ template( string: string, options?: TemplateOptions ): TemplateExecutor; } interface LoDashImplicitWrapper<T> { /** * @see _.template */ template(options?: TemplateOptions): TemplateExecutor; } interface LoDashExplicitWrapper<T> { /** * @see _.template */ template(options?: TemplateOptions): LoDashExplicitObjectWrapper<TemplateExecutor>; } //_.trim interface LoDashStatic { /** * Removes leading and trailing whitespace or specified characters from string. * * @param string The string to trim. 
* @param chars The characters to trim. * @return Returns the trimmed string. */ trim( string?: string, chars?: string ): string; } interface LoDashImplicitWrapper<T> { /** * @see _.trim */ trim(chars?: string): string; } interface LoDashExplicitWrapper<T> { /** * @see _.trim */ trim(chars?: string): LoDashExplicitWrapper<string>; } //_.trimLeft interface LoDashStatic { /** * Removes leading whitespace or specified characters from string. * * @param string The string to trim. * @param chars The characters to trim. * @return Returns the trimmed string. */ trimLeft( string?: string, chars?: string ): string; } interface LoDashImplicitWrapper<T> { /** * @see _.trimLeft */ trimLeft(chars?: string): string; } interface LoDashExplicitWrapper<T> { /** * @see _.trimLeft */ trimLeft(chars?: string): LoDashExplicitWrapper<string>; } //_.trimRight interface LoDashStatic { /** * Removes trailing whitespace or specified characters from string. * * @param string The string to trim. * @param chars The characters to trim. * @return Returns the trimmed string. */ trimRight( string?: string, chars?: string ): string; } interface LoDashImplicitWrapper<T> { /** * @see _.trimRight */ trimRight(chars?: string): string; } interface LoDashExplicitWrapper<T> { /** * @see _.trimRight */ trimRight(chars?: string): LoDashExplicitWrapper<string>; } //_.trunc interface TruncOptions { /** The maximum string length. */ length?: number; /** The string to indicate text is omitted. */ omission?: string; /** The separator pattern to truncate to. */ separator?: string|RegExp; } interface LoDashStatic { /** * Truncates string if it’s longer than the given maximum string length. The last characters of the truncated * string are replaced with the omission string which defaults to "…". * * @param string The string to truncate. * @param options The options object or maximum string length. * @return Returns the truncated string. 
*/ trunc( string?: string, options?: TruncOptions|number ): string; } interface LoDashImplicitWrapper<T> { /** * @see _.trunc */ trunc(options?: TruncOptions|number): string; } interface LoDashExplicitWrapper<T> { /** * @see _.trunc */ trunc(options?: TruncOptions|number): LoDashExplicitWrapper<string>; } //_.unescape interface LoDashStatic { /** * The inverse of _.escape; this method converts the HTML entities &amp;, &lt;, &gt;, &quot;, &#39;, and &#96; * in string to their corresponding characters. * * @param string The string to unescape. * @return Returns the unescaped string. */ unescape(string?: string): string; } interface LoDashImplicitWrapper<T> { /** * @see _.unescape */ unescape(): string; } interface LoDashExplicitWrapper<T> { /** * @see _.unescape */ unescape(): LoDashExplicitWrapper<string>; } //_.words interface LoDashStatic { /** * Splits string into an array of its words. * * @param string The string to inspect. * @param pattern The pattern to match words. * @return Returns the words of string. */ words( string?: string, pattern?: string|RegExp ): string[]; } interface LoDashImplicitWrapper<T> { /** * @see _.words */ words(pattern?: string|RegExp): string[]; } interface LoDashExplicitWrapper<T> { /** * @see _.words */ words(pattern?: string|RegExp): LoDashExplicitArrayWrapper<string>; } /*********** * Utility * ***********/ //_.attempt interface LoDashStatic { /** * Attempts to invoke func, returning either the result or the caught error object. Any additional arguments * are provided to func when it’s invoked. * * @param func The function to attempt. * @return Returns the func result or error object. 
*/ attempt<TResult>(func: (...args: any[]) => TResult, ...args: any[]): TResult|Error; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.attempt */ attempt<TResult>(...args: any[]): TResult|Error; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.attempt */ attempt<TResult>(...args: any[]): LoDashExplicitObjectWrapper<TResult|Error>; } //_.callback interface LoDashStatic { /** * Creates a function that invokes func with the this binding of thisArg and arguments of the created function. * If func is a property name the created callback returns the property value for a given element. If func is * an object the created callback returns true for elements that contain the equivalent object properties, * otherwise it returns false. * * @param func The value to convert to a callback. * @param thisArg The this binding of func. * @result Returns the callback. */ callback<TResult>( func: Function, thisArg?: any ): (...args: any[]) => TResult; /** * @see _.callback */ callback<TResult>( func: string, thisArg?: any ): (object: any) => TResult; /** * @see _.callback */ callback( func: Object, thisArg?: any ): (object: any) => boolean; /** * @see _.callback */ callback<TResult>(): (value: TResult) => TResult; } interface LoDashImplicitWrapper<T> { /** * @see _.callback */ callback<TResult>(thisArg?: any): LoDashImplicitObjectWrapper<(object: any) => TResult>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.callback */ callback(thisArg?: any): LoDashImplicitObjectWrapper<(object: any) => boolean>; /** * @see _.callback */ callback<TResult>(thisArg?: any): LoDashImplicitObjectWrapper<(...args: any[]) => TResult>; } interface LoDashExplicitWrapper<T> { /** * @see _.callback */ callback<TResult>(thisArg?: any): LoDashExplicitObjectWrapper<(object: any) => TResult>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.callback */ callback(thisArg?: any): LoDashExplicitObjectWrapper<(object: any) => boolean>; /** * @see _.callback */ 
callback<TResult>(thisArg?: any): LoDashExplicitObjectWrapper<(...args: any[]) => TResult>; } //_.constant interface LoDashStatic { /** * Creates a function that returns value. * * @param value The value to return from the new function. * @return Returns the new function. */ constant<T>(value: T): () => T; } interface LoDashImplicitWrapperBase<T, TWrapper> { /** * @see _.constant */ constant<TResult>(): LoDashImplicitObjectWrapper<() => TResult>; } interface LoDashExplicitWrapperBase<T, TWrapper> { /** * @see _.constant */ constant<TResult>(): LoDashExplicitObjectWrapper<() => TResult>; } //_.identity interface LoDashStatic { /** * This method returns the first argument provided to it. * @param value Any value. * @return Returns value. */ identity<T>(value?: T): T; } interface LoDashImplicitWrapper<T> { /** * @see _.identity */ identity(): T; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.identity */ identity(): T[]; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.identity */ identity(): T; } //_.iteratee interface LoDashStatic { /** * @see _.callback */ iteratee<TResult>( func: Function, thisArg?: any ): (...args: any[]) => TResult; /** * @see _.callback */ iteratee<TResult>( func: string, thisArg?: any ): (object: any) => TResult; /** * @see _.callback */ iteratee( func: Object, thisArg?: any ): (object: any) => boolean; /** * @see _.callback */ iteratee<TResult>(): (value: TResult) => TResult; } interface LoDashImplicitWrapper<T> { /** * @see _.callback */ iteratee<TResult>(thisArg?: any): LoDashImplicitObjectWrapper<(object: any) => TResult>; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.callback */ iteratee(thisArg?: any): LoDashImplicitObjectWrapper<(object: any) => boolean>; /** * @see _.callback */ iteratee<TResult>(thisArg?: any): LoDashImplicitObjectWrapper<(...args: any[]) => TResult>; } interface LoDashExplicitWrapper<T> { /** * @see _.callback */ iteratee<TResult>(thisArg?: any): LoDashExplicitObjectWrapper<(object: any) 
=> TResult>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.callback */ iteratee(thisArg?: any): LoDashExplicitObjectWrapper<(object: any) => boolean>; /** * @see _.callback */ iteratee<TResult>(thisArg?: any): LoDashExplicitObjectWrapper<(...args: any[]) => TResult>; } //_.matches interface LoDashStatic { /** * Creates a function that performs a deep comparison between a given object and source, returning true if the * given object has equivalent property values, else false. * * Note: This method supports comparing arrays, booleans, Date objects, numbers, Object objects, regexes, and * strings. Objects are compared by their own, not inherited, enumerable properties. For comparing a single own * or inherited property value see _.matchesProperty. * * @param source The object of property values to match. * @return Returns the new function. */ matches<T>(source: T): (value: any) => boolean; /** * @see _.matches */ matches<T, V>(source: T): (value: V) => boolean; } interface LoDashImplicitWrapperBase<T, TWrapper> { /** * @see _.matches */ matches<V>(): LoDashImplicitObjectWrapper<(value: V) => boolean>; } interface LoDashExplicitWrapperBase<T, TWrapper> { /** * @see _.matches */ matches<V>(): LoDashExplicitObjectWrapper<(value: V) => boolean>; } //_.matchesProperty interface LoDashStatic { /** * Creates a function that compares the property value of path on a given object to value. * * Note: This method supports comparing arrays, booleans, Date objects, numbers, Object objects, regexes, and * strings. Objects are compared by their own, not inherited, enumerable properties. * * @param path The path of the property to get. * @param srcValue The value to match. * @return Returns the new function. 
*/ matchesProperty<T>( path: StringRepresentable|StringRepresentable[], srcValue: T ): (value: any) => boolean; /** * @see _.matchesProperty */ matchesProperty<T, V>( path: StringRepresentable|StringRepresentable[], srcValue: T ): (value: V) => boolean; } interface LoDashImplicitWrapperBase<T, TWrapper> { /** * @see _.matchesProperty */ matchesProperty<SrcValue>( srcValue: SrcValue ): LoDashImplicitObjectWrapper<(value: any) => boolean>; /** * @see _.matchesProperty */ matchesProperty<SrcValue, Value>( srcValue: SrcValue ): LoDashImplicitObjectWrapper<(value: Value) => boolean>; } interface LoDashExplicitWrapperBase<T, TWrapper> { /** * @see _.matchesProperty */ matchesProperty<SrcValue>( srcValue: SrcValue ): LoDashExplicitObjectWrapper<(value: any) => boolean>; /** * @see _.matchesProperty */ matchesProperty<SrcValue, Value>( srcValue: SrcValue ): LoDashExplicitObjectWrapper<(value: Value) => boolean>; } //_.method interface LoDashStatic { /** * Creates a function that invokes the method at path on a given object. Any additional arguments are provided * to the invoked method. * * @param path The path of the method to invoke. * @param args The arguments to invoke the method with. * @return Returns the new function. 
*/ method<TObject, TResult>( path: string|StringRepresentable[], ...args: any[] ): (object: TObject) => TResult; /** * @see _.method */ method<TResult>( path: string|StringRepresentable[], ...args: any[] ): (object: any) => TResult; } interface LoDashImplicitWrapper<T> { /** * @see _.method */ method<TObject, TResult>(...args: any[]): LoDashImplicitObjectWrapper<(object: TObject) => TResult>; /** * @see _.method */ method<TResult>(...args: any[]): LoDashImplicitObjectWrapper<(object: any) => TResult>; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.method */ method<TObject, TResult>(...args: any[]): LoDashImplicitObjectWrapper<(object: TObject) => TResult>; /** * @see _.method */ method<TResult>(...args: any[]): LoDashImplicitObjectWrapper<(object: any) => TResult>; } interface LoDashExplicitWrapper<T> { /** * @see _.method */ method<TObject, TResult>(...args: any[]): LoDashExplicitObjectWrapper<(object: TObject) => TResult>; /** * @see _.method */ method<TResult>(...args: any[]): LoDashExplicitObjectWrapper<(object: any) => TResult>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.method */ method<TObject, TResult>(...args: any[]): LoDashExplicitObjectWrapper<(object: TObject) => TResult>; /** * @see _.method */ method<TResult>(...args: any[]): LoDashExplicitObjectWrapper<(object: any) => TResult>; } //_.methodOf interface LoDashStatic { /** * The opposite of _.method; this method creates a function that invokes the method at a given path on object. * Any additional arguments are provided to the invoked method. * * @param object The object to query. * @param args The arguments to invoke the method with. * @return Returns the new function. 
*/ methodOf<TObject extends {}, TResult>( object: TObject, ...args: any[] ): (path: StringRepresentable|StringRepresentable[]) => TResult; /** * @see _.methodOf */ methodOf<TResult>( object: {}, ...args: any[] ): (path: StringRepresentable|StringRepresentable[]) => TResult; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.methodOf */ methodOf<TResult>( ...args: any[] ): LoDashImplicitObjectWrapper<(path: StringRepresentable|StringRepresentable[]) => TResult>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.methodOf */ methodOf<TResult>( ...args: any[] ): LoDashExplicitObjectWrapper<(path: StringRepresentable|StringRepresentable[]) => TResult>; } //_.mixin interface MixinOptions { chain?: boolean; } interface LoDashStatic { /** * Adds all own enumerable function properties of a source object to the destination object. If object is a * function then methods are added to its prototype as well. * * Note: Use _.runInContext to create a pristine lodash function to avoid conflicts caused by modifying * the original. * * @param object The destination object. * @param source The object of functions to add. * @param options The options object. * @param options.chain Specify whether the functions added are chainable. * @return Returns object. 
*/ mixin<TResult, TObject>( object: TObject, source: Dictionary<Function>, options?: MixinOptions ): TResult; /** * @see _.mixin */ mixin<TResult>( source: Dictionary<Function>, options?: MixinOptions ): TResult; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.mixin */ mixin<TResult>( source: Dictionary<Function>, options?: MixinOptions ): LoDashImplicitObjectWrapper<TResult>; /** * @see _.mixin */ mixin<TResult>( options?: MixinOptions ): LoDashImplicitObjectWrapper<TResult>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.mixin */ mixin<TResult>( source: Dictionary<Function>, options?: MixinOptions ): LoDashExplicitObjectWrapper<TResult>; /** * @see _.mixin */ mixin<TResult>( options?: MixinOptions ): LoDashExplicitObjectWrapper<TResult>; } //_.noConflict interface LoDashStatic { /** * Reverts the _ variable to its previous value and returns a reference to the lodash function. * * @return Returns the lodash function. */ noConflict(): typeof _; } interface LoDashImplicitWrapperBase<T, TWrapper> { /** * @see _.noConflict */ noConflict(): typeof _; } //_.noop interface LoDashStatic { /** * A no-operation function that returns undefined regardless of the arguments it receives. * * @return undefined */ noop(...args: any[]): void; } interface LoDashImplicitWrapperBase<T, TWrapper> { /** * @see _.noop */ noop(...args: any[]): void; } interface LoDashExplicitWrapperBase<T, TWrapper> { /** * @see _.noop */ noop(...args: any[]): _.LoDashExplicitWrapper<void>; } //_.property interface LoDashStatic { /** * Creates a function that returns the property value at path on a given object. * * @param path The path of the property to get. * @return Returns the new function. 
*/ property<TObj, TResult>(path: StringRepresentable|StringRepresentable[]): (obj: TObj) => TResult; } interface LoDashImplicitWrapper<T> { /** * @see _.property */ property<TObj, TResult>(): LoDashImplicitObjectWrapper<(obj: TObj) => TResult>; } interface LoDashImplicitArrayWrapper<T> { /** * @see _.property */ property<TObj, TResult>(): LoDashImplicitObjectWrapper<(obj: TObj) => TResult>; } interface LoDashExplicitWrapper<T> { /** * @see _.property */ property<TObj, TResult>(): LoDashExplicitObjectWrapper<(obj: TObj) => TResult>; } interface LoDashExplicitArrayWrapper<T> { /** * @see _.property */ property<TObj, TResult>(): LoDashExplicitObjectWrapper<(obj: TObj) => TResult>; } //_.propertyOf interface LoDashStatic { /** * The opposite of _.property; this method creates a function that returns the property value at a given path * on object. * * @param object The object to query. * @return Returns the new function. */ propertyOf<T extends {}>(object: T): (path: string|string[]) => any; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.propertyOf */ propertyOf(): LoDashImplicitObjectWrapper<(path: string|string[]) => any>; } interface LoDashExplicitObjectWrapper<T> { /** * @see _.propertyOf */ propertyOf(): LoDashExplicitObjectWrapper<(path: string|string[]) => any>; } //_.range interface LoDashStatic { /** * Creates an array of numbers (positive and/or negative) progressing from start up to, but not including, end. * If end is not specified it’s set to start with start then set to 0. If end is less than start a zero-length * range is created unless a negative step is specified. * * @param start The start of the range. * @param end The end of the range. * @param step The value to increment or decrement by. * @return Returns a new range array. 
*/ range( start: number, end: number, step?: number ): number[]; /** * @see _.range */ range( end: number, step?: number ): number[]; } interface LoDashImplicitWrapper<T> { /** * @see _.range */ range( end?: number, step?: number ): LoDashImplicitArrayWrapper<number>; } interface LoDashExplicitWrapper<T> { /** * @see _.range */ range( end?: number, step?: number ): LoDashExplicitArrayWrapper<number>; } //_.runInContext interface LoDashStatic { /** * Create a new pristine lodash function using the given context object. * * @param context The context object. * @return Returns a new lodash function. */ runInContext(context?: Object): typeof _; } interface LoDashImplicitObjectWrapper<T> { /** * @see _.runInContext */ runInContext(): typeof _; } //_.times interface LoDashStatic { /** * Invokes the iteratee function n times, returning an array of the results of each invocation. The iteratee is * bound to thisArg and invoked with one argument; (index). * * @param n The number of times to invoke iteratee. * @param iteratee The function invoked per iteration. * @param thisArg The this binding of iteratee. * @return Returns the array of results. */ times<TResult>( n: number, iteratee: (num: number) => TResult, thisArg?: any ): TResult[]; /** * @see _.times */ times(n: number): number[]; } interface LoDashImplicitWrapper<T> { /** * @see _.times */ times<TResult>( iteratee: (num: number) => TResult, thisArgs?: any ): LoDashImplicitArrayWrapper<TResult>; /** * @see _.times */ times(): LoDashImplicitArrayWrapper<number>; } interface LoDashExplicitWrapper<T> { /** * @see _.times */ times<TResult>( iteratee: (num: number) => TResult, thisArgs?: any ): LoDashExplicitArrayWrapper<TResult>; /** * @see _.times */ times(): LoDashExplicitArrayWrapper<number>; } //_.uniqueId interface LoDashStatic { /** * Generates a unique ID. If prefix is provided the ID is appended to it. * * @param prefix The value to prefix the ID with. * @return Returns the unique ID. 
*/ uniqueId(prefix?: string): string; } interface LoDashImplicitWrapper<T> { /** * @see _.uniqueId */ uniqueId(): string; } interface LoDashExplicitWrapper<T> { /** * @see _.uniqueId */ uniqueId(): LoDashExplicitWrapper<string>; } interface ListIterator<T, TResult> { (value: T, index: number, collection: List<T>): TResult; } interface DictionaryIterator<T, TResult> { (value: T, key?: string, collection?: Dictionary<T>): TResult; } interface NumericDictionaryIterator<T, TResult> { (value: T, key?: number, collection?: Dictionary<T>): TResult; } interface ObjectIterator<T, TResult> { (element: T, key?: string, collection?: any): TResult; } interface StringIterator<TResult> { (char: string, index?: number, string?: string): TResult; } interface MemoVoidIterator<T, TResult> { (prev: TResult, curr: T, indexOrKey?: any, list?: T[]): void; } interface MemoIterator<T, TResult> { (prev: TResult, curr: T, indexOrKey?: any, list?: T[]): TResult; } interface MemoVoidArrayIterator<T, TResult> { (acc: TResult, curr: T, index?: number, arr?: T[]): void; } interface MemoVoidDictionaryIterator<T, TResult> { (acc: TResult, curr: T, key?: string, dict?: Dictionary<T>): void; } //interface Collection<T> {} // Common interface between Arrays and jQuery objects interface List<T> { [index: number]: T; length: number; } interface Dictionary<T> { [index: string]: T; } interface NumericDictionary<T> { [index: number]: T; } interface StringRepresentable { toString(): string; } interface Cancelable { cancel(): void; } } declare module "lodash" { export = _; }<|fim▁end|>
<|file_name|>wizard_genesis.go<|end_file_name|><|fim▁begin|>// Copyright 2017 The AriseID Authors // This file is part AriseID. // // AriseID free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // AriseID distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with AriseID. If not, see <http://www.gnu.org/licenses/>. package main import ( "bytes" "fmt" "math/big" "math/rand" "time" "github.com/ariseid/ariseid-core/common" "github.com/ariseid/ariseid-core/core" "github.com/ariseid/ariseid-core/log" "github.com/ariseid/ariseid-core/params" ) // makeGenesis creates a new genesis struct based on some user input. func (w *wizard) makeGenesis() { // Construct a default genesis block genesis := &core.Genesis{ Timestamp: uint64(time.Now().Unix()), LifeLimit: 4700000, Difficulty: big.NewInt(1048576), Alloc: make(core.GenesisAlloc), Config: &params.ChainConfig{ HomesteadBlock: big.NewInt(1), EIP150Block: big.NewInt(2), EIP155Block: big.NewInt(3), EIP158Block: big.NewInt(3), }, } // Figure out which consensus engine to choose fmt.Println() fmt.Println("Which consensus engine to use? (default = clique)") fmt.Println(" 1. Idhash - proof-of-work") fmt.Println(" 2. 
Clique - proof-of-authority") choice := w.read() switch { case choice == "1": // In case of idhash, we're pretty much done genesis.Config.Idhash = new(params.IdhashConfig) genesis.ExtraData = make([]byte, 32) case choice == "" || choice == "2": // In the case of clique, configure the consensus parameters genesis.Difficulty = big.NewInt(1) genesis.Config.Clique = &params.CliqueConfig{ Period: 15, Epoch: 30000, } fmt.Println() fmt.Println("How many seconds should blocks take? (default = 15)") genesis.Config.Clique.Period = uint64(w.readDefaultInt(15)) // We also need the initial list of signers fmt.Println() fmt.Println("Which accounts are allowed to seal? (mandatory at least one)") var signers []common.Address for { if address := w.readAddress(); address != nil { signers = append(signers, *address) continue } if len(signers) > 0 { break } } // Sort the signers and embed into the extra-data section for i := 0; i < len(signers); i++ { for j := i + 1; j < len(signers); j++ { if bytes.Compare(signers[i][:], signers[j][:]) > 0 { signers[i], signers[j] = signers[j], signers[i]<|fim▁hole|> for i, signer := range signers { copy(genesis.ExtraData[32+i*common.AddressLength:], signer[:]) } default: log.Crit("Invalid consensus engine choice", "choice", choice) } // Consensus all set, just ask for initial funds and go fmt.Println() fmt.Println("Which accounts should be pre-funded? 
(advisable at least one)") for { // Read the address of the account to fund if address := w.readAddress(); address != nil { genesis.Alloc[*address] = core.GenesisAccount{ Balance: new(big.Int).Lsh(big.NewInt(1), 256-7), // 2^256 / 128 (allow many pre-funds without balance overflows) } continue } break } // Add a batch of precompile balances to avoid them getting deleted for i := int64(0); i < 256; i++ { genesis.Alloc[common.BigToAddress(big.NewInt(i))] = core.GenesisAccount{Balance: big.NewInt(1)} } fmt.Println() // Query the user for some custom extras fmt.Println() fmt.Println("Specify your chain/network ID if you want an explicit one (default = random)") genesis.Config.ChainId = new(big.Int).SetUint64(uint64(w.readDefaultInt(rand.Intn(65536)))) fmt.Println() fmt.Println("Anything fun to embed into the genesis block? (max 32 bytes)") extra := w.read() if len(extra) > 32 { extra = extra[:32] } genesis.ExtraData = append([]byte(extra), genesis.ExtraData[len(extra):]...) // All done, store the genesis and flush to disk w.conf.genesis = genesis }<|fim▁end|>
} } } genesis.ExtraData = make([]byte, 32+len(signers)*common.AddressLength+65)