file_name — stringlengths 3…137
prefix — stringlengths 0…918k
suffix — stringlengths 0…962k
middle — stringlengths 0…812k
server.rs
//! Modbus RTU server (slave) specific functions. use super::*; /// Decode an RTU request. pub fn decode_request(buf: &[u8]) -> Result<Option<RequestAdu>> { decode(DecoderType::Request, buf) .and_then(|frame| { if let Some((DecodedFrame { slave, pdu }, _frame_pos)) = frame { let hdr = Header { slave }; // Decoding of the PDU is unlikely to fail due // to transmission errors, because the frame's bytes // have already been verified with the CRC. Request::try_from(pdu) .map(RequestPdu) .map(|pdu| Some(RequestAdu { hdr, pdu })) .map_err(|err| { // Unrecoverable error error!("Failed to decode request PDU: {}", err); err }) } else { Ok(None) } }) .or_else(|_| { // Decoding the transport frame is non-destructive and must // never fail! unreachable!(); }) } /// Encode an RTU response. pub fn encode_response(adu: ResponseAdu, buf: &mut [u8]) -> Result<usize>
#[cfg(test)] mod tests { use super::*; #[test] fn decode_empty_request() { let req = decode_request(&[]).unwrap(); assert!(req.is_none()); } #[test] fn decode_partly_received_request() { let buf = &[ 0x12, // slave address 0x16, // function code ]; let req = decode_request(buf).unwrap(); assert!(req.is_none()); } #[test] fn decode_write_single_register_request() { let buf = &[ 0x12, // slave address 0x06, // function code 0x22, // addr 0x22, // addr 0xAB, // value 0xCD, // value 0x9F, // crc 0xBE, // crc ]; let adu = decode_request(buf).unwrap().unwrap(); let RequestAdu { hdr, pdu } = adu; let RequestPdu(pdu) = pdu; assert_eq!(hdr.slave, 0x12); assert_eq!(FnCode::from(pdu), FnCode::WriteSingleRegister); } #[test] fn encode_write_single_register_response() { let adu = ResponseAdu { hdr: Header { slave: 0x12 }, pdu: ResponsePdu(Ok(Response::WriteSingleRegister(0x2222, 0xABCD))), }; let buf = &mut [0; 100]; let len = encode_response(adu, buf).unwrap(); assert_eq!(len, 8); assert_eq!(buf[0], 0x12); assert_eq!(buf[1], 0x06); assert_eq!(buf[2], 0x22); assert_eq!(buf[3], 0x22); assert_eq!(buf[4], 0xAB); assert_eq!(buf[5], 0xCD); assert_eq!(buf[6], 0x9F); assert_eq!(buf[7], 0xBE); } }
{ let ResponseAdu { hdr, pdu } = adu; if buf.len() < 2 { return Err(Error::BufferSize); } let len = pdu.encode(&mut buf[1..])?; if buf.len() < len + 3 { return Err(Error::BufferSize); } buf[0] = hdr.slave; let crc = crc16(&buf[0..=len]); BigEndian::write_u16(&mut buf[len + 1..], crc); Ok(len + 3) }
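The CRC that `encode_response` appends is the standard Modbus CRC-16 (init 0xFFFF, reflected polynomial 0xA001), computed over the slave address plus the encoded PDU. Below is a minimal self-contained sketch of that checksum; judging by the trailing test bytes above (0x9F, 0xBE), the crate's own `crc16` helper appears to return the byte-swapped value so that the big-endian write in `encode_response` puts the low byte first on the wire, as Modbus requires — that convention is inferred from the tests, not taken from the crate's source:

/// Standard Modbus CRC-16: init 0xFFFF, reflected poly 0xA001.
fn modbus_crc16(data: &[u8]) -> u16 {
    let mut crc: u16 = 0xFFFF;
    for &byte in data {
        crc ^= u16::from(byte);
        for _ in 0..8 {
            crc = if crc & 1 != 0 { (crc >> 1) ^ 0xA001 } else { crc >> 1 };
        }
    }
    crc
}

fn main() {
    // Frame from the tests above: slave 0x12, fn 0x06, addr 0x2222, value 0xABCD.
    let frame = [0x12, 0x06, 0x22, 0x22, 0xAB, 0xCD];
    let crc = modbus_crc16(&frame);
    assert_eq!(crc, 0xBE9F);
    // Modbus transmits the low byte first: 0x9F, 0xBE — the trailing pair in the test.
    assert_eq!(crc.to_le_bytes(), [0x9F, 0xBE]);
}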
api_watcher.go
package podnames import ( "errors" k8sapi "github.com/chrisohaver/k8s_api/k8s_api" "k8s.io/client-go/tools/cache" ) func (p *PodNames) Informers() map[string]k8sapi.InformerFunc { return nil } func (p *PodNames) SetIndexer(name string, lister cache.KeyListerGetter) error { if name != "pod" {
return errors.New("unexpected lister type") } p.podIndexer = pidx return nil } func (p *PodNames) SetHasSynced(syncedFunc k8sapi.HasSyncedFunc) {}
return nil } pidx, ok := lister.(cache.Indexer) if !ok {
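Because the fill-in-the-middle split obscures it, here is `SetIndexer` with prefix, middle, and suffix stitched back together — purely a reassembly of the pieces above, no new logic:

func (p *PodNames) SetIndexer(name string, lister cache.KeyListerGetter) error {
	if name != "pod" {
		return nil
	}
	pidx, ok := lister.(cache.Indexer)
	if !ok {
		return errors.New("unexpected lister type")
	}
	p.podIndexer = pidx
	return nil
}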
label.rs
// Copyright 2015-2018 Benjamin Fry <[email protected]> // // Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or // http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or // http://opensource.org/licenses/MIT>, at your option. This file may not be // copied, modified, or distributed except according to those terms. //! Labels are used as the internal components of a Name. //! //! A label is stored internally as ASCII; all Unicode characters are converted to Punycode. #[allow(clippy::useless_attribute)] #[allow(unused)] #[allow(deprecated)] use std::ascii::AsciiExt; use std::borrow::Borrow; use std::cmp::{Ordering, PartialEq}; use std::fmt::{self, Debug, Display, Formatter, Write}; use std::hash::{Hash, Hasher}; use std::sync::Arc as Rc; use idna::uts46; use crate::error::*; const WILDCARD: &[u8] = b"*"; const IDNA_PREFIX: &[u8] = b"xn--"; /// Labels are always stored as ASCII, unicode characters must be encoded with punycode #[derive(Clone, Eq)] pub struct Label(Rc<[u8]>); impl Label { /// These must only be ASCII, with unicode encoded to PunyCode, or other such transformation. /// /// This uses the bytes as raw ascii values, with nothing escaped on the wire. /// Generally users should use `from_str` or `from_ascii` pub fn from_raw_bytes(bytes: &[u8]) -> ProtoResult<Self> { if bytes.len() > 63 { return Err(format!("Label exceeds maximum length 63: {}", bytes.len()).into()); }; Ok(Label(Rc::from(bytes))) } /// Translates this string into an IDNA-safe name, encoding to punycode as necessary. pub fn from_utf8(s: &str) -> ProtoResult<Self> { if s.as_bytes() == WILDCARD { return Ok(Label::wildcard()); } // special case for SRV type records if s.starts_with('_') { return Self::from_ascii(s); } match uts46::to_ascii( s, uts46::Flags { use_std3_ascii_rules: true, transitional_processing: true, verify_dns_length: true, }, ) { Ok(puny) => Self::from_ascii(&puny), Err(e) => Err(format!("Label contains invalid characters: {:?}", e).into()), } } /// Takes the ascii string and returns a new label. /// /// This will return an Error if the label is not an ascii string pub fn from_ascii(s: &str) -> ProtoResult<Self> { if s.as_bytes() == WILDCARD { return Ok(Label::wildcard()); } if !s.is_empty() && s.is_ascii() && s.chars().take(1).all(|c| is_safe_ascii(c, true, false)) && s.chars().skip(1).all(|c| is_safe_ascii(c, false, false)) { Label::from_raw_bytes(s.as_bytes()) } else { Err(format!("Malformed label: {}", s).into()) } } /// Returns a new Label of the Wildcard, i.e. "*" pub fn wildcard() -> Self { Label(Rc::from(WILDCARD.to_vec())) } /// Converts this label to lowercase pub fn to_lowercase(&self) -> Self { // TODO: replace case conversion when (ascii_ctype #39658) stabilizes if let Some((idx, _)) = self .0 .iter() .enumerate() .find(|&(_, c)| *c != c.to_ascii_lowercase()) { let mut lower_label: Vec<u8> = self.0.to_vec(); lower_label[idx..].make_ascii_lowercase(); Label(Rc::from(lower_label)) } else { self.clone() } } /// Returns true if this label is the wildcard, '*', label pub fn is_wildcard(&self) -> bool { self.as_bytes() == WILDCARD } /// Returns the length in bytes of this label pub fn len(&self) -> usize { self.0.len() } /// True if the label contains no characters pub fn is_empty(&self) -> bool { self.0.is_empty() } /// Returns the raw bytes of the label; this is good for writing to the wire.
/// /// See [`Display`] for the presentation version (unescaped from punycode, etc.) pub fn as_bytes(&self) -> &[u8] { &self.0 } /// Performs the equivalence operation disregarding case pub fn eq_ignore_ascii_case(&self, other: &Self) -> bool { self.0.eq_ignore_ascii_case(&other.0) } /// Compares with the other label, using the comparator `F` to decide case sensitivity pub fn cmp_with_f<F: LabelCmp>(&self, other: &Self) -> Ordering { let s = self.0.iter(); let o = other.0.iter(); for (s, o) in s.zip(o) { match F::cmp_u8(*s, *o) { Ordering::Equal => continue, not_eq => return not_eq, } } self.0.len().cmp(&other.0.len()) } /// Performs the conversion to utf8 from IDNA as necessary, see `fmt` for more details pub fn to_utf8(&self) -> String { format!("{}", self) } /// Converts this label to safe ascii, escaping characters as necessary /// /// If this is an IDNA (punycode) label, the xn-- prefix will be maintained as ascii pub fn
(&self) -> String { let mut ascii = String::with_capacity(self.as_bytes().len()); self.write_ascii(&mut ascii) .expect("should never fail to write a new string"); ascii } /// Writes this label to safe ascii, escaping characters as necessary pub fn write_ascii<W: Write>(&self, f: &mut W) -> Result<(), fmt::Error> { // We can't guarantee that the same input will always translate to the same output fn escape_non_ascii<W: Write>( byte: u8, f: &mut W, is_first: bool, ) -> Result<(), fmt::Error> { let to_triple_escape = |ch: u8| format!("\\{:03o}", ch); let to_single_escape = |ch: char| format!("\\{}", ch); match char::from(byte) { c if is_safe_ascii(c, is_first, true) => f.write_char(c)?, // it's not a control and is printable as well as inside the standard ascii range c if byte > b'\x20' && byte < b'\x7f' => f.write_str(&to_single_escape(c))?, _ => f.write_str(&to_triple_escape(byte))?, } Ok(()) } // traditional ascii case... let mut chars = self.as_bytes().iter(); if let Some(ch) = chars.next() { escape_non_ascii(*ch, f, true)?; } for ch in chars { escape_non_ascii(*ch, f, false)?; } Ok(()) } } impl AsRef<[u8]> for Label { fn as_ref(&self) -> &[u8] { &self.0 } } impl Borrow<[u8]> for Label { fn borrow(&self) -> &[u8] { &self.0 } } fn is_safe_ascii(c: char, is_first: bool, for_encoding: bool) -> bool { match c { c if !c.is_ascii() => false, c if c.is_alphanumeric() => true, '-' if !is_first => true, // dash is allowed '_' => true, // SRV like labels '*' if is_first => true, // wildcard '.' if !for_encoding => true, // needed to allow dots, for things like email addresses _ => false, } } impl Display for Label { /// Outputs characters in a safe string manner. /// /// If the string is punycode, i.e. starts with `xn--`, it is decoded to Unicode for display; /// otherwise it translates to a safe ascii string, escaping characters as necessary. fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> { if self.as_bytes().starts_with(IDNA_PREFIX) { // this should never be outside the ascii codes...
let label = String::from_utf8_lossy(self.borrow()); let (label, e) = uts46::to_unicode( &label, uts46::Flags { use_std3_ascii_rules: false, transitional_processing: false, verify_dns_length: false, }, ); if e.is_ok() { return f.write_str(&label); } else { debug!( "xn-- prefixed string did not translate via IDNA properly: {:?}", e ) } } // it wasn't known to be utf8 self.write_ascii(f) } } impl Debug for Label { fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> { let label = String::from_utf8_lossy(self.borrow()); f.write_str(&label) } } impl PartialEq<Label> for Label { fn eq(&self, other: &Self) -> bool { self.eq_ignore_ascii_case(other) } } impl PartialOrd<Label> for Label { fn partial_cmp(&self, other: &Label) -> Option<Ordering> { Some(self.cmp(other)) } } impl Ord for Label { fn cmp(&self, other: &Self) -> Ordering { self.cmp_with_f::<CaseInsensitive>(other) } } impl Hash for Label { fn hash<H>(&self, state: &mut H) where H: Hasher, { for b in self.borrow() as &[u8] { state.write_u8(b.to_ascii_lowercase()); } } } /// Label comparison trait for case sensitive or insensitive comparisons pub trait LabelCmp { /// this should mimic the cmp method from [`PartialOrd`] fn cmp_u8(l: u8, r: u8) -> Ordering; } /// For case sensitive comparisons pub(super) struct CaseSensitive; impl LabelCmp for CaseSensitive { fn cmp_u8(l: u8, r: u8) -> Ordering { l.cmp(&r) } } /// For case insensitive comparisons pub(super) struct CaseInsensitive; impl LabelCmp for CaseInsensitive { fn cmp_u8(l: u8, r: u8) -> Ordering { l.to_ascii_lowercase().cmp(&r.to_ascii_lowercase()) } } /// Conversion into a Label pub trait IntoLabel: Sized { /// Convert this into Label fn into_label(self: Self) -> ProtoResult<Label>; } impl<'a> IntoLabel for &'a Label { fn into_label(self: Self) -> ProtoResult<Label> { Ok(self.clone()) } } impl IntoLabel for Label { fn into_label(self: Self) -> ProtoResult<Label> { Ok(self) } } impl<'a> IntoLabel for &'a str { fn into_label(self: Self) -> ProtoResult<Label> { Label::from_utf8(self) } } impl IntoLabel for String { fn into_label(self: Self) -> ProtoResult<Label> { Label::from_utf8(&self) } } impl<'a> IntoLabel for &'a [u8] { fn into_label(self: Self) -> ProtoResult<Label> { Label::from_raw_bytes(self) } } impl IntoLabel for Vec<u8> { fn into_label(self: Self) -> ProtoResult<Label> { Label::from_raw_bytes(&self) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_encoding() { assert_eq!( Label::from_utf8("abc").unwrap(), Label::from_raw_bytes(b"abc").unwrap() ); // case insensitive, this works... 
assert_eq!( Label::from_utf8("ABC").unwrap(), Label::from_raw_bytes(b"ABC").unwrap() ); assert_eq!( Label::from_utf8("🦀").unwrap(), Label::from_raw_bytes(b"xn--zs9h").unwrap() ); assert_eq!( Label::from_utf8("rust-🦀-icon").unwrap(), Label::from_raw_bytes(b"xn--rust--icon-9447i").unwrap() ); assert_eq!( Label::from_ascii("ben.fry").unwrap(), Label::from_raw_bytes(b"ben.fry").unwrap() ); assert_eq!(Label::from_utf8("🦀").unwrap().to_utf8(), "🦀"); assert_eq!(Label::from_utf8("🦀").unwrap().to_ascii(), "xn--zs9h"); } #[test] fn test_decoding() { assert_eq!(Label::from_raw_bytes(b"abc").unwrap().to_string(), "abc"); assert_eq!( Label::from_raw_bytes(b"xn--zs9h").unwrap().to_string(), "🦀" ); assert_eq!( Label::from_raw_bytes(b"xn--rust--icon-9447i") .unwrap() .to_string(), "rust-🦀-icon" ); } #[test] fn test_to_lowercase() { assert_ne!(Label::from_ascii("ABC").unwrap().to_string(), "abc"); assert_ne!(Label::from_ascii("abcDEF").unwrap().to_string(), "abcdef"); assert_eq!( Label::from_ascii("ABC").unwrap().to_lowercase().to_string(), "abc" ); assert_eq!( Label::from_ascii("abcDEF") .unwrap() .to_lowercase() .to_string(), "abcdef" ); } #[test] fn test_to_cmp_f() { assert_eq!( Label::from_ascii("ABC") .unwrap() .cmp_with_f::<CaseInsensitive>(&Label::from_ascii("abc").unwrap()), Ordering::Equal ); assert_eq!( Label::from_ascii("abcDEF") .unwrap() .cmp_with_f::<CaseInsensitive>(&Label::from_ascii("abcdef").unwrap()), Ordering::Equal ); assert_eq!( Label::from_ascii("ABC") .unwrap() .cmp_with_f::<CaseSensitive>(&Label::from_ascii("abc").unwrap()), Ordering::Less ); assert_eq!( Label::from_ascii("abcDEF") .unwrap() .cmp_with_f::<CaseSensitive>(&Label::from_ascii("abcdef").unwrap()), Ordering::Less ); } #[test] fn test_partial_cmp() { let comparisons: Vec<(Label, Label)> = vec![ ( Label::from_raw_bytes(b"yljkjljk").unwrap(), Label::from_raw_bytes(b"Z").unwrap(), ), ( Label::from_raw_bytes(b"Z").unwrap(), Label::from_raw_bytes(b"zABC").unwrap(), ), ( Label::from_raw_bytes(&[1]).unwrap(), Label::from_raw_bytes(b"*").unwrap(), ), ( Label::from_raw_bytes(b"*").unwrap(), Label::from_raw_bytes(&[200]).unwrap(), ), ]; for (left, right) in comparisons { println!("left: {}, right: {}", left, right); assert_eq!(left.cmp(&right), Ordering::Less); } } #[test] fn test_is_wildcard() { assert!(Label::from_raw_bytes(b"*").unwrap().is_wildcard()); assert!(Label::from_ascii("*").unwrap().is_wildcard()); assert!(Label::from_utf8("*").unwrap().is_wildcard()); assert!(!Label::from_raw_bytes(b"abc").unwrap().is_wildcard()); } #[test] fn test_ascii_escape() { assert_eq!( Label::from_raw_bytes(&[0o200]).unwrap().to_string(), "\\200" ); assert_eq!( Label::from_raw_bytes(&[0o001]).unwrap().to_string(), "\\001" ); assert_eq!(Label::from_ascii(".").unwrap().to_ascii(), "\\."); assert_eq!( Label::from_ascii("ben.fry").unwrap().to_string(), "ben\\.fry" ); assert_eq!(Label::from_raw_bytes(&[0o200]).unwrap().to_ascii(), "\\200"); } }
to_ascii
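The hole in this record resolves to the method name `to_ascii`; reassembled, the completed signature reads:

/// Converts this label to safe ascii, escaping characters as necessary
///
/// If this is an IDNA (punycode) label, the xn-- prefix will be maintained as ascii
pub fn to_ascii(&self) -> String { /* body as in the suffix above */ }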
config.js
module.exports = {
base_url: 'https://www.anbient.com', }
helper.ts
import * as sinon from "sinon"; export const sandbox = sinon.createSandbox(); afterEach(() => { sandbox.verifyAndRestore(); });
export function stubAWSAPI<T>( Service: new (...args: any[]) => T, method: keyof T, fake: sinon.SinonSpy, ) { const service = new Service(); const proto = Object.getPrototypeOf(service); return sandbox.stub(proto, method) .callsFake((...args: any[]) => { return { promise: () => Promise.resolve(fake(...args)), }; }); }
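A hypothetical use of `stubAWSAPI` with the AWS SDK v2 S3 client — the `aws-sdk` import, the `getObject` method, and the mocha-style test wrapper are assumptions for illustration, not part of this helper:

import { S3 } from "aws-sdk";
import * as sinon from "sinon";
import { stubAWSAPI } from "./helper";

it("resolves with the faked payload", async () => {
  // stubAWSAPI patches the prototype, so every S3 instance is affected.
  const fake = sinon.fake.returns({ Body: "hello" });
  stubAWSAPI(S3, "getObject", fake);

  const res = await new S3().getObject({ Bucket: "b", Key: "k" }).promise();
  // res is { Body: "hello" }; fake received the request parameters.
});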
test_config_flow.py
"""Test the Hardkernel config flow.""" from unittest.mock import patch from homeassistant.components.hardkernel.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import RESULT_TYPE_ABORT, RESULT_TYPE_CREATE_ENTRY from tests.common import MockConfigEntry, MockModule, mock_integration async def test_config_flow(hass: HomeAssistant) -> None: """Test the config flow.""" mock_integration(hass, MockModule("hassio")) with patch( "homeassistant.components.hardkernel.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "system"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == "Hardkernel" assert result["data"] == {} assert result["options"] == {} assert len(mock_setup_entry.mock_calls) == 1 config_entry = hass.config_entries.async_entries(DOMAIN)[0] assert config_entry.data == {} assert config_entry.options == {} assert config_entry.title == "Hardkernel" async def
(hass: HomeAssistant) -> None: """Test only a single entry is allowed.""" mock_integration(hass, MockModule("hassio")) # Setup the config entry config_entry = MockConfigEntry( data={}, domain=DOMAIN, options={}, title="Hardkernel", ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.hardkernel.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "system"} ) assert result["type"] == RESULT_TYPE_ABORT assert result["reason"] == "single_instance_allowed" mock_setup_entry.assert_not_called()
test_config_flow_single_entry
types.rs
// Copyright 2020 The Grin Developers // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! Public Types used for cuckoo-miner module use plugin::SolverParams; use std::path::PathBuf; use std::{fmt, io}; use {CuckooMinerError, PluginLibrary}; pub static SO_SUFFIX: &str = ".cuckooplugin"; /// CuckooMinerPlugin configuration #[derive(Debug, Clone, Serialize, Deserialize)] pub struct
{ /// The display name of the plugin to load pub name: String, /// The path to the file pub file: String, /// device params pub params: SolverParams, } impl PluginConfig { /// create new! pub fn new(mut plugin_dir: PathBuf, name: &str) -> Result<PluginConfig, CuckooMinerError> { plugin_dir.push(format!("{}{}", name, SO_SUFFIX).as_str()); let plugin_file_str = plugin_dir.to_str().ok_or_else(|| { CuckooMinerError::PluginNotFoundError( "Invalid plugin path. Paths must be valid unicode".to_owned(), ) })?; PluginLibrary::new(plugin_file_str).map(|plugin_library| { let params = plugin_library.get_default_params(); plugin_library.unload(); PluginConfig { name: name.to_owned(), file: plugin_file_str.to_owned(), params, } }) } } /// Error type wrapping config errors. #[derive(Debug)] #[allow(dead_code)] pub enum ConfigError { /// Error with parsing of config file ParseError(String, String), /// Error with fileIO while reading config file FileIOError(String, String), /// No file found FileNotFoundError(String), /// Error serializing config values SerializationError(String), } impl fmt::Display for ConfigError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { ConfigError::ParseError(ref file_name, ref message) => write!( f, "Error parsing configuration file at {} - {}", file_name, message ), ConfigError::FileIOError(ref file_name, ref message) => { write!(f, "{} {}", message, file_name) } ConfigError::FileNotFoundError(ref file_name) => { write!(f, "Configuration file not found: {}", file_name) } ConfigError::SerializationError(ref message) => { write!(f, "Error serializing configuration: {}", message) } } } } impl From<io::Error> for ConfigError { fn from(error: io::Error) -> ConfigError { ConfigError::FileIOError( String::from(""), format!("Error loading config file: {}", error), ) } }
PluginConfig
console.ts
// eslint-disable-next-line @typescript-eslint/no-explicit-any function format (entry: any): string { if (typeof entry === 'object') { try { return JSON.stringify(entry) } catch { // } } return entry } // eslint-disable-next-line @typescript-eslint/no-explicit-any export function log (...msgs: any[]): void { process.stdout.write(msgs.map(format).join(' ') + '\n') } // eslint-disable-next-line @typescript-eslint/no-explicit-any export function warn (...msgs: any[]): void { process.stdout.write(msgs.map(format).join(' ') + '\n') }
export function mock (): void { // work around: https://github.com/tschaub/mock-fs/issues/234 global.console = require('./console') }
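A sketch of how `mock()` might be wired into a test that uses mock-fs — the mock-fs calls follow its documented API but are an assumption here, not taken from this repository:

import { mock } from "./console";
const mockFs = require("mock-fs"); // CommonJS require, matching the style above

beforeEach(() => {
  mock(); // swap in the stdout-backed console before mock-fs intercepts fs
  mockFs({ "/tmp/fixture.txt": "data" });
});

afterEach(() => mockFs.restore());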
17.js
(window["webpackJsonp"] = window["webpackJsonp"] || []).push([[17],{ /***/ "./node_modules/@ionic/core/dist/esm/es5/build/chunk-2f96b3d2.js": /*!***********************************************************************!*\ !*** ./node_modules/@ionic/core/dist/esm/es5/build/chunk-2f96b3d2.js ***! \***********************************************************************/ /*! exports provided: a, b, c, d */ /***/ (function(module, __webpack_exports__, __webpack_require__) { "use strict"; __webpack_require__.r(__webpack_exports__); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "a", function() { return getClassMap; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "b", function() { return openURL; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "c", function() { return createColorClasses; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "d", function() { return hostContext; }); /* harmony import */ var _polyfills_tslib_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../polyfills/tslib.js */ "./node_modules/@ionic/core/dist/esm/es5/polyfills/tslib.js"); function hostContext(t,r){return null!==r.closest(t)}function createColorClasses(t){var r;return"string"==typeof t&&t.length>0?((r={"ion-color":!0})["ion-color-"+t]=!0,r):void 0}function getClassList(t){return void 0!==t?(Array.isArray(t)?t:t.split(" ")).filter(function(t){return null!=t}).map(function(t){return t.trim()}).filter(function(t){return""!==t}):[]}function getClassMap(t){var r={};return getClassList(t).forEach(function(t){return r[t]=!0}),r}var SCHEME=/^[a-z][a-z0-9+\-.]*:/;function openURL(t,r,n,e){return _polyfills_tslib_js__WEBPACK_IMPORTED_MODULE_0__["__awaiter"](this,void 0,void 0,function(){var o;return _polyfills_tslib_js__WEBPACK_IMPORTED_MODULE_0__["__generator"](this,function(s){switch(s.label){case 0:return null==r||"#"===r[0]||SCHEME.test(r)?[3,2]:(o=t.document.querySelector("ion-router"))?(null!=n&&n.preventDefault(),[4,o.componentOnReady()]):[3,2];case 1:return s.sent(),[2,o.push(r,e)];case 2:return[2,!1]}})})} /***/ }), /***/ "./node_modules/@ionic/core/dist/esm/es5/build/chunk-6d7d2f8c.js": /*!***********************************************************************!*\ !*** ./node_modules/@ionic/core/dist/esm/es5/build/chunk-6d7d2f8c.js ***! \***********************************************************************/ /*! 
exports provided: a, b, c, d, e, f, g, h, i, j, k */ /***/ (function(module, __webpack_exports__, __webpack_require__) { "use strict"; __webpack_require__.r(__webpack_exports__); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "a", function() { return rIC; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "b", function() { return now; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "c", function() { return hasShadowDom; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "d", function() { return findItemLabel; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "e", function() { return renderHiddenInput; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "f", function() { return debounceEvent; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "g", function() { return isEndSide; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "h", function() { return assert; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "i", function() { return clamp; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "j", function() { return debounce; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "k", function() { return pointerCoord; }); function rIC(e){"requestIdleCallback"in window?window.requestIdleCallback(e):setTimeout(e,32)}function hasShadowDom(e){return!!e.shadowRoot&&!!e.attachShadow}function findItemLabel(e){var n=e.closest("ion-item");return n?n.querySelector("ion-label"):null}function renderHiddenInput(e,n,t,a,r){if(e||hasShadowDom(n)){var o=n.querySelector("input.aux-input");o||((o=n.ownerDocument.createElement("input")).type="hidden",o.classList.add("aux-input"),n.appendChild(o)),o.disabled=r,o.name=t,o.value=a||""}}function clamp(e,n,t){return Math.max(e,Math.min(n,t))}function assert(e,n){if(!e){var t="ASSERT: "+n;throw console.error(t),new Error(t)}}function now(e){return e.timeStamp||Date.now()}function pointerCoord(e){if(e){var n=e.changedTouches;if(n&&n.length>0){var t=n[0];return{x:t.clientX,y:t.clientY}}if(void 0!==e.pageX)return{x:e.pageX,y:e.pageY}}return{x:0,y:0}}function isEndSide(e,n){var t="rtl"===e.document.dir;switch(n){case"start":return t;case"end":return!t;default:throw new Error('"'+n+'" is not a valid value for [side]. Use "start" or "end" instead.')}}function debounceEvent(e,n){var t=e._original||e;return{_original:e,emit:debounce(t.emit.bind(t),n)}}function debounce(e,n){var t;return void 0===n&&(n=0),function(){for(var a=[],r=0;r<arguments.length;r++)a[r]=arguments[r];clearTimeout(t),t=setTimeout.apply(void 0,[e,n].concat(a))}} /***/ }), /***/ "./node_modules/@ionic/core/dist/esm/es5/build/ek05jvfc.sc.entry.js": /*!**************************************************************************!*\ !*** ./node_modules/@ionic/core/dist/esm/es5/build/ek05jvfc.sc.entry.js ***! \**************************************************************************/ /*! 
exports provided: IonSelect, IonSelectOption, IonSelectPopover */ /***/ (function(module, __webpack_exports__, __webpack_require__) { "use strict"; __webpack_require__.r(__webpack_exports__); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "IonSelect", function() { return Select; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "IonSelectOption", function() { return SelectOption; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "IonSelectPopover", function() { return SelectPopover; }); /* harmony import */ var _polyfills_tslib_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../polyfills/tslib.js */ "./node_modules/@ionic/core/dist/esm/es5/polyfills/tslib.js"); /* harmony import */ var _ionic_core_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../ionic.core.js */ "./node_modules/@ionic/core/dist/esm/es5/ionic.core.js"); /* harmony import */ var _chunk_2f96b3d2_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./chunk-2f96b3d2.js */ "./node_modules/@ionic/core/dist/esm/es5/build/chunk-2f96b3d2.js"); /* harmony import */ var _chunk_6d7d2f8c_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./chunk-6d7d2f8c.js */ "./node_modules/@ionic/core/dist/esm/es5/build/chunk-6d7d2f8c.js"); var Select=function(){function e(){var e=this;this.childOpts=[],this.inputId="ion-sel-"+selectIds++,this.didInit=!1,this.isExpanded=!1,this.disabled=!1,this.cancelText="Cancel",this.okText="OK",this.name=this.inputId,this.multiple=!1,this.interface="alert",this.interfaceOptions={},this.onFocus=function(){e.ionFocus.emit()},this.onBlur=function(){e.ionBlur.emit()}}return e.prototype.disabledChanged=function(){this.emitStyle()},e.prototype.valueChanged=function(){this.didInit&&(this.updateOptions(),this.ionChange.emit({value:this.value}),this.emitStyle())},e.prototype.selectOptionChanged=function(){return _polyfills_tslib_js__WEBPACK_IMPORTED_MODULE_0__["__awaiter"](this,void 0,void 0,function(){return _polyfills_tslib_js__WEBPACK_IMPORTED_MODULE_0__["__generator"](this,function(e){switch(e.label){case 0:return[4,this.loadOptions()];case 1:return e.sent(),this.didInit&&(this.updateOptions(),this.updateOverlayOptions(),this.emitStyle(),void 0!==this.value&&this.el.forceUpdate()),[2]}})})},e.prototype.onClick=function(e){this.setFocus(),this.open(e)},e.prototype.componentDidLoad=function(){return _polyfills_tslib_js__WEBPACK_IMPORTED_MODULE_0__["__awaiter"](this,void 0,void 0,function(){var e;return _polyfills_tslib_js__WEBPACK_IMPORTED_MODULE_0__["__generator"](this,function(t){switch(t.label){case 0:return[4,this.loadOptions()];case 1:return t.sent(),void 0===this.value&&(this.multiple?(e=this.childOpts.filter(function(e){return e.selected}),this.value=e.map(function(e){return e.value})):(e=this.childOpts.find(function(e){return e.selected}))&&(this.value=e.value)),this.updateOptions(),this.emitStyle(),this.el.forceUpdate(),this.didInit=!0,[2]}})})},e.prototype.open=function(e){return _polyfills_tslib_js__WEBPACK_IMPORTED_MODULE_0__["__awaiter"](this,void 0,void 0,function(){var t,n,i=this;return _polyfills_tslib_js__WEBPACK_IMPORTED_MODULE_0__["__generator"](this,function(o){switch(o.label){case 0:return this.disabled||this.isExpanded?[2,void 0]:(n=this,[4,this.createOverlay(e)]);case 1:return t=n.overlay=o.sent(),this.isExpanded=!0,t.onDidDismiss().then(function(){i.overlay=void 0,i.isExpanded=!1,i.setFocus()}),[4,t.present()];case 2:return 
o.sent(),[2,t]}})})},e.prototype.createOverlay=function(e){var t=this.interface;return"action-sheet"!==t&&"popover"!==t||!this.multiple||(console.warn('Select interface cannot be "'+t+'" with a multi-value select. Using the "alert" interface instead.'),t="alert"),"popover"!==t||e||(console.warn('Select interface cannot be a "popover" without passing an event. Using the "alert" interface instead.'),t="alert"),"popover"===t?this.openPopover(e):"action-sheet"===t?this.openActionSheet():this.openAlert()},e.prototype.updateOverlayOptions=function(){if(this.overlay){var e=this.overlay;switch(this.interface){case"action-sheet":e.buttons=this.createActionSheetButtons(this.childOpts);break;case"popover":var t=e.querySelector("ion-select-popover");t&&(t.options=this.createPopoverOptions(this.childOpts));break;default:e.inputs=this.createAlertInputs(this.childOpts,this.multiple?"checkbox":"radio")}}},e.prototype.createActionSheetButtons=function(e){var t=this,n=e.map(function(e){return{role:e.selected?"selected":"",text:e.textContent,handler:function(){t.value=e.value}}});return n.push({text:this.cancelText,role:"cancel",handler:function(){t.ionCancel.emit()}}),n},e.prototype.createAlertInputs=function(e,t){return e.map(function(e){return{type:t,label:e.textContent,value:e.value,checked:e.selected,disabled:e.disabled}})},e.prototype.createPopoverOptions=function(e){var t=this;return e.map(function(e){return{text:e.textContent,value:e.value,checked:e.selected,disabled:e.disabled,handler:function(){t.value=e.value,t.close()}}})},e.prototype.openPopover=function(e){return _polyfills_tslib_js__WEBPACK_IMPORTED_MODULE_0__["__awaiter"](this,void 0,void 0,function(){var t,n;return _polyfills_tslib_js__WEBPACK_IMPORTED_MODULE_0__["__generator"](this,function(i){return t=this.interfaceOptions,n=Object.assign({mode:this.mode},t,{component:"ion-select-popover",cssClass:["select-popover",t.cssClass],event:e,componentProps:{header:t.header,subHeader:t.subHeader,message:t.message,value:this.value,options:this.createPopoverOptions(this.childOpts)}}),[2,this.popoverCtrl.create(n)]})})},e.prototype.openActionSheet=function(){return _polyfills_tslib_js__WEBPACK_IMPORTED_MODULE_0__["__awaiter"](this,void 0,void 0,function(){var e,t;return _polyfills_tslib_js__WEBPACK_IMPORTED_MODULE_0__["__generator"](this,function(n){return e=this.interfaceOptions,t=Object.assign({mode:this.mode},e,{buttons:this.createActionSheetButtons(this.childOpts),cssClass:["select-action-sheet",e.cssClass]}),[2,this.actionSheetCtrl.create(t)]})})},e.prototype.openAlert=function(){return _polyfills_tslib_js__WEBPACK_IMPORTED_MODULE_0__["__awaiter"](this,void 0,void 0,function(){var e,t,n,i,o=this;return _polyfills_tslib_js__WEBPACK_IMPORTED_MODULE_0__["__generator"](this,function(r){return e=this.getLabel(),t=e?e.textContent:null,n=this.interfaceOptions,i=Object.assign({mode:this.mode},n,{header:n.header?n.header:t,inputs:this.createAlertInputs(this.childOpts,this.multiple?"checkbox":"radio"),buttons:[{text:this.cancelText,role:"cancel",handler:function(){o.ionCancel.emit()}},{text:this.okText,handler:function(e){o.value=e}}],cssClass:["select-alert",n.cssClass,this.multiple?"multiple-select-alert":"single-select-alert"]}),[2,this.alertCtrl.create(i)]})})},e.prototype.close=function(){return this.overlay?this.overlay.dismiss():Promise.resolve(!1)},e.prototype.loadOptions=function(){return _polyfills_tslib_js__WEBPACK_IMPORTED_MODULE_0__["__awaiter"](this,void 0,void 0,function(){var e;return 
_polyfills_tslib_js__WEBPACK_IMPORTED_MODULE_0__["__generator"](this,function(t){switch(t.label){case 0:return e=this,[4,Promise.all(Array.from(this.el.querySelectorAll("ion-select-option")).map(function(e){return e.componentOnReady()}))];case 1:return e.childOpts=t.sent(),[2]}})})},e.prototype.updateOptions=function(){for(var e=!0,t=0,n=this.childOpts;t<n.length;t++){var i=n[t],o=e&&isOptionSelected(this.value,i.value,this.compareWith);i.selected=o,o&&!this.multiple&&(e=!1)}},e.prototype.getLabel=function(){return Object(_chunk_6d7d2f8c_js__WEBPACK_IMPORTED_MODULE_3__["d"])(this.el)},e.prototype.hasValue=function(){return""!==this.getText()},e.prototype.getText=function(){var e=this.selectedText;return null!=e&&""!==e?e:generateText(this.childOpts,this.value,this.compareWith)},e.prototype.setFocus=function(){this.buttonEl&&this.buttonEl.focus()},e.prototype.emitStyle=function(){this.ionStyle.emit({interactive:!0,select:!0,"has-placeholder":null!=this.placeholder,"has-value":this.hasValue(),"interactive-disabled":this.disabled,"select-disabled":this.disabled})},e.prototype.hostData=function(){var e,t=this.inputId+"-lbl",n=Object(_chunk_6d7d2f8c_js__WEBPACK_IMPORTED_MODULE_3__["d"])(this.el);return n&&(n.id=t),{role:"combobox","aria-disabled":this.disabled?"true":null,"aria-expanded":""+this.isExpanded,"aria-haspopup":"dialog","aria-labelledby":t,class:(e={},e[""+this.mode]=!0,e["in-item"]=Object(_chunk_2f96b3d2_js__WEBPACK_IMPORTED_MODULE_2__["d"])("ion-item",this.el),e["select-disabled"]=this.disabled,e)}},e.prototype.render=function(){var e=this;Object(_chunk_6d7d2f8c_js__WEBPACK_IMPORTED_MODULE_3__["e"])(!0,this.el,this.name,parseValue(this.value),this.disabled);var t=this.inputId+"-lbl",n=Object(_chunk_6d7d2f8c_js__WEBPACK_IMPORTED_MODULE_3__["d"])(this.el);n&&(n.id=t);var i=!1,o=this.getText();return""===o&&null!=this.placeholder&&(o=this.placeholder,i=!0),[Object(_ionic_core_js__WEBPACK_IMPORTED_MODULE_1__["h"])("div",{class:{"select-text":!0,"select-placeholder":i}},o),Object(_ionic_core_js__WEBPACK_IMPORTED_MODULE_1__["h"])("div",{class:"select-icon",role:"presentation"},Object(_ionic_core_js__WEBPACK_IMPORTED_MODULE_1__["h"])("div",{class:"select-icon-inner"})),Object(_ionic_core_js__WEBPACK_IMPORTED_MODULE_1__["h"])("button",{type:"button",onFocus:this.onFocus,onBlur:this.onBlur,disabled:this.disabled,ref:function(t){return 
e.buttonEl=t}})]},Object.defineProperty(e,"is",{get:function(){return"ion-select"},enumerable:!0,configurable:!0}),Object.defineProperty(e,"encapsulation",{get:function(){return"shadow"},enumerable:!0,configurable:!0}),Object.defineProperty(e,"properties",{get:function(){return{actionSheetCtrl:{connect:"ion-action-sheet-controller"},alertCtrl:{connect:"ion-alert-controller"},cancelText:{type:String,attr:"cancel-text"},compareWith:{type:String,attr:"compare-with"},disabled:{type:Boolean,attr:"disabled",watchCallbacks:["disabledChanged"]},el:{elementRef:!0},interface:{type:String,attr:"interface"},interfaceOptions:{type:"Any",attr:"interface-options"},isExpanded:{state:!0},mode:{type:String,attr:"mode"},multiple:{type:Boolean,attr:"multiple"},name:{type:String,attr:"name"},okText:{type:String,attr:"ok-text"},open:{method:!0},placeholder:{type:String,attr:"placeholder"},popoverCtrl:{connect:"ion-popover-controller"},selectedText:{type:String,attr:"selected-text"},value:{type:"Any",attr:"value",mutable:!0,watchCallbacks:["valueChanged"]}}},enumerable:!0,configurable:!0}),Object.defineProperty(e,"events",{get:function(){return[{name:"ionChange",method:"ionChange",bubbles:!0,cancelable:!0,composed:!0},{name:"ionCancel",method:"ionCancel",bubbles:!0,cancelable:!0,composed:!0},{name:"ionFocus",method:"ionFocus",bubbles:!0,cancelable:!0,composed:!0},{name:"ionBlur",method:"ionBlur",bubbles:!0,cancelable:!0,composed:!0},{name:"ionStyle",method:"ionStyle",bubbles:!0,cancelable:!0,composed:!0}]},enumerable:!0,configurable:!0}),Object.defineProperty(e,"listeners",{get:function(){return[{name:"ionSelectOptionDidLoad",method:"selectOptionChanged"},{name:"ionSelectOptionDidUnload",method:"selectOptionChanged"},{name:"click",method:"onClick"}]},enumerable:!0,configurable:!0}),Object.defineProperty(e,"style",{get:function(){return".sc-ion-select-md-h{padding-left:var(--padding-start);padding-right:var(--padding-end);padding-top:var(--padding-top);padding-bottom:var(--padding-bottom);display:-ms-flexbox;display:flex;position:relative;font-family:var(--ion-font-family,inherit);overflow:hidden;z-index:2}\@supports ((-webkit-margin-start:0) or (margin-inline-start:0)) or (-webkit-margin-start:0){.sc-ion-select-md-h{padding-left:unset;padding-right:unset;-webkit-padding-start:var(--padding-start);padding-inline-start:var(--padding-start);-webkit-padding-end:var(--padding-end);padding-inline-end:var(--padding-end)}}.in-item.sc-ion-select-md-h{position:static;max-width:45%}.select-disabled.sc-ion-select-md-h{opacity:.4;pointer-events:none}.ion-focused.sc-ion-select-md-h button.sc-ion-select-md{border:2px solid #5e9ed6}.select-placeholder.sc-ion-select-md{color:currentColor;opacity:.33}button.sc-ion-select-md{left:0;top:0;margin-left:0;margin-right:0;margin-top:0;margin-bottom:0;position:absolute;width:100%;height:100%;border:0;background:transparent;cursor:pointer;-webkit-appearance:none;-moz-appearance:none;appearance:none;outline:none}[dir=rtl].sc-ion-select-md-h button.sc-ion-select-md, [dir=rtl] .sc-ion-select-md-h button.sc-ion-select-md, [dir=rtl].sc-ion-select-md button.sc-ion-select-md{left:unset;right:unset;right:0}button.sc-ion-select-md::-moz-focus-inner{border:0}.select-icon.sc-ion-select-md{position:relative}.select-text.sc-ion-select-md{-ms-flex:1;flex:1;min-width:16px;font-size:inherit;text-overflow:ellipsis;white-space:nowrap;overflow:hidden}.select-icon-inner.sc-ion-select-md{left:5px;top:50%;margin-top:-3px;position:absolute;width:0;height:0;border-top:5px solid;border-right:5px solid 
transparent;border-left:5px solid transparent;color:currentColor;opacity:.33;pointer-events:none}[dir=rtl].sc-ion-select-md-h .select-icon-inner.sc-ion-select-md, [dir=rtl] .sc-ion-select-md-h .select-icon-inner.sc-ion-select-md, [dir=rtl].sc-ion-select-md .select-icon-inner.sc-ion-select-md{left:unset;right:unset;right:5px}.sc-ion-select-md-h{--padding-top:10px;--padding-end:0;--padding-bottom:11px;--padding-start:16px}.select-icon.sc-ion-select-md{width:19px;height:19px}"},enumerable:!0,configurable:!0}),Object.defineProperty(e,"styleMode",{get:function(){return"md"},enumerable:!0,configurable:!0}),e}();function parseValue(e){if(null!=e)return Array.isArray(e)?e.join(","):e.toString()}function isOptionSelected(e,t,n){return void 0!==e&&(Array.isArray(e)?e.some(function(e){return compareOptions(e,t,n)}):compareOptions(e,t,n))}function compareOptions(e,t,n){return"function"==typeof n?n(e,t):"string"==typeof n?e[n]===t[n]:e===t}function generateText(e,t,n){return void 0===t?"":Array.isArray(t)?t.map(function(t){return textForValue(e,t,n)}).filter(function(e){return null!==e}).join(", "):textForValue(e,t,n)||""}function textForValue(e,t,n){var i=e.find(function(e){return compareOptions(e.value,t,n)});return i?i.textContent:null}var selectIds=0,SelectOption=function(){function e(){this.inputId="ion-selopt-"+selectOptionIds++,this.disabled=!1,this.selected=!1}return e.prototype.componentWillLoad=function(){void 0===this.value&&(this.value=this.el.textContent||"")},e.prototype.componentDidLoad=function(){this.ionSelectOptionDidLoad.emit()},e.prototype.componentDidUnload=function(){this.ionSelectOptionDidUnload.emit()},e.prototype.hostData=function(){var e;return{role:"option",id:this.inputId,class:(e={},e[""+this.mode]=!0,e)}},Object.defineProperty(e,"is",{get:function(){return"ion-select-option"},enumerable:!0,configurable:!0}),Object.defineProperty(e,"encapsulation",{get:function(){return"shadow"},enumerable:!0,configurable:!0}),Object.defineProperty(e,"properties",{get:function(){return{disabled:{type:Boolean,attr:"disabled"},el:{elementRef:!0},selected:{type:Boolean,attr:"selected"},value:{type:"Any",attr:"value",mutable:!0}}},enumerable:!0,configurable:!0}),Object.defineProperty(e,"events",{get:function(){return[{name:"ionSelectOptionDidLoad",method:"ionSelectOptionDidLoad",bubbles:!0,cancelable:!0,composed:!0},{name:"ionSelectOptionDidUnload",method:"ionSelectOptionDidUnload",bubbles:!0,cancelable:!0,composed:!0}]},enumerable:!0,configurable:!0}),Object.defineProperty(e,"style",{get:function(){return".sc-ion-select-option-h{display:none}"},enumerable:!0,configurable:!0}),e}(),selectOptionIds=0,SelectPopover=function(){function e(){this.options=[]}return e.prototype.onSelect=function(e){var t=this.options.find(function(t){return t.value===e.target.value});t&&t.handler&&t.handler()},e.prototype.hostData=function(){var e;return{class:(e={},e[""+this.mode]=!0,e)}},e.prototype.render=function(){return Object(_ionic_core_js__WEBPACK_IMPORTED_MODULE_1__["h"])("ion-list",null,void 0!==this.header&&Object(_ionic_core_js__WEBPACK_IMPORTED_MODULE_1__["h"])("ion-list-header",null,this.header),(void 0!==this.subHeader||void 0!==this.message)&&Object(_ionic_core_js__WEBPACK_IMPORTED_MODULE_1__["h"])("ion-item",null,Object(_ionic_core_js__WEBPACK_IMPORTED_MODULE_1__["h"])("ion-label",{"text-wrap":!0},void 0!==this.subHeader&&Object(_ionic_core_js__WEBPACK_IMPORTED_MODULE_1__["h"])("h3",null,this.subHeader),void 
0!==this.message&&Object(_ionic_core_js__WEBPACK_IMPORTED_MODULE_1__["h"])("p",null,this.message))),Object(_ionic_core_js__WEBPACK_IMPORTED_MODULE_1__["h"])("ion-radio-group",null,this.options.map(function(e){return Object(_ionic_core_js__WEBPACK_IMPORTED_MODULE_1__["h"])("ion-item",null,Object(_ionic_core_js__WEBPACK_IMPORTED_MODULE_1__["h"])("ion-label",null,e.text),Object(_ionic_core_js__WEBPACK_IMPORTED_MODULE_1__["h"])("ion-radio",{checked:e.checked,value:e.value,disabled:e.disabled}))})))},Object.defineProperty(e,"is",{get:function(){return"ion-select-popover"},enumerable:!0,configurable:!0}),Object.defineProperty(e,"encapsulation",{get:function(){return"scoped"},enumerable:!0,configurable:!0}),Object.defineProperty(e,"properties",{get:function(){return{header:{type:String,attr:"header"},message:{type:String,attr:"message"},options:{type:"Any",attr:"options"},subHeader:{type:String,attr:"sub-header"}}},enumerable:!0,configurable:!0}),Object.defineProperty(e,"listeners",{get:function(){return[{name:"ionSelect",method:"onSelect"}]},enumerable:!0,configurable:!0}),Object.defineProperty(e,"style",{get:function(){return".sc-ion-select-popover-h ion-list.sc-ion-select-popover{margin-left:0;margin-right:0;margin-top:-1px;margin-bottom:-1px}.sc-ion-select-popover-h ion-label.sc-ion-select-popover, .sc-ion-select-popover-h ion-list-header.sc-ion-select-popover{margin-left:0;margin-right:0;margin-top:0;margin-bottom:0}"},enumerable:!0,configurable:!0}),e}(); /***/ }), /***/ "./node_modules/@ionic/core/dist/esm/es5/polyfills/tslib.js": /*!******************************************************************!*\ !*** ./node_modules/@ionic/core/dist/esm/es5/polyfills/tslib.js ***! \******************************************************************/ /*! exports provided: __extends, __assign, __rest, __decorate, __param, __metadata, __awaiter, __generator, __exportStar, __values, __read, __spread, __await, __makeTemplateObject, __importStar, __importDefault */ /***/ (function(module, __webpack_exports__, __webpack_require__) { "use strict"; __webpack_require__.r(__webpack_exports__); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "__extends", function() { return __extends; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "__assign", function() { return __assign; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "__rest", function() { return __rest; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "__decorate", function() { return __decorate; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "__param", function() { return __param; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "__metadata", function() { return __metadata; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "__awaiter", function() { return __awaiter; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "__generator", function() { return __generator; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "__exportStar", function() { return __exportStar; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "__values", function() { return __values; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "__read", function() { return __read; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "__spread", function() { return 
__spread; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "__await", function() { return __await; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "__makeTemplateObject", function() { return __makeTemplateObject; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "__importStar", function() { return __importStar; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "__importDefault", function() { return __importDefault; }); // REV: 9dd9aa322c893e5e0b3f1609b1126314ccf37bbb /*! ***************************************************************************** Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, MERCHANTABLITY OR NON-INFRINGEMENT. See the Apache Version 2.0 License for specific language governing permissions and limitations under the License. ***************************************************************************** */ /* global Reflect, Promise */ var extendStatics = function(d, b) { extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return extendStatics(d, b); }; function __extends(d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); } var __assign = function() { __assign = Object.assign || function __assign(t) { for (var s, i = 1, n = arguments.length; i < n; i++) { s = arguments[i]; for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; } return t; } return __assign.apply(this, arguments); } function __rest(s, e) { var t = {}; for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) t[p] = s[p]; if (s != null && typeof Object.getOwnPropertySymbols === "function") for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) if (e.indexOf(p[i]) < 0) t[p[i]] = s[p[i]]; return t; } function __decorate(decorators, target, key, desc) { var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; return c > 3 && r && Object.defineProperty(target, key, r), r; } function __param(paramIndex, decorator) { return function (target, key) { decorator(target, key, paramIndex); } } function __metadata(metadataKey, metadataValue) { if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); } function __awaiter(thisArg, _arguments, P, generator) { return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function
(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); } function __generator(thisArg, body) { var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; function verb(n) { return function (v) { return step([n, v]); }; } function step(op) { if (f) throw new TypeError("Generator is already executing."); while (_) try { if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; if (y = 0, t) op = [op[0] & 2, t.value]; switch (op[0]) { case 0: case 1: t = op; break; case 4: _.label++; return { value: op[1], done: false }; case 5: _.label++; y = op[1]; op = [0]; continue; case 7: op = _.ops.pop(); _.trys.pop(); continue; default: if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } if (t[2]) _.ops.pop(); _.trys.pop(); continue; } op = body.call(thisArg, _); } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; } } function __exportStar(m, exports) { for (var p in m) if (!exports.hasOwnProperty(p)) exports[p] = m[p]; } function __values(o) { var m = typeof Symbol === "function" && o[Symbol.iterator], i = 0; if (m) return m.call(o); return { next: function () { if (o && i >= o.length) o = void 0; return { value: o && o[i++], done: !o }; } }; } function __read(o, n) { var m = typeof Symbol === "function" && o[Symbol.iterator]; if (!m) return o; var i = m.call(o), r, ar = [], e; try { while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); } catch (error) { e = { error: error }; } finally { try { if (r && !r.done && (m = i["return"])) m.call(i); } finally { if (e) throw e.error; } } return ar; } function __spread() { for (var ar = [], i = 0; i < arguments.length; i++) ar = ar.concat(__read(arguments[i])); return ar; } function __await(v) { return this instanceof __await ? (this.v = v, this) : new __await(v); } function __makeTemplateObject(cooked, raw) { if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } return cooked; }; function __importStar(mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; result.default = mod; return result; } function __importDefault(mod) { return (mod && mod.__esModule) ? mod : { default: mod }; } /***/ }) }]); //# sourceMappingURL=17.js.map
rejected
solution.go
package leetcode // Binary Search func findPeakElement(nums []int) int { if len(nums) == 1 { return 0 } start := 0 end := len(nums) - 1 for start < end-1 { mid := (start + end) / 2 if nums[mid] < nums[mid-1] { end = mid } else if nums[mid] < nums[mid+1]
else { return mid } } if nums[start] < nums[end] { return end } return start }
{ start = mid }
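Reassembled, the search loop reads as follows; the invariant is to step toward a rising neighbor, so the window always contains a peak and the search finishes in O(log n):

for start < end-1 {
	mid := (start + end) / 2
	if nums[mid] < nums[mid-1] {
		end = mid // descending on the left: a peak lies at or left of mid
	} else if nums[mid] < nums[mid+1] {
		start = mid // rising on the right: a peak lies right of mid
	} else {
		return mid // nums[mid] is no smaller than either neighbor: a peak
	}
}

For example, findPeakElement([]int{1, 2, 3, 1}) returns 2, the index of the peak value 3.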
ExtractSantini.py
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Wed May 29 16:16:57 2019 @author: rakshit """ import os import cv2 import argparse import matplotlib import numpy as np import deepdish as dd import scipy.io as scio print('Extracting Santini') parser = argparse.ArgumentParser() parser.add_argument('--noDisp', help='Specify flag to suppress display of labelled images', type=int) parser.add_argument('--path2ds', help='Path to dataset', type=str) args = parser.parse_args() if args.noDisp: noDisp = True print('No graphics') else: noDisp = False print('Showing figures') gui_env = ['Qt5Agg','WXAgg','TKAgg','GTKAgg'] for gui in gui_env: try: print("testing: {}".format(gui)) matplotlib.use(gui, warn=False, force=True) from matplotlib import pyplot as plt break except: continue print("Using: {}".format(matplotlib.get_backend())) plt.ion() args.path2ds = '/media/rakshit/tank/Dataset' PATH_DIR = os.path.join(args.path2ds, 'Santini') PATH_DS = os.path.join(args.path2ds, 'All') PATH_MASTER = os.path.join(args.path2ds, 'MasterKey') list_ds = ['1', '2', '3', '4', '5', '6'] sc = (640.0/384.0) Image_counter = 0.0 ds_num = 24 def mypause(interval): backend = plt.rcParams['backend'] if backend in matplotlib.rcsetup.interactive_bk: figManager = matplotlib._pylab_helpers.Gcf.get_active() if figManager is not None: canvas = figManager.canvas if canvas.figure.stale: canvas.draw() canvas.start_event_loop(interval) return def fix_pupil_loc(p, res): # res: [H, W] p[0] = 0.5*p[0] p[1] = res[0] - 0.5*p[1] return p def readFormattedText(path2file, ignoreLines): data = [] count = 0 f = open(path2file, 'r') for line in f: d = [int(d) for d in line.split() if d.isdigit()] count = count + 1 if d and count > ignoreLines: data.append(d) f.close() return data for name in list_ds: # Ignore the first row and column. # Columns: [index, p_x, p_y] opts = os.listdir(os.path.join(PATH_DIR, name)) for subdir in opts:
PATH_DATA = os.path.join(PATH_DIR, name, subdir) # Read pupil data Path2text = os.path.join(PATH_DATA, 'journal-{:04d}.txt'.format(int(subdir)-1)) Path2vid = os.path.join(PATH_DATA, 'eye-{:04d}-0000.avi'.format(int(subdir)-1)) PupilData = np.array(readFormattedText(Path2text, 2)) VidObj = cv2.VideoCapture(Path2vid) keydict = {k:[] for k in ['pupil_loc', 'archive', 'data_type', 'resolution', 'dataset', 'subset']} # Generate empty dictionaries keydict['data_type'] = 0 # Only pupil center available keydict['resolution'] = [] keydict['dataset'] = 'Santini' keydict['subset'] = '{}-{}'.format(name, subdir) # Create an empty dictionary as per agreed structure Data = {k:[] for k in ['Images', 'Info', 'Masks', 'Masks_noSkin', 'Fits', 'pupil_loc']} Data['Fits'] = {k:[] for k in ['pupil', 'pupil_norm', 'pupil_phi', 'iris', 'iris_norm', 'iris_phi']} if not noDisp: fig, plts = plt.subplots(1,1) fr_num = 0 while(VidObj.isOpened()): ret, I = VidObj.read() if ret == True: I = cv2.cvtColor(I, cv2.COLOR_BGR2GRAY) I = cv2.resize(I, (640, 480), interpolation=cv2.INTER_LANCZOS4) Data['Images'].append(I) keydict['resolution'].append(I.shape) keydict['archive'].append(ds_num) pupil_loc = fix_pupil_loc(PupilData[fr_num, 10:12]*sc, I.shape) keydict['pupil_loc'].append(pupil_loc) Data['pupil_loc'].append(pupil_loc) Data['Info'].append(str(fr_num)) fr_num+=1 Image_counter+=1 if not noDisp: if fr_num == 1: cI = plts.imshow(I) cX = plts.scatter(pupil_loc[0], pupil_loc[1]) plt.show() plt.pause(.01) else: newLoc = np.array([pupil_loc[0], pupil_loc[1]]) cI.set_data(I) cX.set_offsets(newLoc) mypause(0.01) else: # No more frames to load break Data['Images'] = np.stack(Data['Images'], axis=0) Data['pupil_loc'] = np.stack(Data['pupil_loc'], axis=0) keydict['pupil_loc'] = np.stack(keydict['pupil_loc'], axis=0) keydict['resolution'] = np.stack(keydict['resolution'], axis=0) keydict['archive'] = np.stack(keydict['archive'], axis=0) # Save out data dd.io.save(os.path.join(PATH_DS, str(ds_num)+'.h5'), Data) scio.savemat(os.path.join(PATH_MASTER, str(ds_num)), keydict, appendmat=True) ds_num=ds_num+1
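The `fix_pupil_loc` helper above halves the raw journal coordinates and flips the y-axis into image convention (origin at top-left). A worked example with made-up numbers, assuming a 480×640 frame:

import numpy as np

def fix_pupil_loc(p, res):  # res: [H, W], copied from the script above
    p[0] = 0.5*p[0]
    p[1] = res[0] - 0.5*p[1]
    return p

print(fix_pupil_loc(np.array([100.0, 200.0]), (480, 640)))  # -> [ 50. 380.]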
invite-user-button.component.ts
import { Component, EventEmitter, Inject, Input, OnInit, Optional } from '@angular/core'; import { NgSelectComponent } from "@ng-select/ng-select"; import { OpEditingPortalSchemaToken } from "core-app/modules/fields/edit/edit-field.component"; import { IFieldSchema } from "core-app/modules/fields/field.base"; import { SelectEditFieldComponent } from "core-app/modules/fields/edit/field-types/select-edit-field.component"; import { MultiSelectEditFieldComponent } from "core-app/modules/fields/edit/field-types/multi-select-edit-field.component"; import { I18nService } from "core-app/modules/common/i18n/i18n.service"; import { PermissionsService } from "core-app/core/services/permissions/permissions.service"; import { OpInviteUserModalService } from "core-app/modules/invite-user-modal/invite-user-modal.service"; @Component({ selector: 'op-invite-user-button', templateUrl: './invite-user-button.component.html', styleUrls: ['./invite-user-button.component.sass'] }) export class
implements OnInit { /* This component does not provide an output, because both primary usecases were in places where the button was * destroyed before the modal closed, causing the data from the modal to never arrive at the parent. * If you want to do something with the output from the modal that is opened, use the OpInviteUserModalService * and subscribe to the `close` event there. */ get showButton() { const showButton = this.schema?.type === 'User' && this.canInviteUsersToProject && (this.selectEditFieldComponent || this.multiSelectEditFieldComponent); return showButton; } get parentIsMultiSelectEditFieldComponent() { return !!this.multiSelectEditFieldComponent; } text = { button: this.I18n.t('js.invite_user_modal.invite'), }; canInviteUsersToProject:boolean; constructor( readonly I18n:I18nService, readonly opInviteUserModalService:OpInviteUserModalService, readonly permissionsService:PermissionsService, readonly ngSelectComponent:NgSelectComponent, @Optional() readonly selectEditFieldComponent:SelectEditFieldComponent, @Optional() readonly multiSelectEditFieldComponent:MultiSelectEditFieldComponent, @Inject(OpEditingPortalSchemaToken) public schema:IFieldSchema, ) {} ngOnInit():void { this.permissionsService .canInviteUsersToProject() .subscribe(canInviteUsersToProject => this.canInviteUsersToProject = canInviteUsersToProject); } onAddNewClick($event:Event) { $event.stopPropagation(); this.opInviteUserModalService.open(); this.ngSelectComponent.close(); } }
InviteUserButtonComponent
__init__.py
import os
import base64

from simpleutil.utils import digestutils

from goperation.filemanager import LocalFile
from goperation.manager.rpc.agent.application.taskflow.middleware import EntityMiddleware
from goperation.manager.rpc.agent.application.taskflow.database import Database
from goperation.manager.rpc.agent.application.taskflow.application import AppUpgradeFile
from goperation.manager.rpc.agent.application.taskflow.application import AppLocalBackupFile

from gogamechen3.api import gfile


class GogameMiddle(EntityMiddleware):

    def __init__(self, entity, endpoint, objtype):
        super(GogameMiddle, self).__init__(entity, endpoint)
        self.objtype = objtype
        self.databases = {}
        self.waiter = None


class GogameDatabase(Database):

    def __init__(self, **kwargs):
        super(GogameDatabase, self).__init__(**kwargs)
        self.database_id = kwargs.get('database_id')
        self.source = kwargs.get('source')
        self.rosource = kwargs.get('rosource')
        self.subtype = kwargs.get('subtype')
        self.ro_user = kwargs.get('ro_user')
        self.ro_passwd = kwargs.get('ro_passwd')


class GogameAppFile(AppUpgradeFile):

    def __init__(self, source, objtype, revertable=False, rollback=False, stream=None):
        super(GogameAppFile, self).__init__(source, revertable, rollback)
        self.objtype = objtype
        self.stream = stream

    def post_check(self):
        gfile.check(self.objtype, self.file)

    def clean(self):
        if self.stream:
            os.remove(self.file)

    def prepare(self, middleware=None, timeout=None):
        if self.stream:
            if len(self.stream) > 5000:
                raise ValueError("Stream over size")
            file_path = os.path.join('/tmp', '%s.zip' % self.source)
            data = base64.b64decode(self.stream)
            if digestutils.strmd5(data) != self.source:
                raise ValueError('Md5 not match')
            with open(file_path, 'wb') as f:
                # the stream has already been decoded above; write it once
                f.write(data)
            self.localfile = LocalFile(file_path, self.source, len(data))
        else:
try: self.post_check() except Exception: localfile = self.localfile self.localfile = None if self.stream: os.remove(localfile.path) else: middleware.filemanager.delete(self.source) raise class GogameAppBackupFile(AppLocalBackupFile): def __init__(self, destination, objtype): super(GogameAppBackupFile, self).__init__(destination, exclude=gfile.CompressConfAndLogExcluder(), topdir=False, native=True) self.objtype = objtype def post_check(self): gfile.check(self.objtype, self.file)
self.localfile = middleware.filemanager.get(self.source, download=True, timeout=timeout)
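A self-contained sketch of the stream-verification round-trip in GogameAppFile.prepare above. hashlib stands in for simpleutil's digestutils.strmd5, which is assumed here to return the hex MD5 of the raw bytes; the payload is made up. The upload is named by the MD5 of its decoded content, so the receiver can reject a corrupted payload before writing it to disk.

import base64
import hashlib

payload = b'example zip bytes'
source = hashlib.md5(payload).hexdigest()    # advertised checksum, doubles as the name
stream = base64.b64encode(payload).decode()  # what travels in the request

data = base64.b64decode(stream)
if hashlib.md5(data).hexdigest() != source:
    raise ValueError('Md5 not match')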
mod.rs
mod extract; mod font_mapping; use crate::BevyContext; pub use extract::extract_texts; pub use font_mapping::*; #[derive(Default)] pub struct TextRendererPlugin; impl Plugin for TextRendererPlugin { fn build(&self, app: &mut bevy::prelude::App) { app.init_resource::<FontMapping>() .add_system(process_loaded_fonts); } } fn process_loaded_fonts( mut font_mapping: ResMut<FontMapping>, fonts: Res<Assets<KayakFont>>, bevy_context: Option<Res<BevyContext>>, ) { if let Some(context) = bevy_context { if context.is_added() { font_mapping.mark_all_as_new(); } font_mapping.add_loaded_to_kayak(&fonts, &context); } }
use bevy::prelude::{Assets, Plugin, Res, ResMut}; use kayak_font::KayakFont;
lib.rs
#[macro_use] extern crate log; #[macro_use] extern crate lazy_static; extern crate nix; pub mod cond_stmt_base; pub mod config; pub mod defs; pub mod log_data; pub mod shm; pub mod tag; pub mod listen_semaphore; // void __unfold_branch_fn(uint32_t) {} #[no_mangle] pub fn
(_x: u32) { }
__unfold_branch_fn
PageWrapper.tsx
import React from 'react'; import Helmet from 'react-helmet'; import { injectIntl, InjectedIntlProps } from 'gatsby-plugin-intl'; import { Normaltekst } from 'nav-frontend-typografi'; import { RouterProps } from '@reach/router'; import GlobalPageHeader from './components/global-page-header/GlobalPageHeader'; import '../../../styles/main.less'; interface Props { showLanguageToggle: boolean;
const PageWrapper: React.FunctionComponent<Props & InjectedIntlProps & RouterProps> = ({ showLanguageToggle, pageTitle, pageMetaDescription, children, showFrontpageLink, intl }) => { return ( <Normaltekst tag="div"> <Helmet encodeSpecialCharacters={false} htmlAttributes={{ lang: `${intl.locale}-NO` }}> <meta charSet="utf-8" /> <title>{pageTitle}</title> <meta name="description" content={pageMetaDescription} /> </Helmet> <GlobalPageHeader showFrontpageLink={showFrontpageLink} showLanguageToggle={showLanguageToggle} /> {children} </Normaltekst> ); }; export default injectIntl(PageWrapper);
pageTitle: string; pageMetaDescription: string; showFrontpageLink?: boolean; }
index.ts
import { localStorage } from "@applicaster/zapp-react-native-bridge/ZappStorage/LocalStorage"; import { isWeb } from "@applicaster/zapp-react-native-utils/reactUtils"; const isWebPlatform = isWeb();
  return await localStorage.getItem(key, namespace);
}

export async function localStorageSet(
  key: string,
  value: string,
  namespace?: string
) {
  return await localStorage.setItem(key, value, namespace);
}

export async function localStorageRemove(key: string, namespace?: string) {
  if (isWebPlatform) {
    const keyToDelete = namespace ? `${namespace}_::_${key}` : key;
    // delete the key itself; assigning null would persist the string "null"
    window.localStorage.removeItem(keyToDelete);
    return;
  }

  return await localStorage.removeItem(key, namespace);
}
export async function localStorageGet(key: string, namespace?: string) {
school_segregation.py
import networkx as nx import numpy as np import matplotlib.pyplot as plt import pickle import math from dynsimf.models.Model import Model from dynsimf.models.Model import ModelConfiguration from dynsimf.models.components.Memory import MemoryConfiguration from dynsimf.models.components.Memory import MemoryConfigurationType from dynsimf.models.components.conditions.Condition import ConditionType from dynsimf.models.components.conditions.ThresholdCondition import ThresholdCondition from dynsimf.models.components.conditions.CustomCondition import CustomCondition from dynsimf.models.components.conditions.ThresholdCondition import ThresholdOperator from dynsimf.models.components.conditions.ThresholdCondition import ThresholdConfiguration if __name__ == "__main__": # Network definition g_list = pickle.load(open(r"C:/Users/Admin/MEGA/Uni/Master/Thesis/data/g_list.pkl", 'rb')) X_list = pickle.load(open(r"C:/Users/Admin/MEGA/Uni/Master/Thesis/data/x_list.pkl", 'rb')) school = 3 X = X_list[school] n = len(X['sex']) avg_initial_links = 5 # desired average degree in initial network link_prop = avg_initial_links/n g = np.random.choice([0, 1], size=(n, n), p=[1 - link_prop, link_prop]) np.fill_diagonal(g, 0) g = nx.convert_matrix.from_numpy_array(g, create_using=nx.DiGraph) cfg = { 'adjacency_memory_config': \ MemoryConfiguration(MemoryConfigurationType.ADJACENCY, { 'memory_size': 0 }), 'edge_values_memory_config': \ MemoryConfiguration(MemoryConfigurationType.EDGE_VALUES, { 'memory_size': 0 }) } model = Model(g, ModelConfiguration(cfg)) constants = { 'n': n, 'delta': 0.05, 'gamma': 0.65, 'c': 0.175, 'B1': 0.1, 'B2': 0.1, 'B3': 0.2, 'sigma': 0.035, 'alpha': 2, 'min_prop': 1000, 'X': X } def initial_utility(): utility = np.zeros((constants['n'], constants['n'])) race = list(constants['X']['race']) sex = list(constants['X']['sex']) grade = list(constants['X']['grade']) for i in range(constants['n']): for j in range(constants['n']): weighted_diffs = [constants['B1']*abs(sex[i] - sex[j]), constants['B2'] * (0 if grade[i] == grade[j] else 1), constants['B3'] * (0 if race[i] == race[j] else 1)] utility[i, j] = math.exp(-sum(weighted_diffs)) return utility def initial_prop(): prop = np.zeros((constants['n'], constants['n'])) utility = initial_utility() # Loop over the person and their peers for i in range(constants['n']): for j in range(constants['n']): if i == j: prop[i, j] = 0 else: prop[i, j] = utility[i, j] + constants['min_prop'] # Normalize prop[i, :] = prop[i, :] / np.sum(prop[i, :]) return prop constants['probability'] = initial_prop() constants['utility'] = initial_utility() def nb_update():
    def node_utility(node, adj):
        utility = constants['utility']

        # degree, connection gain and cost calculations
        d_i = adj[node].sum()
        direct_u = np.sum(adj[node] * utility[node])
        mutual_u = np.sum(adj[node] * adj.T[node] * utility[node])

        # indirect connection gain
        a = (adj.T.dot(adj[node, :]) * utility)[node]
        a[node] = 0
        indirect_u = np.sum(a)

        return direct_u + constants['gamma'] * mutual_u + constants['delta'] * indirect_u - d_i ** constants['alpha'] * constants['c']

    def network_update(nodes):
        adj = model.get_adjacency()
        order = nodes.copy()
        eps = np.random.normal(scale=constants['sigma'], size=constants['n']*2)
        np.random.shuffle(order)

        changes = {}
        P = constants['probability']
        for node in order:
            other_node = node
            while other_node == node:
                other_node = np.random.choice(nodes, p=P[node])

            existing_connection = bool(adj[node, other_node])

            adj[node, other_node] = 0
            U_without = node_utility(node, adj) + eps[node]
            adj[node, other_node] = 1
            U_with = node_utility(node, adj) + eps[-node]

            if U_without > U_with and existing_connection:
                changes[node] = {'remove': [other_node]}
            elif U_without < U_with and not existing_connection:
                changes[node] = {'add': [other_node]}

        return {
            'edge_change': changes
        }

    # Model definition
    model.constants = constants
    model.set_states(['Neighbors'])
    model.add_update(nb_update)

    model.set_edge_values(['utility'])
    model.set_initial_edge_values({
        'utility': initial_utility,
    })

    model.add_network_update(network_update, get_nodes=True)

    output = model.simulate(500)

    visualization_config = {
        'plot_interval': 10,
        'edge_values': 'utility',
        'plot_variable': 'Neighbors',
        'variable_limits': {
            'Neighbors': [0, 55]
        },
        'color_scale': 'Reds',
        'show_plot': False,
        'repeat': True,
        'plot_output': '../animations/school_segregation/school_' + str(school) + '.gif',
        'plot_title': 'School segregation'
    }

    model.configure_visualization(visualization_config, output)
    model.visualize('animation')
adj = model.get_adjacency() return {'Neighbors': np.sum(adj, axis=1)}
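A toy check of the pairwise utility behind initial_utility above: similarity enters through an exponential decay over the weighted attribute differences, so an identical pair scores 1.0 and each mismatch shrinks the score multiplicatively. The weights mirror B1, B2 and B3 from constants; the sample attribute values are made up.

import math

B1, B2, B3 = 0.1, 0.1, 0.2

def pair_utility(sex_i, sex_j, grade_i, grade_j, race_i, race_j):
    weighted_diffs = [B1*abs(sex_i - sex_j),
                      B2*(0 if grade_i == grade_j else 1),
                      B3*(0 if race_i == race_j else 1)]
    return math.exp(-sum(weighted_diffs))

assert pair_utility(0, 0, 9, 9, 1, 1) == 1.0      # identical pair
assert abs(pair_utility(0, 1, 9, 10, 1, 2)
           - math.exp(-(B1 + B2 + B3))) < 1e-12   # all three attributes differ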
decentralization.py
import sasoptpy as so
import pandas as pd


def test(cas_conn):

    m = so.Model(name='decentralization', session=cas_conn)

    DEPTS = ['A', 'B', 'C', 'D', 'E']
    CITIES = ['Bristol', 'Brighton', 'London']

    benefit_data = pd.DataFrame([
        ['Bristol', 10, 15, 10, 20, 5],
        ['Brighton', 10, 20, 15, 15, 15]],
        columns=['city'] + DEPTS).set_index('city')

    comm_data = pd.DataFrame([
        ['A', 'B', 0.0],
        ['A', 'C', 1.0],
        ['A', 'D', 1.5],
        ['A', 'E', 0.0],
        ['B', 'C', 1.4],
        ['B', 'D', 1.2],
        ['B', 'E', 0.0],
        ['C', 'D', 0.0],
        ['C', 'E', 2.0],
        ['D', 'E', 0.7]], columns=['i', 'j', 'comm']).set_index(['i', 'j'])

    cost_data = pd.DataFrame([
        ['Bristol', 'Bristol', 5], ['Bristol', 'Brighton', 14],
        ['Bristol', 'London', 13], ['Brighton', 'Brighton', 5],
        ['Brighton', 'London', 9], ['London', 'London', 10]],
        columns=['i', 'j', 'cost']).set_index(['i', 'j'])

    max_num_depts = 3

    benefit = {}
    for city in CITIES:
        for dept in DEPTS:
            try:
                benefit[dept, city] = benefit_data.loc[city, dept]
            except KeyError:
                benefit[dept, city] = 0

    comm = {}
    for row in comm_data.iterrows():
        (i, j) = row[0]
        comm[i, j] = row[1]['comm']
        comm[j, i] = comm[i, j]

    cost = {}
    for row in cost_data.iterrows():
        (i, j) = row[0]
        cost[i, j] = row[1]['cost']
        cost[j, i] = cost[i, j]

    assign = m.add_variables(DEPTS, CITIES, vartype=so.BIN, name='assign')
    IJKL = [(i, j, k, l)
            for i in DEPTS for j in CITIES for k in DEPTS for l in CITIES
            if i < k]
    product = m.add_variables(IJKL, vartype=so.BIN, name='product')

    totalBenefit = so.expr_sum(benefit[i, j] * assign[i, j]
                               for i in DEPTS for j in CITIES)
    totalCost = so.expr_sum(comm[i, k] * cost[j, l] * product[i, j, k, l]
                            for (i, j, k, l) in IJKL)
    m.set_objective(totalBenefit-totalCost, name='netBenefit', sense=so.MAX)

    m.add_constraints((so.expr_sum(assign[dept, city] for city in CITIES)
                       == 1 for dept in DEPTS), name='assign_dept')
    m.add_constraints((so.expr_sum(assign[dept, city] for dept in DEPTS)
                       <= max_num_depts for city in CITIES), name='cardinality')

    product_def1 = m.add_constraints((assign[i, j] + assign[k, l] - 1
                                      <= product[i, j, k, l]
                                      for (i, j, k, l) in IJKL), name='pd1')
    product_def2 = m.add_constraints((product[i, j, k, l]
                                      <= assign[i, j]
                                      for (i, j, k, l) in IJKL), name='pd2')
    product_def3 = m.add_constraints((product[i, j, k, l]
                                      <= assign[k, l]
                                      for (i, j, k, l) in IJKL), name='pd3')

    m.solve()
    print(m.get_problem_summary())

    m.drop_constraints(product_def1)
    m.drop_constraints(product_def2)
    m.drop_constraints(product_def3)

    m.add_constraints((
        so.expr_sum(product[i, j, k, l] for j in CITIES
                    if (i, j, k, l) in IJKL) == assign[k, l]
        for i in DEPTS for k in DEPTS for l in CITIES if i < k), name='pd4')
    m.add_constraints((
        so.expr_sum(product[i, j, k, l] for l in CITIES
                    if (i, j, k, l) in IJKL) == assign[i, j]
        for k in DEPTS for i in DEPTS for j in CITIES if i < k), name='pd5')

    m.solve()
    print(m.get_problem_summary())

    totalBenefit.set_name('totalBenefit')
    totalCost.set_name('totalCost')
    print(so.get_solution_table(totalBenefit, totalCost))
    print(so.get_solution_table(assign).unstack(level=-1))
return m.get_objective_value()
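A quick exhaustive check, independent of sasoptpy, of the linearization behind the pd1, pd2 and pd3 constraints above: for binary x and y, the inequalities x + y - 1 <= p, p <= x and p <= y admit exactly one binary p, namely p = x*y, which is why the product variables track joint assignment exactly.

for x in (0, 1):
    for y in (0, 1):
        feasible = [p for p in (0, 1) if x + y - 1 <= p <= min(x, y)]
        assert feasible == [x * y]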
volume.go
/* Copyright 2017 The OpenEBS Authors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package volume import ( "fmt" "github.com/openebs/maya/types/v1" v1_storage "k8s.io/api/storage/v1" "strings" "github.com/ghodss/yaml" "github.com/openebs/maya/pkg/apis/openebs.io/v1alpha1" m_k8s_client "github.com/openebs/maya/pkg/client/k8s" "github.com/openebs/maya/pkg/engine" menv "github.com/openebs/maya/pkg/env/v1alpha1" "github.com/openebs/maya/pkg/util" mach_apis_meta_v1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) // volumeOperationOptions contains the options with respect to // volume related operations type volumeOperationOptions struct { // k8sClient will make K8s API calls k8sClient *m_k8s_client.K8sClient } // VolumeOperation exposes methods with respect to volume related operations // e.g. read, create, delete. type VolumeOperation struct { // volumeOperationOptions has the options to various volume related // operations volumeOperationOptions // volume to create or read or delete volume *v1alpha1.CASVolume } // NewVolumeOperation returns a new instance of volumeOperation func
(volume *v1alpha1.CASVolume) (*VolumeOperation, error) { if volume == nil { return nil, fmt.Errorf("failed to instantiate volume operation: nil volume was provided") } if len(volume.Namespace) == 0 { return nil, fmt.Errorf("failed to instantiate volume operation: missing run namespace") } kc, err := m_k8s_client.NewK8sClient(volume.Namespace) if err != nil { return nil, err } return &VolumeOperation{ volume: volume, volumeOperationOptions: volumeOperationOptions{ k8sClient: kc, }, }, nil } // getCloneLabels returns a map of clone specific configuration func (v *VolumeOperation) getCloneLabels() (map[string]interface{}, error) { // Initially all the values are set to their defaults cloneLabels := map[string]interface{}{ string(v1alpha1.SnapshotNameVTP): "", string(v1alpha1.SourceVolumeTargetIPVTP): "", string(v1alpha1.IsCloneEnableVTP): "false", string(v1alpha1.StorageClassVTP): "", string(v1alpha1.SourceVolumeVTP): "", } // if volume is clone enabled then update cloneLabels map if v.volume.CloneSpec.IsClone { // fetch source PV using client go pv, err := v.k8sClient.GetPV(v.volume.CloneSpec.SourceVolume, mach_apis_meta_v1.GetOptions{}) if err != nil { return nil, fmt.Errorf("source volume %q for clone volume %q could not be retrieved", v.volume.CloneSpec.SourceVolume, v.volume.Name) } // Set isCloneEnable to true cloneLabels[string(v1alpha1.IsCloneEnableVTP)] = "true" // extract and assign relevant clone spec fields to cloneLabels cloneLabels[string(v1alpha1.SnapshotNameVTP)] = v.volume.CloneSpec.SnapshotName cloneLabels[string(v1alpha1.SourceVolumeTargetIPVTP)] = strings.TrimSpace(strings.Split(pv.Spec.ISCSI.TargetPortal, ":")[0]) cloneLabels[string(v1alpha1.StorageClassVTP)] = pv.Spec.StorageClassName cloneLabels[string(v1alpha1.SourceVolumeVTP)] = v.volume.CloneSpec.SourceVolume } return cloneLabels, nil } // Create provisions an OpenEBS volume func (v *VolumeOperation) Create() (*v1alpha1.CASVolume, error) { if v.k8sClient == nil { return nil, fmt.Errorf("unable to create volume: nil k8s client") } capacity := v.volume.Spec.Capacity if len(capacity) == 0 { return nil, fmt.Errorf("unable to create volume: missing volume capacity") } pvcName := v.volume.Labels[string(v1alpha1.PersistentVolumeClaimKey)] if len(pvcName) == 0 { return nil, fmt.Errorf("unable to create volume: missing persistent volume claim") } // fetch the pvc specifications pvc, err := v.k8sClient.GetPVC(pvcName, mach_apis_meta_v1.GetOptions{}) if err != nil { return nil, err } // extract the cas volume config from pvc casConfigPVC := pvc.Annotations[string(v1alpha1.CASConfigKey)] cloneLabels, err := v.getCloneLabels() if err != nil { return nil, err } scName := v.volume.Labels[string(v1alpha1.StorageClassKey)] if cloneLabels[string(v1alpha1.StorageClassVTP)] != "" { // get the storage class name corresponding to this volume scName = cloneLabels[string(v1alpha1.StorageClassVTP)].(string) } if len(scName) == 0 { return nil, fmt.Errorf("unable to create volume: missing storage class") } // scName might not be initialized in getCloneLabels // assign the latest available scName cloneLabels[string(v1alpha1.StorageClassVTP)] = scName // fetch the storage class specifications sc, err := v.k8sClient.GetStorageV1SC(scName, mach_apis_meta_v1.GetOptions{}) if err != nil { return nil, err } // extract the cas volume config from storage class casConfigSC := sc.Annotations[string(v1alpha1.CASConfigKey)] // cas template to create a cas volume castName := getCreateCASTemplate(sc) if len(castName) == 0 { return nil, fmt.Errorf("unable to 
create volume: missing create cas template at '%s'", v1alpha1.CASTemplateKeyForVolumeCreate)
	}

	// fetch CASTemplate specifications
	cast, err := v.k8sClient.GetOEV1alpha1CAST(castName, mach_apis_meta_v1.GetOptions{})
	if err != nil {
		return nil, err
	}

	volumeLabels := map[string]interface{}{
		string(v1alpha1.OwnerVTP):                 v.volume.Name,
		string(v1alpha1.CapacityVTP):              capacity,
		string(v1alpha1.RunNamespaceVTP):          v.volume.Namespace,
		string(v1alpha1.PersistentVolumeClaimVTP): pvcName,
	}

	runtimeVolumeValues := util.MergeMaps(volumeLabels, cloneLabels)

	// provision CAS volume via CAS volume specific CAS template engine
	cc, err := NewCASVolumeEngine(
		casConfigPVC,
		casConfigSC,
		cast,
		string(v1alpha1.VolumeTLP),
		runtimeVolumeValues,
	)
	if err != nil {
		return nil, err
	}

	// create the volume
	data, err := cc.Create()
	if err != nil {
		return nil, err
	}

	// unmarshall into openebs volume
	vol := &v1alpha1.CASVolume{}
	err = yaml.Unmarshal(data, vol)
	if err != nil {
		return nil, err
	}
	return vol, nil
}

func (v *VolumeOperation) Delete() (*v1alpha1.CASVolume, error) {
	if len(v.volume.Name) == 0 {
		return nil, fmt.Errorf("unable to delete volume: volume name not provided")
	}

	// fetch the pv specifications
	pv, err := v.k8sClient.GetPV(v.volume.Name, mach_apis_meta_v1.GetOptions{})
	if err != nil {
		return nil, err
	}

	// get the storage class name corresponding to this volume
	scName := pv.Spec.StorageClassName
	if len(scName) == 0 {
		return nil, fmt.Errorf("unable to delete volume %s: missing storage class in PV object", v.volume.Name)
	}

	// fetch the storage class specifications
	sc, err := v.k8sClient.GetStorageV1SC(scName, mach_apis_meta_v1.GetOptions{})
	if err != nil {
		return nil, err
	}

	castName := getDeleteCASTemplate(sc)
	if len(castName) == 0 {
		return nil, fmt.Errorf("unable to delete volume %s: missing cas template for delete volume at annotation '%s'", v.volume.Name, v1alpha1.CASTemplateKeyForVolumeDelete)
	}

	// fetch delete cas template specifications
	cast, err := v.k8sClient.GetOEV1alpha1CAST(castName, mach_apis_meta_v1.GetOptions{})
	if err != nil {
		return nil, err
	}

	// delete cas volume via cas template engine
	engine, err := engine.NewCASEngine(
		cast,
		string(v1alpha1.VolumeTLP),
		map[string]interface{}{
			string(v1alpha1.OwnerVTP):        v.volume.Name,
			string(v1alpha1.RunNamespaceVTP): v.volume.Namespace,
		},
	)
	if err != nil {
		return nil, err
	}

	// delete the cas volume
	data, err := engine.Delete()
	if err != nil {
		return nil, err
	}

	// unmarshall into openebs volume
	vol := &v1alpha1.CASVolume{}
	err = yaml.Unmarshal(data, vol)
	if err != nil {
		return nil, err
	}
	return vol, nil
}

// Get the openebs volume details
func (v *VolumeOperation) Read() (*v1alpha1.CASVolume, error) {
	if len(v.volume.Name) == 0 {
		return nil, fmt.Errorf("unable to read volume: volume name not provided")
	}

	// check if sc name is already present, if not then extract it
	scName := v.volume.Labels[string(v1alpha1.StorageClassKey)]
	if len(scName) == 0 {
		// fetch the pv specification
		pv, err := v.k8sClient.GetPV(v.volume.Name, mach_apis_meta_v1.GetOptions{})
		if err != nil {
			return nil, err
		}

		// extract the sc name
		scName = strings.TrimSpace(pv.Spec.StorageClassName)
	}

	if len(scName) == 0 {
		return nil, fmt.Errorf("unable to read volume '%s': missing storage class name", v.volume.Name)
	}

	// fetch the sc specification
	sc, err := v.k8sClient.GetStorageV1SC(scName, mach_apis_meta_v1.GetOptions{})
	if err != nil {
		return nil, err
	}

	// extract read cas template name from sc annotation
	castName := getReadCASTemplate(sc)
	if len(castName) == 0 {
		return nil, fmt.Errorf("unable to 
read volume '%s': missing cas template for read '%s'", v.volume.Name, v1alpha1.CASTemplateKeyForVolumeRead) } // fetch read cas template specifications cast, err := v.k8sClient.GetOEV1alpha1CAST(castName, mach_apis_meta_v1.GetOptions{}) if err != nil { return nil, err } // read cas volume via cas template engine engine, err := engine.NewCASEngine( cast, string(v1alpha1.VolumeTLP), map[string]interface{}{ string(v1alpha1.OwnerVTP): v.volume.Name, string(v1alpha1.RunNamespaceVTP): v.volume.Namespace, }, ) if err != nil { return nil, err } // read the volume details data, err := engine.Read() if err != nil { return nil, err } // unmarshall into openebs volume vol := &v1alpha1.CASVolume{} err = yaml.Unmarshal(data, vol) if err != nil { return nil, err } return vol, nil } // VolumeListOperation exposes methods to execute volume list operation type VolumeListOperation struct { // volumeOperationOptions has the options to various volume related // operations volumeOperationOptions // volumes to list operation volumes *v1alpha1.CASVolumeList } // NewVolumeListOperation returns a new instance of VolumeListOperation that is // capable of listing volumes func NewVolumeListOperation(volumes *v1alpha1.CASVolumeList) (*VolumeListOperation, error) { if volumes == nil { return nil, fmt.Errorf("failed to instantiate 'volume list operation': nil list options provided") } kc, err := m_k8s_client.NewK8sClient("") if err != nil { return nil, err } return &VolumeListOperation{ volumes: volumes, volumeOperationOptions: volumeOperationOptions{ k8sClient: kc, }, }, nil } func (v *VolumeListOperation) List() (*v1alpha1.CASVolumeList, error) { // cas template to list cas volumes castNames := menv.Get(menv.CASTemplateToListVolumeENVK) if len(castNames) == 0 { return nil, fmt.Errorf("failed to list volume: cas template to list volume is not set as environment variable") } vols := &v1alpha1.CASVolumeList{ Items: []v1alpha1.CASVolume{}, } for _, castName := range strings.Split(castNames, ",") { // fetch read cas template specifications cast, err := v.k8sClient.GetOEV1alpha1CAST(castName, mach_apis_meta_v1.GetOptions{}) if err != nil { return nil, err } // read cas volume via cas template engine engine, err := engine.NewCASEngine( cast, string(v1alpha1.VolumeTLP), map[string]interface{}{ string(v1alpha1.RunNamespaceVTP): v.volumes.Namespace, }, ) if err != nil { return nil, err } // read the volume details data, err := engine.List() if err != nil { return nil, err } // unmarshall into openebs volume tvols := &v1alpha1.CASVolumeList{} err = yaml.Unmarshal(data, tvols) if err != nil { return nil, err } vols.Items = append(vols.Items, tvols.Items...) 
	}

	return vols, nil
}

func getCreateCASTemplate(sc *v1_storage.StorageClass) string {
	castName := sc.Annotations[string(v1alpha1.CASTemplateKeyForVolumeCreate)]
	// if cas template for the given operation is empty then fetch from environment variables
	if len(castName) == 0 {
		casType := strings.ToLower(sc.Annotations[string(v1alpha1.CASTypeKey)])
		// cstor gets the cstor create template; jiva, or an absent cas-type,
		// defaults to the jiva create template
		if casType == string(v1.CStorVolumeType) {
			castName = menv.Get(menv.CASTemplateToCreateCStorVolumeENVK)
		} else if casType == string(v1.JivaVolumeType) || casType == "" {
			castName = menv.Get(menv.CASTemplateToCreateJivaVolumeENVK)
		}
	}
	return castName
}

func getReadCASTemplate(sc *v1_storage.StorageClass) string {
	castName := sc.Annotations[string(v1alpha1.CASTemplateKeyForVolumeRead)]
	// if cas template for the given operation is empty then fetch from environment variables
	if len(castName) == 0 {
		casType := strings.ToLower(sc.Annotations[string(v1alpha1.CASTypeKey)])
		// cstor gets the cstor read template; jiva, or an absent cas-type,
		// defaults to the jiva read template
		if casType == string(v1.CStorVolumeType) {
			castName = menv.Get(menv.CASTemplateToReadCStorVolumeENVK)
		} else if casType == string(v1.JivaVolumeType) || casType == "" {
			castName = menv.Get(menv.CASTemplateToReadJivaVolumeENVK)
		}
	}
	return castName
}

func getDeleteCASTemplate(sc *v1_storage.StorageClass) string {
	castName := sc.Annotations[string(v1alpha1.CASTemplateKeyForVolumeDelete)]
	// if cas template for the given operation is empty then fetch from environment variables
	if len(castName) == 0 {
		casType := strings.ToLower(sc.Annotations[string(v1alpha1.CASTypeKey)])
		// cstor gets the cstor delete template; jiva, or an absent cas-type,
		// defaults to the jiva delete template
		if casType == string(v1.CStorVolumeType) {
			castName = menv.Get(menv.CASTemplateToDeleteCStorVolumeENVK)
		} else if casType == string(v1.JivaVolumeType) || casType == "" {
			castName = menv.Get(menv.CASTemplateToDeleteJivaVolumeENVK)
		}
	}
	return castName
}
NewVolumeOperation
bar_chart_tests.rs
extern crate rustplot; use rustplot::data_parser; use rustplot::chart_builder; use rustplot::chart_builder::Chart; #[test] fn
() {
    let data_1 = data_parser::get_str_col(0, 0, 5, "./resources/bar_chart_tests.csv");
    let data_2 = data_parser::get_num_col(1, 0, 5, "./resources/bar_chart_tests.csv");
    let bar1 = chart_builder::VerticalBarChart::new(String::from("Test Bar Chart 1"), data_1.clone(), vec![data_2.clone()]);
    bar1.draw();

    let data_3 = data_parser::get_str_col(2, 0, 5, "./resources/bar_chart_tests.csv");
    let data_4 = data_parser::get_num_col(3, 0, 5, "./resources/bar_chart_tests.csv");
    let bar2 = chart_builder::VerticalBarChart::new(String::from("Test Bar Chart 2"), data_3.clone(), vec![data_4.clone()]);
    bar2.draw();

    let data_5 = data_parser::get_str_col(4, 0, 5, "./resources/bar_chart_tests.csv");
    let data_6 = data_parser::get_num_col(5, 0, 5, "./resources/bar_chart_tests.csv");
    let bar3 = chart_builder::VerticalBarChart::new(String::from("Test Bar Chart 3"), data_5.clone(), vec![data_6.clone()]);
    bar3.draw();

    let data_7 = data_parser::get_num_col(6, 0, 5, "./resources/bar_chart_tests.csv");
    let data_8 = data_parser::get_num_col(7, 0, 5, "./resources/bar_chart_tests.csv");
    let data_9 = data_parser::get_num_col(8, 0, 5, "./resources/bar_chart_tests.csv");
    let mut multi_bar_1 = chart_builder::VerticalBarChart::new(String::from("Test Bar Chart 4"), data_1.clone(), vec![data_7.clone(), data_8.clone(), data_9.clone()]);
    multi_bar_1.chart_prop.set_show_legend(true);
    multi_bar_1.chart_prop.set_legend_values(vec![String::from("Location 1"), String::from("Location 2"), String::from("Location 3")]);
    multi_bar_1.draw();

    let data_10 = data_parser::get_num_col(9, 0, 5, "./resources/bar_chart_tests.csv");
    let data_11 = data_parser::get_num_col(10, 0, 5, "./resources/bar_chart_tests.csv");
    let data_12 = data_parser::get_num_col(11, 0, 5, "./resources/bar_chart_tests.csv");
    let mut multi_bar_2 = chart_builder::VerticalBarChart::new(String::from("Test Bar Chart 5"), data_1.clone(), vec![data_10.clone(), data_11.clone(), data_12.clone()]);
    multi_bar_2.chart_prop.set_show_legend(true);
    multi_bar_2.chart_prop.set_legend_values(vec![String::from("Location 1"), String::from("Location 2"), String::from("Location 3")]);
    multi_bar_2.draw();

    let mut multi_bar_3 = chart_builder::VerticalBarChart::new(String::from("Test Bar Chart 6"), data_1.clone(), vec![data_2.clone(), data_4.clone(), data_6.clone()]);
    multi_bar_3.chart_prop.set_show_legend(true);
    multi_bar_3.chart_prop.set_legend_values(vec![String::from("Location 1"), String::from("Location 2"), String::from("Location 3")]);
    multi_bar_3.draw();
}
bar_chart_tests
ad_group_pb2.py
# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/ads/googleads/v6/resources/ad_group.proto """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() from google.ads.google_ads.v6.proto.common import custom_parameter_pb2 as google_dot_ads_dot_googleads_dot_v6_dot_common_dot_custom__parameter__pb2 from google.ads.google_ads.v6.proto.common import explorer_auto_optimizer_setting_pb2 as google_dot_ads_dot_googleads_dot_v6_dot_common_dot_explorer__auto__optimizer__setting__pb2 from google.ads.google_ads.v6.proto.common import targeting_setting_pb2 as google_dot_ads_dot_googleads_dot_v6_dot_common_dot_targeting__setting__pb2 from google.ads.google_ads.v6.proto.enums import ad_group_ad_rotation_mode_pb2 as google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_ad__group__ad__rotation__mode__pb2 from google.ads.google_ads.v6.proto.enums import ad_group_status_pb2 as google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_ad__group__status__pb2 from google.ads.google_ads.v6.proto.enums import ad_group_type_pb2 as google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_ad__group__type__pb2 from google.ads.google_ads.v6.proto.enums import bidding_source_pb2 as google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_bidding__source__pb2 from google.ads.google_ads.v6.proto.enums import targeting_dimension_pb2 as google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_targeting__dimension__pb2 from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 DESCRIPTOR = _descriptor.FileDescriptor( name='google/ads/googleads/v6/resources/ad_group.proto', package='google.ads.googleads.v6.resources', syntax='proto3', serialized_options=b'\n%com.google.ads.googleads.v6.resourcesB\014AdGroupProtoP\001ZJgoogle.golang.org/genproto/googleapis/ads/googleads/v6/resources;resources\242\002\003GAA\252\002!Google.Ads.GoogleAds.V6.Resources\312\002!Google\\Ads\\GoogleAds\\V6\\Resources\352\002%Google::Ads::GoogleAds::V6::Resources', create_key=_descriptor._internal_create_key, serialized_pb=b'\n0google/ads/googleads/v6/resources/ad_group.proto\x12!google.ads.googleads.v6.resources\x1a\x35google/ads/googleads/v6/common/custom_parameter.proto\x1a\x44google/ads/googleads/v6/common/explorer_auto_optimizer_setting.proto\x1a\x36google/ads/googleads/v6/common/targeting_setting.proto\x1a=google/ads/googleads/v6/enums/ad_group_ad_rotation_mode.proto\x1a\x33google/ads/googleads/v6/enums/ad_group_status.proto\x1a\x31google/ads/googleads/v6/enums/ad_group_type.proto\x1a\x32google/ads/googleads/v6/enums/bidding_source.proto\x1a\x37google/ads/googleads/v6/enums/targeting_dimension.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1cgoogle/api/annotations.proto\"\x81\x0f\n\x07\x41\x64Group\x12?\n\rresource_name\x18\x01 \x01(\tB(\xe0\x41\x05\xfa\x41\"\n googleads.googleapis.com/AdGroup\x12\x14\n\x02id\x18\" \x01(\x03\x42\x03\xe0\x41\x03H\x00\x88\x01\x01\x12\x11\n\x04name\x18# \x01(\tH\x01\x88\x01\x01\x12N\n\x06status\x18\x05 \x01(\x0e\x32>.google.ads.googleads.v6.enums.AdGroupStatusEnum.AdGroupStatus\x12M\n\x04type\x18\x0c 
\x01(\x0e\x32:.google.ads.googleads.v6.enums.AdGroupTypeEnum.AdGroupTypeB\x03\xe0\x41\x05\x12h\n\x10\x61\x64_rotation_mode\x18\x16 \x01(\x0e\x32N.google.ads.googleads.v6.enums.AdGroupAdRotationModeEnum.AdGroupAdRotationMode\x12\x44\n\rbase_ad_group\x18$ \x01(\tB(\xe0\x41\x03\xfa\x41\"\n googleads.googleapis.com/AdGroupH\x02\x88\x01\x01\x12\"\n\x15tracking_url_template\x18% \x01(\tH\x03\x88\x01\x01\x12N\n\x15url_custom_parameters\x18\x06 \x03(\x0b\x32/.google.ads.googleads.v6.common.CustomParameter\x12@\n\x08\x63\x61mpaign\x18& \x01(\tB)\xe0\x41\x05\xfa\x41#\n!googleads.googleapis.com/CampaignH\x04\x88\x01\x01\x12\x1b\n\x0e\x63pc_bid_micros\x18\' \x01(\x03H\x05\x88\x01\x01\x12\x1b\n\x0e\x63pm_bid_micros\x18( \x01(\x03H\x06\x88\x01\x01\x12\x1e\n\x11target_cpa_micros\x18) \x01(\x03H\x07\x88\x01\x01\x12 \n\x0e\x63pv_bid_micros\x18* \x01(\x03\x42\x03\xe0\x41\x03H\x08\x88\x01\x01\x12\x1e\n\x11target_cpm_micros\x18+ \x01(\x03H\t\x88\x01\x01\x12\x18\n\x0btarget_roas\x18, \x01(\x01H\n\x88\x01\x01\x12#\n\x16percent_cpc_bid_micros\x18- \x01(\x03H\x0b\x88\x01\x01\x12\x65\n\x1f\x65xplorer_auto_optimizer_setting\x18\x15 \x01(\x0b\x32<.google.ads.googleads.v6.common.ExplorerAutoOptimizerSetting\x12n\n\x1c\x64isplay_custom_bid_dimension\x18\x17 \x01(\x0e\x32H.google.ads.googleads.v6.enums.TargetingDimensionEnum.TargetingDimension\x12\x1d\n\x10\x66inal_url_suffix\x18. \x01(\tH\x0c\x88\x01\x01\x12K\n\x11targeting_setting\x18\x19 \x01(\x0b\x32\x30.google.ads.googleads.v6.common.TargetingSetting\x12-\n\x1b\x65\x66\x66\x65\x63tive_target_cpa_micros\x18/ \x01(\x03\x42\x03\xe0\x41\x03H\r\x88\x01\x01\x12h\n\x1b\x65\x66\x66\x65\x63tive_target_cpa_source\x18\x1d \x01(\x0e\x32>.google.ads.googleads.v6.enums.BiddingSourceEnum.BiddingSourceB\x03\xe0\x41\x03\x12\'\n\x15\x65\x66\x66\x65\x63tive_target_roas\x18\x30 \x01(\x01\x42\x03\xe0\x41\x03H\x0e\x88\x01\x01\x12i\n\x1c\x65\x66\x66\x65\x63tive_target_roas_source\x18 \x01(\x0e\x32>.google.ads.googleads.v6.enums.BiddingSourceEnum.BiddingSourceB\x03\xe0\x41\x03\x12=\n\x06labels\x18\x31 \x03(\tB-\xe0\x41\x03\xfa\x41\'\n%googleads.googleapis.com/AdGroupLabel:U\xea\x41R\n googleads.googleapis.com/AdGroup\x12.customers/{customer_id}/adGroups/{ad_group_id}B\x05\n\x03_idB\x07\n\x05_nameB\x10\n\x0e_base_ad_groupB\x18\n\x16_tracking_url_templateB\x0b\n\t_campaignB\x11\n\x0f_cpc_bid_microsB\x11\n\x0f_cpm_bid_microsB\x14\n\x12_target_cpa_microsB\x11\n\x0f_cpv_bid_microsB\x14\n\x12_target_cpm_microsB\x0e\n\x0c_target_roasB\x19\n\x17_percent_cpc_bid_microsB\x13\n\x11_final_url_suffixB\x1e\n\x1c_effective_target_cpa_microsB\x18\n\x16_effective_target_roasB\xf9\x01\n%com.google.ads.googleads.v6.resourcesB\x0c\x41\x64GroupProtoP\x01ZJgoogle.golang.org/genproto/googleapis/ads/googleads/v6/resources;resources\xa2\x02\x03GAA\xaa\x02!Google.Ads.GoogleAds.V6.Resources\xca\x02!Google\\Ads\\GoogleAds\\V6\\Resources\xea\x02%Google::Ads::GoogleAds::V6::Resourcesb\x06proto3' , 
dependencies=[google_dot_ads_dot_googleads_dot_v6_dot_common_dot_custom__parameter__pb2.DESCRIPTOR,google_dot_ads_dot_googleads_dot_v6_dot_common_dot_explorer__auto__optimizer__setting__pb2.DESCRIPTOR,google_dot_ads_dot_googleads_dot_v6_dot_common_dot_targeting__setting__pb2.DESCRIPTOR,google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_ad__group__ad__rotation__mode__pb2.DESCRIPTOR,google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_ad__group__status__pb2.DESCRIPTOR,google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_ad__group__type__pb2.DESCRIPTOR,google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_bidding__source__pb2.DESCRIPTOR,google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_targeting__dimension__pb2.DESCRIPTOR,google_dot_api_dot_field__behavior__pb2.DESCRIPTOR,google_dot_api_dot_resource__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,]) _ADGROUP = _descriptor.Descriptor( name='AdGroup', full_name='google.ads.googleads.v6.resources.AdGroup', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='resource_name', full_name='google.ads.googleads.v6.resources.AdGroup.resource_name', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=b'\340A\005\372A\"\n googleads.googleapis.com/AdGroup', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='id', full_name='google.ads.googleads.v6.resources.AdGroup.id', index=1, number=34, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='name', full_name='google.ads.googleads.v6.resources.AdGroup.name', index=2, number=35, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='status', full_name='google.ads.googleads.v6.resources.AdGroup.status', index=3, number=5, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='type', full_name='google.ads.googleads.v6.resources.AdGroup.type', index=4, number=12, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=b'\340A\005', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='ad_rotation_mode', full_name='google.ads.googleads.v6.resources.AdGroup.ad_rotation_mode', index=5, number=22, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='base_ad_group', 
full_name='google.ads.googleads.v6.resources.AdGroup.base_ad_group', index=6, number=36, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=b'\340A\003\372A\"\n googleads.googleapis.com/AdGroup', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='tracking_url_template', full_name='google.ads.googleads.v6.resources.AdGroup.tracking_url_template', index=7, number=37, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='url_custom_parameters', full_name='google.ads.googleads.v6.resources.AdGroup.url_custom_parameters', index=8, number=6, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='campaign', full_name='google.ads.googleads.v6.resources.AdGroup.campaign', index=9, number=38, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=b'\340A\005\372A#\n!googleads.googleapis.com/Campaign', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='cpc_bid_micros', full_name='google.ads.googleads.v6.resources.AdGroup.cpc_bid_micros', index=10, number=39, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='cpm_bid_micros', full_name='google.ads.googleads.v6.resources.AdGroup.cpm_bid_micros', index=11, number=40, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='target_cpa_micros', full_name='google.ads.googleads.v6.resources.AdGroup.target_cpa_micros', index=12, number=41, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='cpv_bid_micros', full_name='google.ads.googleads.v6.resources.AdGroup.cpv_bid_micros', index=13, number=42, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='target_cpm_micros', full_name='google.ads.googleads.v6.resources.AdGroup.target_cpm_micros', index=14, number=43, type=3, cpp_type=2, label=1, has_default_value=False, 
default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='target_roas', full_name='google.ads.googleads.v6.resources.AdGroup.target_roas', index=15, number=44, type=1, cpp_type=5, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='percent_cpc_bid_micros', full_name='google.ads.googleads.v6.resources.AdGroup.percent_cpc_bid_micros', index=16, number=45, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='explorer_auto_optimizer_setting', full_name='google.ads.googleads.v6.resources.AdGroup.explorer_auto_optimizer_setting', index=17, number=21, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='display_custom_bid_dimension', full_name='google.ads.googleads.v6.resources.AdGroup.display_custom_bid_dimension', index=18, number=23, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None,
number=46, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='targeting_setting', full_name='google.ads.googleads.v6.resources.AdGroup.targeting_setting', index=20, number=25, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='effective_target_cpa_micros', full_name='google.ads.googleads.v6.resources.AdGroup.effective_target_cpa_micros', index=21, number=47, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='effective_target_cpa_source', full_name='google.ads.googleads.v6.resources.AdGroup.effective_target_cpa_source', index=22, number=29, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='effective_target_roas', full_name='google.ads.googleads.v6.resources.AdGroup.effective_target_roas', index=23, number=48, type=1, cpp_type=5, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='effective_target_roas_source', full_name='google.ads.googleads.v6.resources.AdGroup.effective_target_roas_source', index=24, number=32, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='labels', full_name='google.ads.googleads.v6.resources.AdGroup.labels', index=25, number=49, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=b'\340A\003\372A\'\n%googleads.googleapis.com/AdGroupLabel', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=b'\352AR\n googleads.googleapis.com/AdGroup\022.customers/{customer_id}/adGroups/{ad_group_id}', is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ _descriptor.OneofDescriptor( name='_id', full_name='google.ads.googleads.v6.resources.AdGroup._id', index=0, containing_type=None, create_key=_descriptor._internal_create_key, fields=[]), _descriptor.OneofDescriptor( name='_name', full_name='google.ads.googleads.v6.resources.AdGroup._name', index=1, containing_type=None, create_key=_descriptor._internal_create_key, fields=[]), _descriptor.OneofDescriptor( name='_base_ad_group', 
full_name='google.ads.googleads.v6.resources.AdGroup._base_ad_group', index=2, containing_type=None, create_key=_descriptor._internal_create_key, fields=[]), _descriptor.OneofDescriptor( name='_tracking_url_template', full_name='google.ads.googleads.v6.resources.AdGroup._tracking_url_template', index=3, containing_type=None, create_key=_descriptor._internal_create_key, fields=[]), _descriptor.OneofDescriptor( name='_campaign', full_name='google.ads.googleads.v6.resources.AdGroup._campaign', index=4, containing_type=None, create_key=_descriptor._internal_create_key, fields=[]), _descriptor.OneofDescriptor( name='_cpc_bid_micros', full_name='google.ads.googleads.v6.resources.AdGroup._cpc_bid_micros', index=5, containing_type=None, create_key=_descriptor._internal_create_key, fields=[]), _descriptor.OneofDescriptor( name='_cpm_bid_micros', full_name='google.ads.googleads.v6.resources.AdGroup._cpm_bid_micros', index=6, containing_type=None, create_key=_descriptor._internal_create_key, fields=[]), _descriptor.OneofDescriptor( name='_target_cpa_micros', full_name='google.ads.googleads.v6.resources.AdGroup._target_cpa_micros', index=7, containing_type=None, create_key=_descriptor._internal_create_key, fields=[]), _descriptor.OneofDescriptor( name='_cpv_bid_micros', full_name='google.ads.googleads.v6.resources.AdGroup._cpv_bid_micros', index=8, containing_type=None, create_key=_descriptor._internal_create_key, fields=[]), _descriptor.OneofDescriptor( name='_target_cpm_micros', full_name='google.ads.googleads.v6.resources.AdGroup._target_cpm_micros', index=9, containing_type=None, create_key=_descriptor._internal_create_key, fields=[]), _descriptor.OneofDescriptor( name='_target_roas', full_name='google.ads.googleads.v6.resources.AdGroup._target_roas', index=10, containing_type=None, create_key=_descriptor._internal_create_key, fields=[]), _descriptor.OneofDescriptor( name='_percent_cpc_bid_micros', full_name='google.ads.googleads.v6.resources.AdGroup._percent_cpc_bid_micros', index=11, containing_type=None, create_key=_descriptor._internal_create_key, fields=[]), _descriptor.OneofDescriptor( name='_final_url_suffix', full_name='google.ads.googleads.v6.resources.AdGroup._final_url_suffix', index=12, containing_type=None, create_key=_descriptor._internal_create_key, fields=[]), _descriptor.OneofDescriptor( name='_effective_target_cpa_micros', full_name='google.ads.googleads.v6.resources.AdGroup._effective_target_cpa_micros', index=13, containing_type=None, create_key=_descriptor._internal_create_key, fields=[]), _descriptor.OneofDescriptor( name='_effective_target_roas', full_name='google.ads.googleads.v6.resources.AdGroup._effective_target_roas', index=14, containing_type=None, create_key=_descriptor._internal_create_key, fields=[]), ], serialized_start=635, serialized_end=2556, ) _ADGROUP.fields_by_name['status'].enum_type = google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_ad__group__status__pb2._ADGROUPSTATUSENUM_ADGROUPSTATUS _ADGROUP.fields_by_name['type'].enum_type = google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_ad__group__type__pb2._ADGROUPTYPEENUM_ADGROUPTYPE _ADGROUP.fields_by_name['ad_rotation_mode'].enum_type = google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_ad__group__ad__rotation__mode__pb2._ADGROUPADROTATIONMODEENUM_ADGROUPADROTATIONMODE _ADGROUP.fields_by_name['url_custom_parameters'].message_type = google_dot_ads_dot_googleads_dot_v6_dot_common_dot_custom__parameter__pb2._CUSTOMPARAMETER _ADGROUP.fields_by_name['explorer_auto_optimizer_setting'].message_type = 
google_dot_ads_dot_googleads_dot_v6_dot_common_dot_explorer__auto__optimizer__setting__pb2._EXPLORERAUTOOPTIMIZERSETTING _ADGROUP.fields_by_name['display_custom_bid_dimension'].enum_type = google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_targeting__dimension__pb2._TARGETINGDIMENSIONENUM_TARGETINGDIMENSION _ADGROUP.fields_by_name['targeting_setting'].message_type = google_dot_ads_dot_googleads_dot_v6_dot_common_dot_targeting__setting__pb2._TARGETINGSETTING _ADGROUP.fields_by_name['effective_target_cpa_source'].enum_type = google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_bidding__source__pb2._BIDDINGSOURCEENUM_BIDDINGSOURCE _ADGROUP.fields_by_name['effective_target_roas_source'].enum_type = google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_bidding__source__pb2._BIDDINGSOURCEENUM_BIDDINGSOURCE _ADGROUP.oneofs_by_name['_id'].fields.append( _ADGROUP.fields_by_name['id']) _ADGROUP.fields_by_name['id'].containing_oneof = _ADGROUP.oneofs_by_name['_id'] _ADGROUP.oneofs_by_name['_name'].fields.append( _ADGROUP.fields_by_name['name']) _ADGROUP.fields_by_name['name'].containing_oneof = _ADGROUP.oneofs_by_name['_name'] _ADGROUP.oneofs_by_name['_base_ad_group'].fields.append( _ADGROUP.fields_by_name['base_ad_group']) _ADGROUP.fields_by_name['base_ad_group'].containing_oneof = _ADGROUP.oneofs_by_name['_base_ad_group'] _ADGROUP.oneofs_by_name['_tracking_url_template'].fields.append( _ADGROUP.fields_by_name['tracking_url_template']) _ADGROUP.fields_by_name['tracking_url_template'].containing_oneof = _ADGROUP.oneofs_by_name['_tracking_url_template'] _ADGROUP.oneofs_by_name['_campaign'].fields.append( _ADGROUP.fields_by_name['campaign']) _ADGROUP.fields_by_name['campaign'].containing_oneof = _ADGROUP.oneofs_by_name['_campaign'] _ADGROUP.oneofs_by_name['_cpc_bid_micros'].fields.append( _ADGROUP.fields_by_name['cpc_bid_micros']) _ADGROUP.fields_by_name['cpc_bid_micros'].containing_oneof = _ADGROUP.oneofs_by_name['_cpc_bid_micros'] _ADGROUP.oneofs_by_name['_cpm_bid_micros'].fields.append( _ADGROUP.fields_by_name['cpm_bid_micros']) _ADGROUP.fields_by_name['cpm_bid_micros'].containing_oneof = _ADGROUP.oneofs_by_name['_cpm_bid_micros'] _ADGROUP.oneofs_by_name['_target_cpa_micros'].fields.append( _ADGROUP.fields_by_name['target_cpa_micros']) _ADGROUP.fields_by_name['target_cpa_micros'].containing_oneof = _ADGROUP.oneofs_by_name['_target_cpa_micros'] _ADGROUP.oneofs_by_name['_cpv_bid_micros'].fields.append( _ADGROUP.fields_by_name['cpv_bid_micros']) _ADGROUP.fields_by_name['cpv_bid_micros'].containing_oneof = _ADGROUP.oneofs_by_name['_cpv_bid_micros'] _ADGROUP.oneofs_by_name['_target_cpm_micros'].fields.append( _ADGROUP.fields_by_name['target_cpm_micros']) _ADGROUP.fields_by_name['target_cpm_micros'].containing_oneof = _ADGROUP.oneofs_by_name['_target_cpm_micros'] _ADGROUP.oneofs_by_name['_target_roas'].fields.append( _ADGROUP.fields_by_name['target_roas']) _ADGROUP.fields_by_name['target_roas'].containing_oneof = _ADGROUP.oneofs_by_name['_target_roas'] _ADGROUP.oneofs_by_name['_percent_cpc_bid_micros'].fields.append( _ADGROUP.fields_by_name['percent_cpc_bid_micros']) _ADGROUP.fields_by_name['percent_cpc_bid_micros'].containing_oneof = _ADGROUP.oneofs_by_name['_percent_cpc_bid_micros'] _ADGROUP.oneofs_by_name['_final_url_suffix'].fields.append( _ADGROUP.fields_by_name['final_url_suffix']) _ADGROUP.fields_by_name['final_url_suffix'].containing_oneof = _ADGROUP.oneofs_by_name['_final_url_suffix'] _ADGROUP.oneofs_by_name['_effective_target_cpa_micros'].fields.append( 
_ADGROUP.fields_by_name['effective_target_cpa_micros']) _ADGROUP.fields_by_name['effective_target_cpa_micros'].containing_oneof = _ADGROUP.oneofs_by_name['_effective_target_cpa_micros'] _ADGROUP.oneofs_by_name['_effective_target_roas'].fields.append( _ADGROUP.fields_by_name['effective_target_roas']) _ADGROUP.fields_by_name['effective_target_roas'].containing_oneof = _ADGROUP.oneofs_by_name['_effective_target_roas'] DESCRIPTOR.message_types_by_name['AdGroup'] = _ADGROUP _sym_db.RegisterFileDescriptor(DESCRIPTOR) AdGroup = _reflection.GeneratedProtocolMessageType('AdGroup', (_message.Message,), { 'DESCRIPTOR' : _ADGROUP, '__module__' : 'google.ads.googleads.v6.resources.ad_group_pb2' # @@protoc_insertion_point(class_scope:google.ads.googleads.v6.resources.AdGroup) }) _sym_db.RegisterMessage(AdGroup) DESCRIPTOR._options = None _ADGROUP.fields_by_name['resource_name']._options = None _ADGROUP.fields_by_name['id']._options = None _ADGROUP.fields_by_name['type']._options = None _ADGROUP.fields_by_name['base_ad_group']._options = None _ADGROUP.fields_by_name['campaign']._options = None _ADGROUP.fields_by_name['cpv_bid_micros']._options = None _ADGROUP.fields_by_name['effective_target_cpa_micros']._options = None _ADGROUP.fields_by_name['effective_target_cpa_source']._options = None _ADGROUP.fields_by_name['effective_target_roas']._options = None _ADGROUP.fields_by_name['effective_target_roas_source']._options = None _ADGROUP.fields_by_name['labels']._options = None _ADGROUP._options = None # @@protoc_insertion_point(module_scope)
is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='final_url_suffix', full_name='google.ads.googleads.v6.resources.AdGroup.final_url_suffix', index=19,
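# --- Hedged usage sketch (not part of the generated ad_group_pb2 module). It
# assumes the module is importable under the package path in its descriptor;
# the resource name and bid value below are made-up examples.
from google.ads.googleads.v6.resources import ad_group_pb2

ad_group = ad_group_pb2.AdGroup()
ad_group.resource_name = "customers/123/adGroups/456"  # hypothetical IDs
ad_group.name = "Example ad group"
ad_group.cpc_bid_micros = 1_000_000  # proto3 optional field; presence is tracked

# Round-trip through the wire format to show the registered message is usable.
data = ad_group.SerializeToString()
parsed = ad_group_pb2.AdGroup.FromString(data)
assert parsed.name == "Example ad group"
assert parsed.HasField("cpc_bid_micros")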
card.service.ts
import { HttpClient, HttpErrorResponse, HttpHeaders } from '@angular/common/http';
import { Injectable } from '@angular/core';
import { GlobalBaseurl } from 'app/config/baseUrl';
import { throwError } from 'rxjs';
import { catchError } from 'rxjs/operators';

@Injectable({
  providedIn: 'root',
})
export class
{
  private baseApiUrl = GlobalBaseurl.BASE_API_URL;

  constructor(private http: HttpClient) {}

  pageNo = 0;

  header = {
    headers: new HttpHeaders({
      'Content-Type': 'application/json',
      'Cache-Control': 'no-cache',
      responseType: 'json',
      Authorization: 'zROnP44uHEFkaRtoi7',
    }),
  };

  listCard() {
    return this.http
      .get<any>(this.baseApiUrl + 'card/cardlist', this.header)
      .pipe(catchError(this.handleError));
  }

  addCard(data) {
    return this.http
      .post<any>(this.baseApiUrl + 'card/addcard', data, this.header)
      .pipe(catchError(this.handleError));
  }

  getCardById(data) {
    return this.http
      .post<any>(this.baseApiUrl + 'card/carddetails', data, this.header)
      .pipe(catchError(this.handleError));
  }

  updateCard(data) {
    return this.http
      .post<any>(this.baseApiUrl + 'card/updatecard', data, this.header)
      .pipe(catchError(this.handleError));
  }

  deleteCard() {
    const params = {
      cardId: sessionStorage.cardId,
    };
    return this.http
      .post<any>(this.baseApiUrl + 'card/deletecard', params, this.header)
      .pipe(catchError(this.handleError));
  }

  private handleError(error: HttpErrorResponse) {
    if (error.error instanceof ErrorEvent) {
      // A client-side or network error occurred.
      console.error('An error occurred:', error.error.message);
    } else {
      // The backend returned an unsuccessful response code; template
      // literals need backticks for ${...} interpolation.
      console.error(
        `Backend returned code ${error.status}, ` +
          `body was: ${JSON.stringify(error.error)}`,
      );
    }
    return throwError(
      'Something bad happened; please try again later.' + JSON.stringify(error),
    );
  }
}
CardService
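// --- Hedged usage sketch (not part of card.service.ts): one way an Angular
// component might consume CardService. The component name, template, and the
// './card.service' import path are illustrative assumptions.
import { Component, OnInit } from '@angular/core';
import { CardService } from './card.service';

@Component({
  selector: 'app-card-list',
  template: '<div *ngFor="let card of cards">{{ card | json }}</div>',
})
export class CardListComponent implements OnInit {
  cards: any[] = [];

  constructor(private cardService: CardService) {}

  ngOnInit() {
    // Errors are already mapped by handleError, so the error callback
    // receives the message string produced there.
    this.cardService.listCard().subscribe(
      (res) => (this.cards = res),
      (msg) => console.error(msg),
    );
  }
}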
main.go
package main

import (
	"context"
	"crypto/tls"
	"flag"
	"fmt"
	"net/http"
	"os"
	"os/signal"
	"syscall"

	"github.com/golang/glog"
)

func
() {
	var parameters WhSvrParameters

	// get command line parameters
	flag.IntVar(&parameters.port, "port", 443, "Webhook server port.")
	flag.StringVar(&parameters.certFile, "tlsCertFile", "/etc/webhook/certs/cert.pem", "File containing the x509 Certificate for HTTPS.")
	flag.StringVar(&parameters.keyFile, "tlsKeyFile", "/etc/webhook/certs/key.pem", "File containing the x509 private key to --tlsCertFile.")
	flag.StringVar(&parameters.sidecarCfgFile, "sidecarCfgFile", "/etc/webhook/config/sidecarconfig.yaml", "File containing the mutation configuration.")
	flag.Parse()

	sidecarConfig, err := loadConfig(parameters.sidecarCfgFile)
	if err != nil {
		glog.Errorf("Failed to load configuration: %v", err)
	}

	pair, err := tls.LoadX509KeyPair(parameters.certFile, parameters.keyFile)
	if err != nil {
		glog.Errorf("Failed to load key pair: %v", err)
	}

	whsvr := &WebhookServer{
		sidecarConfig: sidecarConfig,
		server: &http.Server{
			Addr:      fmt.Sprintf(":%v", parameters.port),
			TLSConfig: &tls.Config{Certificates: []tls.Certificate{pair}},
		},
	}

	// define http server and server handler
	mux := http.NewServeMux()
	mux.HandleFunc("/mutate", whsvr.serve)
	whsvr.server.Handler = mux

	// start webhook server in new routine
	go func() {
		if err := whsvr.server.ListenAndServeTLS("", ""); err != nil {
			glog.Errorf("Failed to listen and serve webhook server: %v", err)
		}
	}()

	// listen for the OS shutdown signal
	signalChan := make(chan os.Signal, 1)
	signal.Notify(signalChan, syscall.SIGINT, syscall.SIGTERM)
	<-signalChan

	glog.Infof("Got OS shutdown signal, shutting down webhook server gracefully...")
	whsvr.server.Shutdown(context.Background())
}
main
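// --- Hedged helper sketch (not part of main.go): a bounded variant of the
// graceful shutdown used above, so a hung connection cannot block process
// exit forever. Assumes an extra "time" import; the timeout budget is an
// arbitrary example.
func shutdownWithTimeout(srv *http.Server, d time.Duration) {
	ctx, cancel := context.WithTimeout(context.Background(), d)
	defer cancel()
	// Shutdown returns once in-flight handlers finish or the deadline expires.
	if err := srv.Shutdown(ctx); err != nil {
		glog.Errorf("Failed to shut down webhook server: %v", err)
	}
}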
app.py
from bottle import route, run, request
import os
import csv
import webbrowser

cwd = os.getcwd()
xcwd = cwd.replace('\\', '/')
print(xcwd)
sscwd = xcwd + "/server/security"
print(sscwd)
ssucwd = sscwd + "/user.csv"
sspcwd = sscwd + "/pass.csv"


def fsync():
    webbrowser.open('http://localhost:5000/login')


# Load the user information
user_file = open(ssucwd, 'r')
user_reader = csv.reader(user_file)
user_line = [row for row in user_reader]

# Load the password information
pass_file = open(sspcwd, 'r')
pass_reader = csv.reader(pass_file)
pass_line = [row for row in pass_reader]


@route("/login")
def login():
    return """
    <h1>ログイン</h1>
    <p>ユーザIDとパスワードを入力してください。</p>
    <form action="/login" method="post">
        Username: <input name="username" type="text" />
        Password: <input name="password" type="password" />
        <input value="Login" type="submit" />
    </form>
    """


@route("/login", method="POST")
def do_login():
    username = [request.forms.get("username")]
    for i in range(3):  # the argument of range() is the number of registered users
        if username == user_line[i]:
            return do_login_pass(i)
    return "<p>no such username.</p>"


def do_login_pass(count):
    password = [request.forms.get("password")]
    if check_login_pass(password, count):
        return index()
    else:
        return """
        <p>Login failed.</p>
        <button type="button" onclick="location.href='/login'">戻る</button>
        """


def check_login_pass(password, count):
    if password == pass_line[count]:
        return True
    else:
        return False


# Site shown after authentication
@route("/")
def index():
    return """
    <h1><a href="/">管理モード</a></h1>
    <p>開きたいファイルを選択してください</p>
    <!-- File selection screen -->
    <ul id="menu">
        <li><button type="button" onclick="location.href='/gakusei_ex'">学生リスト送信</button></li>
        <li><button type="button" onclick="location.href='/gakusei_im'">学生リストダウンロード</button></li>
        <li><button type="button" onclick="location.href='/kougi_rule_ex'">講義科目ルール送信</button></li>
        <li><button type="button" onclick="location.href='/kougi_rule_im'">講義科目ルールダウンロード</button></li>
        <li><button type="button" onclick="location.href='/risyuuF1_ex'">履修者-F1送信</button></li>
        <li><button type="button" onclick="location.href='/risyuuF1_im'">履修者-F1ダウンロード</button></li>
        <li><button type="button" onclick="location.href='/risyuuF2_ex'">履修者-F2送信</button></li>
        <li><button type="button" onclick="location.href='/risyuuF2_im'">履修者-F2ダウンロード</button></li>
        <li><button type="button" onclick="location.href='/risyuuF3_ex'">履修者-F3送信</button></li>
        <li><button type="button" onclick="location.href='/risyuuF3_im'">履修者-F3ダウンロード</button></li>
        <li><button type="button" onclick="location.href='/risyuuF4_1_ex'">履修者-F4_1送信</button></li>
        <li><button type="button" onclick="location.href='/risyuuF4_1_im'">履修者-F4_1ダウンロード</button></li>
        <li><button type="button" onclick="location.href='/risyuuF4_2_ex'">履修者-F4_2送信</button></li>
        <li><button type="button" onclick="location.href='/risyuuF4_2_im'">履修者-F4_2ダウンロード</button></li>
        <li><button type="button" onclick="location.href='/risyuuM1_ex'">履修者-M1送信</button></li>
        <li><button type="button" onclick="location.href='/risyuuM1_im'">履修者-M1ダウンロード</button></li>
        <li><button type="button" onclick="location.href='/risyuuM2_ex'">履修者-M2送信</button></li>
        <li><button type="button" onclick="location.href='/risyuuM2_im'">履修者-M2ダウンロード</button></li>
        <li><button type="button" onclick="location.href='/risyuuM3_ex'">履修者-M3送信</button></li>
        <li><button type="button" onclick="location.href='/risyuuM3_im'">履修者-M3ダウンロード</button></li>
        <li><button type="button" onclick="location.href='/risyuuM4_ex'">履修者-M4送信</button></li>
        <li><button type="button" onclick="location.href='/risyuuM4_im'">履修者-M4ダウンロード</button></li>
        <li><button type="button" onclick="location.href='/risyuuTh2_ex'">履修者-Th2送信</button></li>
        <li><button type="button" onclick="location.href='/risyuuTh2_im'">履修者-Th2ダウンロード</button></li>
        <li><button type="button" onclick="location.href='/risyuuTh34_ex'">履修者-Th34送信</button></li>
        <li><button type="button" onclick="location.href='/risyuuTh34_im'">履修者-Th34ダウンロード</button></li>
        <li><button type="button" onclick="location.href='/risyuuTh5_1_ex'">履修者-Th5_1送信</button></li>
        <li><button type="button" onclick="location.href='/risyuuTh5_1_im'">履修者-Th5_1ダウンロード</button></li>
        <li><button type="button" onclick="location.href='/risyuuTh5_2_ex'">履修者-Th5_2送信</button></li>
        <li><button type="button" onclick="location.href='/risyuuTh5_2_im'">履修者-Th5_2ダウンロード</button></li>
        <li><button type="button" onclick="location.href='/risyuuTu2_ex'">履修者-Tu2送信</button></li>
        <li><button type="button" onclick="location.href='/risyuuTu2_im'">履修者-Tu2ダウンロード</button></li>
        <li><button type="button" onclick="location.href='/risyuuTu3_1_ex'">履修者-Tu3_1送信</button></li>
        <li><button type="button" onclick="location.href='/risyuuTu3_1_im'">履修者-Tu3_1ダウンロード</button></li>
        <li><button type="button" onclick="location.href='/risyuuTu3_2_ex'">履修者-Tu3_2送信</button></li>
        <li><button type="button" onclick="location.href='/risyuuTu3_2_im'">履修者-Tu3_2ダウンロード</button></li>
        <li><button type="button" onclick="location.href='/risyuuTu4_ex'">履修者-Tu4送信</button></li>
        <li><button type="button" onclick="location.href='/risyuuTu4_im'">履修者-Tu4ダウンロード</button></li>
        <li><button type="button" onclick="location.href='/risyuuTu5_ex'">履修者-Tu5送信</button></li>
        <li><button type="button" onclick="location.href='/risyuuTu5_im'">履修者-Tu5ダウンロード</button></li>
        <li><button type="button" onclick="location.href='/risyuuW12_ex'">履修者-W12送信</button></li>
        <li><button type="button" onclick="location.href='/risyuuW12_im'">履修者-W12ダウンロード</button></li>
        <li><button type="button" onclick="location.href='/risyuuW3_1_ex'">履修者-W3_1送信</button></li>
        <li><button type="button" onclick="location.href='/risyuuW3_1_im'">履修者-W3_1ダウンロード</button></li>
        <li><button type="button" onclick="location.href='/risyuuW3_2_ex'">履修者-W3_2送信</button></li>
        <li><button type="button" onclick="location.href='/risyuuW3_2_im'">履修者-W3_2ダウンロード</button></li>
        <li><button type="button" onclick="location.href='/risyuuW4_ex'">履修者-W4送信</button></li>
        <li><button type="button" onclick="location.href='/risyuuW4_im'">履修者-W4ダウンロード</button></li>
        <li><button type="button" onclick="location.href='/risyuuW5_1_ex'">履修者-W5_1送信</button></li>
        <li><button type="button" onclick="location.href='/risyuuW5_1_im'">履修者-W5_1ダウンロード</button></li>
        <li><button type="button" onclick="location.href='/risyuuW5_2_ex'">履修者-W5_2送信</button></li>
        <li><button type="button" onclick="location.href='/risyuuW5_2_im'">履修者-W5_2ダウンロード</button></li>
        <li><button type="button" onclick="location.href='/tanntou_kyouinn_ex'">教員・担当科目リスト送信</button></li>
        <li><button type="button" onclick="location.href='/tanntou_kyouinn_im'">教員・担当科目リストダウンロード</button></li>
        <li><button type="button" onclick="location.href='/attendlist_ex'">出席者リスト送信</button></li>
        <li><button type="button" onclick="location.href='/attendlist_im'">出席者リストダウンロード</button></li>
    </ul>
    <button type="button" onclick="location.href='/login'">ログアウト</button>
    """


@route("/gakusei_ex")
def gakusei_ex():
    import gakusei_ex
    gakusei_ex.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/gakusei_im")
def gakusei_im():
    import gakusei_im
    gakusei_im.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/kougi_rule_ex")
def kougi_rule_ex():
    import kougi_rule_ex
    kougi_rule_ex.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/kougi_rule_im")
def kougi_rule_im():
    import kougi_rule_im
    kougi_rule_im.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuF1_ex")
def risyuuF1_ex():
    import risyuuF1_ex
    risyuuF1_ex.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuF1_im")
def risyuuF1_im():
    import risyuuF1_im
    risyuuF1_im.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuF2_ex")
def risyuuF2_ex():
    import risyuuF2_ex
    risyuuF2_ex.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuF2_im")
def risyuuF2_im():
    import risyuuF2_im
    risyuuF2_im.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuF3_ex")
def risyuuF3_ex():
    import risyuuF3_ex
    risyuuF3_ex.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuF3_im")
def risyuuF3_im():
    import risyuuF3_im
    risyuuF3_im.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuF4_1_ex")
def risyuuF4_1_ex():
    import risyuuF4_1_ex
    risyuuF4_1_ex.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuF4_1_im")
def risyuuF4_1_im():
    import risyuuF4_1_im
    risyuuF4_1_im.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuF4_2_ex")
def risyuuF4_2_ex():
    import risyuuF4_2_ex
    risyuuF4_2_ex.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuF4_2_im")
def risyuuF4_2_im():
    import risyuuF4_2_im
    risyuuF4_2_im.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuM1_ex")
def risyuuM1_ex():
    import risyuuM1_ex
    risyuuM1_ex.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuM1_im")
def risyuuM1_im():
    import risyuuM1_im
    risyuuM1_im.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuM2_ex")
def risyuuM2_ex():
    import risyuuM2_ex
    risyuuM2_ex.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuM2_im")
def risyuuM2_im():
    import risyuuM2_im
    risyuuM2_im.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuM3_ex")
def risyuuM3_ex():
    import risyuuM3_ex
    risyuuM3_ex.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuM3_im")
def risyuuM3_im():
    import risyuuM3_im
    risyuuM3_im.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuM4_ex")
def risyuuM4_ex():
    import risyuuM4_ex
    risyuuM4_ex.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuM4_im")
def risyuuM4_im():
    import risyuuM4_im
    risyuuM4_im.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuTh2_ex")
def risyuuTh2_ex():
    import risyuuTh2_ex
    risyuuTh2_ex.fsync()
    return """<button type
    risyuuTh2_im.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuTh34_ex")
def risyuuTh34_ex():
    import risyuuTh34_ex
    risyuuTh34_ex.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuTh34_im")
def risyuuTh34_im():
    import risyuuTh34_im
    risyuuTh34_im.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuTh5_1_ex")
def risyuuTh5_1_ex():
    import risyuuTh5_1_ex
    risyuuTh5_1_ex.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuTh5_1_im")
def risyuuTh5_1_im():
    import risyuuTh5_1_im
    risyuuTh5_1_im.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuTh5_2_ex")
def risyuuTh5_2_ex():
    import risyuuTh5_2_ex
    risyuuTh5_2_ex.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuTh5_2_im")
def risyuuTh5_2_im():
    import risyuuTh5_2_im
    risyuuTh5_2_im.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuTu2_ex")
def risyuuTu2_ex():
    import risyuuTu2_ex
    risyuuTu2_ex.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuTu2_im")
def risyuuTu2_im():
    import risyuuTu2_im
    risyuuTu2_im.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuTu3_1_ex")
def risyuuTu3_1_ex():
    import risyuuTu3_1_ex
    risyuuTu3_1_ex.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuTu3_1_im")
def risyuuTu3_1_im():
    import risyuuTu3_1_im
    risyuuTu3_1_im.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuTu3_2_ex")
def risyuuTu3_2_ex():
    import risyuuTu3_2_ex
    risyuuTu3_2_ex.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuTu3_2_im")
def risyuuTu3_2_im():
    import risyuuTu3_2_im
    risyuuTu3_2_im.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuTu4_ex")
def risyuuTu4_ex():
    import risyuuTu4_ex
    risyuuTu4_ex.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuTu4_im")
def risyuuTu4_im():
    import risyuuTu4_im
    risyuuTu4_im.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuTu5_ex")
def risyuuTu5_ex():
    import risyuuTu5_ex
    risyuuTu5_ex.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuTu5_im")
def risyuuTu5_im():
    import risyuuTu5_im
    risyuuTu5_im.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuW12_ex")
def risyuuW12_ex():
    import risyuuW12_ex
    risyuuW12_ex.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuW12_im")
def risyuuW12_im():
    import risyuuW12_im
    risyuuW12_im.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuW3_1_ex")
def risyuuW3_1_ex():
    import risyuuW3_1_ex
    risyuuW3_1_ex.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuW3_1_im")
def risyuuW3_1_im():
    import risyuuW3_1_im
    risyuuW3_1_im.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuW3_2_ex")
def risyuuW3_2_ex():
    import risyuuW3_2_ex
    risyuuW3_2_ex.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuW3_2_im")
def risyuuW3_2_im():
    import risyuuW3_2_im
    risyuuW3_2_im.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuW4_ex")
def risyuuW4_ex():
    import risyuuW4_ex
    risyuuW4_ex.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuW4_im")
def risyuuW4_im():
    import risyuuW4_im
    risyuuW4_im.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuW5_1_ex")
def risyuuW5_1_ex():
    import risyuuW5_1_ex
    risyuuW5_1_ex.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuW5_1_im")
def risyuuW5_1_im():
    import risyuuW5_1_im
    risyuuW5_1_im.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuW5_2_ex")
def risyuuW5_2_ex():
    import risyuuW5_2_ex
    risyuuW5_2_ex.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuW5_2_im")
def risyuuW5_2_im():
    import risyuuW5_2_im
    risyuuW5_2_im.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/tanntou_kyouinn_ex")
def tanntou_kyouinn_ex():
    import tanntou_kyouinn_ex
    tanntou_kyouinn_ex.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/tanntou_kyouinn_im")
def tanntou_kyouinn_im():
    import tanntou_kyouinn_im
    tanntou_kyouinn_im.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/attendlist_ex")
def attendlist_ex():
    import attendlist_ex
    attendlist_ex.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


@route("/attendlist_im")
def attendlist_im():
    import attendlist_im
    attendlist_im.fsync()
    return """<button type="button" onclick="location.href='/'">戻る</button>"""


fsync()  # open the login page before the blocking run() call
run(host="0.0.0.0", port=int(os.environ.get("PORT", 5000)))
="button" onclick="location.href='/'">戻る</button>"""


@route("/risyuuTh2_im")
def risyuuTh2_im():
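# --- Hedged hardening sketch (not part of app.py): a timing-safe drop-in for
# check_login_pass, using hmac.compare_digest on the same global pass_line
# rows loaded above. The single-column CSV layout is an assumption.
import hmac

def check_login_pass_safe(password, count):
    supplied = (password[0] or "").encode("utf-8")
    expected = (pass_line[count][0] if pass_line[count] else "").encode("utf-8")
    # compare_digest runs in time independent of where the inputs differ.
    return hmac.compare_digest(supplied, expected)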
iovec.rs
use core::borrow::{Borrow, BorrowMut};
use core::fmt;
use core::marker::PhantomData;
use core::mem::MaybeUninit;
use core::ops::{Deref, DerefMut};

#[cfg(feature = "redox_syscall")]
use syscall::data::IoVec;

#[cfg(all(windows, feature = "winapi"))]
use winapi::shared::{
    ntdef::{CHAR, ULONG},
    ws2def::WSABUF,
};

pub mod init_marker {
    use super::*;

    mod private {
        pub trait Sealed: Sized + 'static + Send + Sync + Unpin {}
    }

    pub unsafe trait InitMarker: private::Sealed {
        const IS_INITIALIZED: bool;
        type DerefTargetItem: fmt::Debug;
    }

    pub enum Init {}
    pub enum Uninit {}

    impl private::Sealed for Init {}
    impl private::Sealed for Uninit {}

    unsafe impl InitMarker for Init {
        const IS_INITIALIZED: bool = true;
        type DerefTargetItem = u8;
    }
    unsafe impl InitMarker for Uninit {
        const IS_INITIALIZED: bool = false;
        type DerefTargetItem = MaybeUninit<u8>;
    }
} // pub mod init_marker

use self::init_marker::*;

/// A `#![no_std]`-friendly wrapper over the [`std::io::IoSlice`].
///
/// Internally, the struct will store the following based on crate features:
///
/// * `std` - wrapping [`std::io::IoSlice`] directly, with accessors for it as well as conversion
///   functions and From impls.
/// * `libc` (with `#[cfg(unix)]`) - wrapping [`libc::iovec`] directly on platforms that support it.
/// * `winapi` (with `#[cfg(windows)]`) - wrapping `WSABUF` directly.
/// * (none) - wrapping a regular slice, which may not have the same ABI guarantees as the types
///   from std or libc have.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct IoSlice<'a, I: InitMarker = Init> {
    #[cfg(all(unix, feature = "libc", not(feature = "redox_syscall")))]
    inner: (libc::iovec, PhantomData<&'a [I::DerefTargetItem]>),

    #[cfg(feature = "redox_syscall")]
    inner: (IoVec, PhantomData<&'a [I::DerefTargetItem]>),

    #[cfg(all(windows, feature = "winapi"))]
    inner: (WSABUF, PhantomData<&'a [I::DerefTargetItem]>),

    #[cfg(not(any(all(unix, feature = "libc"), all(windows, feature = "winapi"))))]
    inner: &'a [I::DerefTargetItem],

    _marker: PhantomData<I>,
}

// SAFETY: This is safe because whatever pointer is sent to this slice must be Send in the
// first place. Regular slices implement Send and Sync because of this.
unsafe impl<'a, I: InitMarker> Send for IoSlice<'a, I> {}
// SAFETY: Same as above.
unsafe impl<'a, I: InitMarker> Sync for IoSlice<'a, I> {}
impl<'a, I: InitMarker> Unpin for IoSlice<'a, I> {}

#[cfg(feature = "std")]
impl<'a, I: InitMarker> std::panic::UnwindSafe for IoSlice<'a, I> {}
#[cfg(feature = "std")]
impl<'a, I: InitMarker> std::panic::RefUnwindSafe for IoSlice<'a, I> {}

impl<'a, I: InitMarker> IoSlice<'a, I> {
    /// Convert a regular slice into an I/O slice.
    ///
    /// The initializedness of the resulting I/O slice is dependent on the `I` generic parameter,
    /// which by default is [`Init`]. Note that it is highly recommended not to call this
    /// with [`Uninit`], since immutable slices cannot be made initialized, and one
    /// therefore has to prove externally that the memory is in fact initialized before using it.
    #[inline]
    pub fn new(slice: &'a [u8]) -> Self {
        unsafe { Self::__construct(slice.as_ptr(), slice.len()) }
    }

    /// Cast any I/O slice into an [`Uninit`] slice, forgetting about the original
    /// initializedness.
#[inline] pub fn as_uninit(&self) -> &IoSlice<'a, Uninit> { unsafe { &*(self as *const Self as *const IoSlice<'a, Uninit>) } } #[inline] pub fn as_uninit_mut(&mut self) -> &mut IoSlice<'a, Uninit> { unsafe { &mut *(self as *mut Self as *mut IoSlice<'a, Uninit>) } } /// Cast any slice of I/O slice into its uninitialized counterpart. #[inline] pub fn cast_to_uninit_slices(selves: &[Self]) -> &[IoSlice<'a, Uninit>] { unsafe { crate::cast_slice_same_layout(selves) } } /// Cast any mutable slice of I/O slice into its uninitialized counterpart. #[inline] pub fn cast_to_uninit_slices_mut(selves: &mut [Self]) -> &mut [IoSlice<'a, Uninit>] { unsafe { crate::cast_slice_same_layout_mut(selves) } } /// Turn any I/O slice into an [`Uninit`] slice, forgetting about the original /// initializedness. #[inline] pub fn into_uninit(self) -> IoSlice<'a, Uninit> { unsafe { IoSlice::__construct(self.__ptr(), self.__len()) } } /// Unsafely turn an I/O slice, being already [`Init`] or not, into an I/O slice that is [`Init`]. /// /// # Safety /// /// For this to be safe, the initialization invariant must be upheld. Refer to the /// [`std::mem::MaybeUninit`] docs. #[inline] pub unsafe fn assume_init(self) -> IoSlice<'a, Init> { IoSlice::__construct(self.__ptr(), self.__len()) } /// Wrap a system [`libc::iovec`] into a wrapped I/O slice, assuming the iovec can be /// represented as borrowed for the lifetime `'a`. If the iovec is otherwise owned and /// allocated via the system allocator, consider wrapping it in [`IoBox`] if the `alloc` /// feature is used. /// /// _This is only available on Unix targets with the `libc` feature enabled_. /// /// # Safety /// /// This is unsafe because the slice must be valid (refer to libstd's section about pointer and /// slice validity near [`std::ptr`]). #[cfg(all(unix, feature = "libc"))] #[inline] pub unsafe fn from_raw_iovec(slice: libc::iovec) -> Self { Self { #[cfg(not(feature = "redox_syscall"))] inner: (slice, PhantomData), #[cfg(feature = "redox_syscall")] inner: ( IoVec { addr: slice.iov_base as usize, len: slice.iov_len, }, PhantomData, ), _marker: PhantomData, } } /// Wrap a system `WSABUF` into a wrapped I/O slice, assuming the buffer can be represented as /// borrowed for the lifetime `'a`. Consider wrapping it in an [`IoBox`] if ownership of the /// `WSABUF` is desired. /// /// # Safety /// /// For this to be safe, the slice must be _valid_ (refer to the [`std::ptr`] docs) and not /// aliased mutably. If the generic parameter `I` is set to `Init`, the slice must also /// contain initialized data. #[cfg(all(windows, feature = "winapi"))] #[inline] pub unsafe fn from_raw_wsabuf(slice: WSABUF) -> Self { Self { inner: (slice, PhantomData), _marker: PhantomData, } } /// Retrieve the inner iovec from this I/O slice. /// /// The raw iovec must be considered borrowed from this slice, even though it is not tracked /// with a lifetime. /// /// _This is only available on Unix targets with the `libc` feature enabled_. #[cfg(all(unix, feature = "libc"))] #[inline] pub fn as_raw_iovec(&self) -> libc::iovec { #[cfg(not(feature = "redox_syscall"))] return self.inner.0; #[cfg(feature = "redox_syscall")] return libc::iovec { iov_base: self.inner.0.addr as *mut libc::c_void, iov_len: self.inner.0.len, }; } /// Retrieve the inner WSABUF from this I/O slice. /// /// The raw WSABUF must be considered borrowed from this slice, even though it is not /// explicitly tracked using a lifetime. 
/// /// _This is only available on Windows targets with the `winapi` feature enabled._ #[cfg(all(windows, feature = "winapi"))] #[inline] pub fn as_raw_wsabuf(&self) -> WSABUF { self.inner.0 } /// Cast a slice of I/O slices into a slice of iovecs. Since these must share the same ABI /// layout, this is completely safe, and can be directly passed to system calls. /// /// _This is only available on Unix targets with the `libc` feature enabled_. #[cfg(all(unix, feature = "libc"))] #[inline] pub fn cast_to_raw_iovecs(slices: &'a [Self]) -> &'a [libc::iovec] { unsafe { crate::cast_slice_same_layout(slices) } } /// Cast a slice of I/O slices into a slice of `WSABUF`s. Since these must share the same ABI /// layout, this is completely safe, and the resulting slice can directly be passed to system /// calls. /// /// _This is only available on Windows targets with the `winapi` feature enabled_. #[cfg(all(windows, feature = "winapi"))] #[inline] pub fn cast_to_raw_wsabufs(slices: &'a [Self]) -> &'a [WSABUF] { unsafe { crate::cast_slice_same_layout(slices) } } /// Cast a mutable slice of I/O slices into a mutable slice of iovecs. iovecs share the exact /// same ABI guarantees as this wrapper. /// /// _This is only available on Unix targets with the `libc` feature enabled_. /// /// # Safety /// /// This is unsafe, since the iovecs can be mutated, which will cause the original wrapped /// slices to be changed as well. If the iovecs are changed to invalid values in any way, this /// breaks the validity invariant upheld by this wrapped type, leading to UB. #[cfg(all(unix, feature = "libc"))] #[inline] pub unsafe fn cast_to_raw_iovecs_mut(slices: &'a mut [Self]) -> &'a mut [libc::iovec] { crate::cast_slice_same_layout_mut(slices) } /// Cast a mutable slice of I/O slices into a mutable slice of `WSABUF`s. Those share the exact /// same ABI guarantees as this wrapper does. /// /// _This is only available on WIndows targets with the `winapi` feature enabled_. /// /// # Safety /// /// This is unsafe, since the `WSABUF`s can be mutated entirely in safe code, which will cause /// the original wrapped slices to be changed as well. If the buffers are changed to invalid /// values in any way, this breaks the validity invariant upheld by this wrapped type, leading /// to UB. #[cfg(all(windows, feature = "winapi"))] #[inline] pub unsafe fn cast_to_raw_wsabufs_mut(slices: &'a mut [Self]) -> &'a mut [WSABUF] { cast_slice_same_layout_mut(slices) } // TODO: from_raw_{iovec,wsabuf}s{,_mut} /// Advance the start offset of an I/O slice, effectively shrinking it from the start. /// /// # Panics /// /// This will panic if count is greater than the current length. On Windows, this will also /// therefore instantly fail if count is greater than 2^32, since larger buffers cannot be /// constructed. #[inline] pub fn advance(&mut self, count: usize) { unsafe { self.__set_len( self.__len() .checked_sub(count) .expect("IoSlice::advance causes length to overflow"), ); self.__set_ptr(self.__ptr().add(count)); } } /// Advance a range of slices by a specific offset, by advancing each slice individually until /// the offset is reached. /// /// __Note that while this may modify the original slices in-place, the return value should /// always be used, since the original value may contain old slices that were completely /// skipped and never made empty__. /// /// This returns an Option rather than panicking when `n` is greater than the total length, to /// reduce the need for counting, or blind reliance on system call correctness. 
    #[must_use]
    pub fn advance_within(mut slices: &mut [Self], mut n: usize) -> Option<&mut [Self]> {
        while let Some(buffer) = slices.first_mut() {
            if n == 0 {
                return Some(slices);
            };

            let buffer_len = buffer.len();

            if buffer_len > n {
                buffer.advance(n);
            } else {
                slices = &mut slices[1..];
            }
            n -= core::cmp::min(buffer_len, n);
        }
        if n > 0 {
            return None;
        }
        Some(slices)
    }

    /// Get a slice to the "inner data" pointed to by this slice, which may be either `[u8]` or
    /// `[MaybeUninit<u8>]`, depending on the `I` generic parameter. Prefer [`as_slice`] or
    /// [`as_maybe_uninit_slice`] instead; this is only used to make various methods easier to
    /// implement generically.
    ///
    /// [`as_slice`]: #method.as_slice
    /// [`as_maybe_uninit_slice`]: #method.as_maybe_uninit_slice
    #[inline]
    pub fn inner_data(&self) -> &'a [I::DerefTargetItem] {
        unsafe {
            core::slice::from_raw_parts(self.__ptr() as *const I::DerefTargetItem, self.__len())
        }
    }

    /// Construct an I/O slice based on the inner data, which is either `[u8]` or
    /// `[MaybeUninit<u8>]`.
    #[inline]
    pub fn from_inner_data(inner_data: &'a [I::DerefTargetItem]) -> Self {
        unsafe { Self::__construct(inner_data.as_ptr() as *const u8, inner_data.len()) }
    }

    /// Retrieve a slice of possibly uninitialized data, but which is still always valid.
    #[inline]
    pub fn as_maybe_uninit_slice(&self) -> &'a [MaybeUninit<u8>] {
        self.as_uninit().inner_data()
    }

    fn __ptr(&self) -> *const u8 {
        #[cfg(all(unix, feature = "libc", not(feature = "redox_syscall")))]
        return self.inner.0.iov_base as *const u8;

        #[cfg(feature = "redox_syscall")]
        return self.inner.0.addr as *const u8;

        #[cfg(all(windows, feature = "winapi"))]
        return self.inner.0.buf as *const u8;

        #[cfg(not(any(all(unix, feature = "libc"), all(windows, feature = "winapi"))))]
        return self.inner.as_ptr() as *const u8;
    }

    fn __len(&self) -> usize {
        #[cfg(all(unix, feature = "libc", not(feature = "redox_syscall")))]
        return self.inner.0.iov_len as usize;

        #[cfg(feature = "redox_syscall")]
        return self.inner.0.len;

        #[cfg(all(windows, feature = "winapi"))]
        return self.inner.0.len as usize;

        #[cfg(not(any(all(unix, feature = "libc"), all(windows, feature = "winapi"))))]
        return self.inner.len();
    }

    #[inline]
    unsafe fn __set_ptr(&mut self, ptr: *const u8) {
        #[cfg(all(unix, feature = "libc", not(feature = "redox_syscall")))]
        {
            self.inner.0.iov_base = ptr as *mut libc::c_void;
        }
        #[cfg(feature = "redox_syscall")]
        {
            self.inner.0.addr = ptr as usize;
        }
        #[cfg(all(windows, feature = "winapi"))]
        {
            self.inner.0.buf = ptr as *mut CHAR;
        }
        #[cfg(not(any(all(unix, feature = "libc"), all(windows, feature = "winapi"))))]
        {
            self.inner =
                core::slice::from_raw_parts(ptr as *const I::DerefTargetItem, self.__len());
        }
    }

    #[inline]
    unsafe fn __set_len(&mut self, len: usize) {
        #[cfg(all(unix, feature = "libc", not(feature = "redox_syscall")))]
        {
            self.inner.0.iov_len = len as usize;
        }
        #[cfg(feature = "redox_syscall")]
        {
            self.inner.0.len = len;
        }
        #[cfg(all(windows, feature = "winapi"))]
        {
            use core::convert::TryInto;
            self.inner.0.len = len
                .try_into()
                .expect("length exceeding 2^32 bytes, which is the limit of WSABUF");
        }
        #[cfg(not(any(all(unix, feature = "libc"), all(windows, feature = "winapi"))))]
        {
            self.inner =
                core::slice::from_raw_parts(self.__ptr() as *const I::DerefTargetItem, len);
        }
    }

    unsafe fn __construct(ptr: *const u8, len: usize) -> Self {
        #[cfg(all(windows, feature = "winapi"))]
        use core::convert::TryInto;

        Self {
            #[cfg(all(unix, feature = "libc", not(feature = "redox_syscall")))]
            inner: (
                libc::iovec {
                    iov_base: ptr as *mut libc::c_void,
                    iov_len: len as usize,
                },
PhantomData, ), #[cfg(feature = "redox_syscall")] inner: ( IoVec { addr: ptr as usize, len, }, PhantomData, ), #[cfg(all(windows, feature = "winapi"))] inner: ( WSABUF { len: len.try_into().expect( "Constructing an IoSlice that is larger than the 2^32 limit of WSABUF", ), buf: ptr as *mut CHAR, }, PhantomData, ), #[cfg(not(any(all(unix, feature = "libc"), all(windows, feature = "winapi"))))] inner: { core::slice::from_raw_parts(ptr as *const I::DerefTargetItem, len) }, _marker: PhantomData, } } #[inline] pub fn split_at(self, mid: usize) -> (Self, Self) { let (a, b) = self.inner_data().split_at(mid); (Self::from_inner_data(a), Self::from_inner_data(b)) } } impl<'a> IoSlice<'a, Init> { /// Retrieve an initialized byte slice from this I/O slice. #[inline] pub fn as_slice(&self) -> &'a [u8] { self.inner_data() } /// Convert this slice into an initialized [`std::io::IoSlice`]. /// /// _This is only available with the `std` feature enabled_. #[cfg(feature = "std")] #[inline] pub fn into_std_ioslice(self) -> std::io::IoSlice<'a> { std::io::IoSlice::new(self.as_slice()) } /// Cast a slice of I/O slices, into a slice of libstd's [`std::io::IoSlice`]. This is safe /// since they both must share the same ABI layout as [`libc::iovec`]. /// /// _This is only available with the `std` feature enabled_. #[cfg(feature = "std")] #[inline] pub fn cast_to_std_ioslices<'b>(slices: &'b [Self]) -> &'b [std::io::IoSlice<'a>] { unsafe { crate::cast_slice_same_layout(slices) } } /// Cast a mutable slice of I/O slices, into a mutable slice of libstd's [`std::io::IoSlice`]. /// This is safe since they both must share the same ABI layout as [`libc::iovec`], and since /// libstd's I/O slices have the same validity invariant as this wrapper and slices in general. /// /// _This is only available with the `std` feature enabled_. 
#[cfg(feature = "std")] #[inline] pub fn cast_to_std_ioslices_mut(slices: &'a mut [Self]) -> &'a mut [std::io::IoSlice<'a>] { unsafe { crate::cast_slice_same_layout_mut(slices) } } } impl<'a, I: InitMarker> fmt::Debug for IoSlice<'a, I> { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { if I::IS_INITIALIZED { write!(f, "{:?}", self.inner_data()) } else { write!( f, "[possibly uninitialized immutable I/O slice at {:p}, len {} bytes]", self.as_maybe_uninit_slice().as_ptr(), self.as_maybe_uninit_slice().len() ) } } } impl<'a, I: InitMarker> AsRef<[MaybeUninit<u8>]> for IoSlice<'a, I> { #[inline] fn as_ref(&self) -> &[MaybeUninit<u8>] { self.as_maybe_uninit_slice() } } impl<'a> AsRef<[u8]> for IoSlice<'a, Init> { #[inline] fn as_ref(&self) -> &[u8] { self.as_slice() } } impl<'a, I: InitMarker> Borrow<[I::DerefTargetItem]> for IoSlice<'a, I> { #[inline] fn borrow(&self) -> &[I::DerefTargetItem] { self.inner_data() } } impl<'a> Borrow<[MaybeUninit<u8>]> for IoSlice<'a, Init> { #[inline] fn borrow(&self) -> &[MaybeUninit<u8>] { self.as_maybe_uninit_slice() } } impl<'a, I: InitMarker> Deref for IoSlice<'a, I> { type Target = [I::DerefTargetItem]; #[inline] fn deref(&self) -> &Self::Target { self.inner_data() } } impl<'a, I: InitMarker> From<&'a [I::DerefTargetItem]> for IoSlice<'a, I> { #[inline] fn from(slice: &'a [I::DerefTargetItem]) -> Self { Self::from_inner_data(slice) } } impl<'a, I: InitMarker> From<&'a mut [I::DerefTargetItem]> for IoSlice<'a, I> { #[inline] fn from(slice: &'a mut [I::DerefTargetItem]) -> Self { Self::from_inner_data(&*slice) } } impl<'a> From<&'a [u8]> for IoSlice<'a, Uninit> { fn from(maybe_uninit_slice: &'a [u8]) -> Self { Self::new(maybe_uninit_slice) } } impl<'a> From<&'a mut [u8]> for IoSlice<'a, Uninit> { fn from(maybe_uninit_slice: &'a mut [u8]) -> Self { Self::new(&*maybe_uninit_slice) } } #[cfg(feature = "nightly")] impl<'a, I: InitMarker, const N: usize> From<&'a [I::DerefTargetItem; N]> for IoSlice<'a, I> { #[inline] fn from(array_ref: &'a [I::DerefTargetItem; N]) -> Self { Self::from_inner_data(&array_ref[..]) } } #[cfg(feature = "nightly")] impl<'a, I: InitMarker, const N: usize> From<&'a mut [I::DerefTargetItem; N]> for IoSlice<'a, I> { #[inline] fn from(array_ref: &'a mut [I::DerefTargetItem; N]) -> Self { Self::from_inner_data(&array_ref[..]) } } #[cfg(feature = "nightly")] impl<'a, const N: usize> From<&'a [u8; N]> for IoSlice<'a, Uninit> { #[inline] fn from(array_ref: &'a [u8; N]) -> Self { Self::new(&array_ref[..]) } } #[cfg(feature = "nightly")] impl<'a, const N: usize> From<&'a mut [u8; N]> for IoSlice<'a, Uninit> { #[inline] fn from(array_ref: &'a mut [u8; N]) -> Self { Self::new(&array_ref[..]) } } impl<'a> PartialEq for IoSlice<'a, Init> { #[inline] fn eq(&self, other: &Self) -> bool { self == other.as_slice() } } impl<'a> PartialEq<[u8]> for IoSlice<'a, Init> { #[inline] fn eq(&self, other: &[u8]) -> bool { self.as_slice() == other } } #[cfg(feature = "nightly")] impl<'a, const N: usize> PartialEq<[u8; N]> for IoSlice<'a, Init> { #[inline] fn eq(&self, other: &[u8; N]) -> bool { self == &other[..] 
} } impl<'a, 'b> PartialEq<IoSliceMut<'b, Init>> for IoSlice<'a, Init> { #[inline] fn eq(&self, other: &IoSliceMut<'b>) -> bool { self == other.as_slice() } } impl<'a> Eq for IoSlice<'a, Init> {} impl<'a> PartialOrd<[u8]> for IoSlice<'a, Init> { #[inline] fn partial_cmp(&self, other: &[u8]) -> Option<core::cmp::Ordering> { PartialOrd::partial_cmp(self.as_slice(), other) } } impl<'a, 'b> PartialOrd<IoSliceMut<'b, Init>> for IoSlice<'a, Init> { #[inline] fn partial_cmp(&self, other: &IoSliceMut<'b>) -> Option<core::cmp::Ordering> { PartialOrd::partial_cmp(self.as_slice(), other.as_slice()) } } #[cfg(feature = "nightly")] impl<'a, const N: usize> PartialOrd<[u8; N]> for IoSlice<'a, Init> { #[inline] fn partial_cmp(&self, other: &[u8; N]) -> Option<core::cmp::Ordering> { PartialOrd::partial_cmp(self.as_slice(), other) } } impl<'a> PartialOrd for IoSlice<'a, Init> { #[inline] fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> { Some(Ord::cmp(self, other)) } } impl<'a> Ord for IoSlice<'a, Init> { #[inline] fn cmp(&self, other: &Self) -> core::cmp::Ordering { Ord::cmp(self.as_slice(), other.as_slice()) } } impl<'a, I: InitMarker> Default for IoSlice<'a, I> { #[inline] fn default() -> Self { Self::new(&[]) } } impl<'a> core::hash::Hash for IoSlice<'a, Init> { #[inline] fn hash<H: core::hash::Hasher>(&self, state: &mut H) { state.write(self.as_slice()) } } #[cfg(feature = "std")] impl<'a, I: InitMarker> From<std::io::IoSlice<'a>> for IoSlice<'a, I> { #[inline] fn from(slice: std::io::IoSlice<'a>) -> Self { unsafe { Self::__construct(slice.as_ptr(), slice.len()) } } } #[cfg(feature = "std")] impl<'a, I: InitMarker> From<std::io::IoSliceMut<'a>> for IoSlice<'a, I> { #[inline] fn from(mut slice: std::io::IoSliceMut<'a>) -> Self { unsafe { Self::__construct(slice.as_mut_ptr(), slice.len()) } } } #[cfg(all(unix, feature = "libc"))] impl<'a, I: InitMarker> From<IoSlice<'a, I>> for libc::iovec { #[inline] fn from(slice: IoSlice<'a, I>) -> Self { slice.as_raw_iovec() } } #[cfg(feature = "stable_deref_trait")] unsafe impl<'a, I: InitMarker> stable_deref_trait::StableDeref for IoSlice<'a, I> {} /// A `#![no_std]`-friendly wrapper over the [`std::io::IoSliceMut`]. /// /// Internally, the struct will store the following based on crate features: /// /// * `std` - wrapping [`std::io::IoSliceMut`] directly, with accessors for it as well as conversion /// functions and From impls. /// * `libc` (with `#[cfg(unix)]` - wrapping [`libc::iovec`] directly on platforms that support it, /// together with a marker making rustc think this stores a `&'a mut [u8]`. /// * (none) - wrapping a regular slice, that may not have the same ABI guarantees as the types /// from std or libc have. #[repr(transparent)] pub struct IoSliceMut<'a, I: InitMarker = Init> { #[cfg(all(unix, feature = "libc"))] inner: (libc::iovec, PhantomData<&'a mut [I::DerefTargetItem]>), #[cfg(all(windows, feature = "winapi"))] inner: (WSABUF, PhantomData<&'a mut [I::DerefTargetItem]>), #[cfg(not(any(all(unix, feature = "libc"), all(windows, feature = "winapi"))))] inner: &'a mut [I::DerefTargetItem], _marker: PhantomData<I>, } // SAFETY: Same as the safety section of impl Send for IoSlice. unsafe impl<'a, I: InitMarker> Send for IoSliceMut<'a, I> {} // SAFETY: Same as the safety section of impl Send for IoSlice. 
unsafe impl<'a, I: InitMarker> Sync for IoSliceMut<'a, I> {} impl<'a, I: InitMarker> Unpin for IoSliceMut<'a, I> {} #[cfg(feature = "std")] impl<'a, I: InitMarker> std::panic::UnwindSafe for IoSliceMut<'a, I> {} #[cfg(feature = "std")] impl<'a, I: InitMarker> std::panic::RefUnwindSafe for IoSliceMut<'a, I> {} impl<'a, I: InitMarker> IoSliceMut<'a, I> { /// Construct a new mutable I/O slice, from an existing initialized slice. The initializedness /// is determined based on the generic parameter `I`, while the original slice obviously has to /// be initialized since its type is [`u8`] and not [`MaybeUninit<u8>`]. #[inline] pub fn new(slice: &'a mut [u8]) -> Self { unsafe { Self::__construct(slice.as_mut_ptr(), slice.len()) } } /// Unsafely cast a possibly uninitialized slice into an initialized slice. /// /// __NOTE: THIS MUST NOT BE USED FOR INITIALIZATION; THAT IS DIRECT UB__ /// /// # Safety /// /// For this to be safe, the initialization invariant must be upheld. Refer to the /// [`std::mem::MaybeUninit`] docs. #[inline] pub unsafe fn assume_init(self) -> IoSliceMut<'a, Init> { IoSliceMut::__construct(self.__ptr(), self.__len()) } /// Unsafely cast a possibly uninitialized slice into an initialized slice, by reference. /// /// # Safety /// /// This must uphold the initialization invariant. #[inline] pub unsafe fn assume_init_ref(&self) -> &IoSliceMut<'a, Init> { &*(self as *const Self as *const IoSliceMut<'a, Init>) } /// Unsafely cast a possibly uninitialized slice into an initialized slice, by mutable reference. /// /// # Safety /// /// This must uphold the initialization invariant. #[inline] pub unsafe fn assume_init_mut(&mut self) -> &mut IoSliceMut<'a, Init> { &mut *(self as *mut Self as *mut IoSliceMut<'a, Init>) } /// Cast an I/O slice, being [`Init`] or not, into an [`Uninit`] I/O slice. #[inline] pub fn into_uninit(self) -> IoSliceMut<'a, Uninit> { unsafe { IoSliceMut::__construct(self.__ptr(), self.__len()) } } /// Cast an I/O slice, being [`Init`] or not, into an [`Uninit`] I/O slice, by /// reference. #[inline] pub fn as_uninit(&self) -> &IoSliceMut<'a, Uninit> { unsafe { &*(self as *const Self as *const IoSliceMut<'a, Uninit>) } } /// Cast an I/O slice, being [`Init`] or not, into an [`Uninit`] I/O slice, by /// mutable reference. #[inline] pub fn as_uninit_mut(&mut self) -> &mut IoSliceMut<'a, Uninit> { unsafe { &mut *(self as *mut Self as *mut IoSliceMut<'a, Uninit>) } } /// Cast any slice of I/O slice into its uninitialized counterpart. #[inline] pub fn cast_to_uninit_slices(selves: &[Self]) -> &[IoSliceMut<'a, Uninit>] { unsafe { crate::cast_slice_same_layout(selves) } } /// Cast any mutable slice of I/O slice into its uninitialized counterpart. /// /// # Safety /// /// The returned slice must not be used to de-initialize any data. #[inline] pub unsafe fn cast_to_uninit_slices_mut(selves: &mut [Self]) -> &mut [IoSliceMut<'a, Uninit>] { crate::cast_slice_same_layout_mut(selves) } /// Cast any slice of I/O slice into its uninitialized counterpart. /// /// # Safety /// /// The initialization invariant must be upheld. #[inline] pub unsafe fn cast_to_init_slices(selves: &[Self]) -> &[IoSliceMut<'a, Init>] { crate::cast_slice_same_layout(selves) } /// Cast any mutable slice of I/O slice into its uninitialized counterpart. /// /// # Safety /// /// The initialization invariant must be upheld. 
#[inline] pub unsafe fn cast_to_init_slices_mut(selves: &mut [Self]) -> &mut [IoSliceMut<'a, Init>] { crate::cast_slice_same_layout_mut(selves) } /// Wrap a system [`libc::iovec`] into this wrapper. /// /// _This is only available on Unix targets with the `libc` feature enabled_. /// /// # Safety /// /// For this to be safe, the validity invariant must be upheld, which takes things like size, /// alignment, concurrent use, etc. in parallel. In short, the slice must be considered mutably /// borrowed, and it must be safe to assume that it will not outlive the lifetime `'a`. Refer /// to the [`std::ptr`] docs for more information regarding validity. /// /// Additionally, if the `I` generic parameter is [`Init`], the iovec must also point to /// initialized data. #[cfg(all(unix, feature = "libc"))] #[inline] pub unsafe fn from_raw_iovec(slice: libc::iovec) -> Self { Self { inner: (slice, PhantomData), _marker: PhantomData, } } /// Wrap a system `WSABUF` into this wrapper. /// /// _This is only available on Windows targets with the `winapi` feature enabled._ #[cfg(all(windows, feature = "winapi"))] #[inline] pub unsafe fn from_raw_wsabuf(slice: WSABUF) -> Self { Self { inner: (slice, PhantomData), _marker: PhantomData, } } /// Retrieve the wrapped raw [`libc::iovec ] from this wrapper. /// /// The resulting slice is considered immutable, even though it is neither UB nor more unsafe /// than [`as_raw_iovecs_mut`]. This simply exists to prevent accidentally obtaining a /// "mutable" [`libc::iovec`] where that is not possible (e.g. inside an [`std::sync::Arc`]). /// /// [`as_raw_iovecs_mut`]: #method.as_raw_iovecs_mut #[cfg(all(unix, feature = "libc"))] #[inline] pub fn as_raw_iovec(&self) -> libc::iovec { self.inner.0 } /// Retrieve the wrapped raw [`libc::iovec ] from this wrapper, requiring exclusive access of /// this slice, to obtain. #[cfg(all(unix, feature = "libc"))] #[inline] pub fn as_raw_iovec_mut(&mut self) -> libc::iovec { self.inner.0 } #[cfg(all(windows, feature = "winapi"))] #[inline] pub fn as_raw_wsabuf(&self) -> WSABUF { self.inner.0 } #[cfg(all(windows, feature = "winapi"))] #[inline] pub fn as_raw_wsabuf_mut(&mut self) -> WSABUF { self.inner.0 } /// Cast a slice of wrapped I/O slices into a slice of [`libc::iovec`]s. #[cfg(all(unix, feature = "libc"))] #[inline] pub fn cast_to_raw_iovecs(slices: &[Self]) -> &[libc::iovec] { unsafe { crate::cast_slice_same_layout(slices) } } /// Cast a slice of wrapped I/O slices into a slice of `WSABUF`s. #[cfg(all(windows, feature = "winapi"))] #[inline] pub fn cast_to_raw_wsabufs(slices: &[Self]) -> &[WSABUF] { unsafe { crate::cast_slice_same_layout(slices) } } /// Unsafely cast a mutable slice of wrapped I/O slices into a mutable slice of /// [`libc::iovec`]s. /// /// # Safety /// /// This is unsafe because the initialization or validity invariants may be broken since the /// iovecs can be changed arbitrarily in a mutable reference. #[cfg(all(unix, feature = "libc"))] #[inline] pub unsafe fn cast_to_raw_iovecs_mut(slices: &mut [Self]) -> &mut [libc::iovec] { crate::cast_slice_same_layout_mut(slices) } /// Unsafely cast a mutable slice of wrapped I/O slices into a mutable slice of /// `WSABUF`s. /// /// # Safety /// /// This is unsafe because the initialization or validity invariants may be broken since the /// WSABUFs can be changed arbitrarily in a mutable reference. 
#[cfg(all(windows, feature = "winapi"))] #[inline] pub unsafe fn cast_to_raw_wsabufs_mut(slices: &mut [Self]) -> &mut [WSABUF] { cast_slice_same_layout_mut(slices) } /// Unsafely cast a slice of [`libc::iovec`]s into a slice of [`IoSliceMut`]. /// /// _This is only available on Unix platforms with the `libc` feature enabled._ /// /// # Safety /// /// This is unsafe since the iovecs must uphold the validity and initialization invariants. #[cfg(all(unix, feature = "libc"))] #[inline] pub unsafe fn from_raw_iovecs(slice: &[libc::iovec]) -> &[Self] { crate::cast_slice_same_layout(slice) } /// Unsafely cast a mutable slice of [`libc::iovec`]s into a mutable slice of [`IoSliceMut`]. /// /// _This is only available on Unix platforms with the `libc` feature enabled._ /// /// # Safety /// /// This is unsafe since the iovecs must uphold the validity and initialization invariants. #[cfg(all(unix, feature = "libc"))] #[inline] pub unsafe fn from_raw_iovecs_mut(slice: &mut [libc::iovec]) -> &mut [Self] { crate::cast_slice_same_layout_mut(slice) } /// Unsafely cast a slice of `WSABUF`s into a slice of [`IoSliceMut`]. /// /// _This is only available on Windows platforms with the `winapi` feature enabled._ /// /// # Safety /// /// This is unsafe since the buffers must uphold the validity and initialization invariants. #[cfg(all(windows, feature = "winapi"))] #[inline] pub unsafe fn from_raw_wsabufs(slice: &[WSABUF]) -> &[Self] { cast_slice_same_layout(slice) } /// Unsafely cast a mutable slice of `WSABUF`s into a mutable slice of [`IoSliceMut`]. /// /// _This is only available on Windows platforms with the `winapi` feature enabled._ /// /// # Safety /// /// This is unsafe since the buffers must uphold the validity and initialization invariants. #[cfg(all(windows, feature = "winapi"))] #[inline] pub unsafe fn from_raw_wsabufs_mut(slice: &mut [WSABUF]) -> &mut [Self] { cast_slice_same_layout_mut(slice) } /// Advance the start offset of a single slice by `count` bytes, reducing the length as well. /// /// # Panics /// /// This will panic if `count` is greater than the current length. #[inline] pub fn advance(&mut self, count: usize) { unsafe { self.__set_len( self.__len() .checked_sub(count) .expect("IoSlice::advance causes length to overflow"), ); self.__set_ptr(self.__ptr().add(count)); } } /// Advance multiple slices by `n`, skipping and truncating slices until there are `n` less /// total bytes. /// /// They are always advanced from start to end, and only the last slice will actually be /// changed if the count turned out to be uneven. `None` is returned if `n` turns out to be /// greater than the total length of the slices, so that counting beforehand becomes /// unnecessary. #[must_use] #[inline] pub fn advance_within(mut slices: &mut [Self], mut n: usize) -> Option<&mut [Self]> { while let Some(buffer) = slices.first_mut() { if n == 0 { return Some(slices); }; let buffer_len = buffer.len(); if buffer_len > n { buffer.advance(n); } else { slices = &mut slices[1..]; } n -= core::cmp::min(buffer_len, n); } if n > 0 { return None; } Some(slices) } /// Retrieve the "inner data" immutably, pointed to by the I/O slice, being to either `&[u8]` /// or `&[MaybeUninit<u8>]` depending on the generic type parameter `I`. 
    #[inline]
    pub fn inner_data(&self) -> &[I::DerefTargetItem] {
        unsafe {
            core::slice::from_raw_parts(self.__ptr() as *const I::DerefTargetItem, self.__len())
        }
    }

    /// Retrieve the "inner data" mutably, pointed to by the I/O slice, which is either
    /// `&mut [u8]` or `&mut [MaybeUninit<u8>]` depending on the generic type parameter `I`.
    #[inline]
    pub fn inner_data_mut(&mut self) -> &mut [I::DerefTargetItem] {
        unsafe {
            core::slice::from_raw_parts_mut(self.__ptr() as *mut I::DerefTargetItem, self.__len())
        }
    }

    /// Get the "inner data" mutably, but with the lifetime `'a` rather than the lifetime of
    /// `self`.
    #[inline]
    pub fn into_inner_data(self) -> &'a mut [I::DerefTargetItem] {
        unsafe {
            core::slice::from_raw_parts_mut(self.__ptr() as *mut I::DerefTargetItem, self.__len())
        }
    }

    /// Convert a regular slice that points to either `u8` or `MaybeUninit<u8>`, into
    /// [`IoSliceMut`].
    #[inline]
    pub fn from_inner_data(inner_data: &'a mut [I::DerefTargetItem]) -> Self {
        unsafe { Self::__construct(inner_data.as_mut_ptr() as *mut u8, inner_data.len()) }
    }

    #[inline]
    pub fn as_maybe_uninit_slice(&self) -> &[MaybeUninit<u8>] {
        self.as_uninit().inner_data()
    }

    #[inline]
    pub fn as_maybe_uninit_slice_mut(&mut self) -> &mut [MaybeUninit<u8>] {
        self.as_uninit_mut().inner_data_mut()
    }

    #[inline]
    #[must_use]
    pub fn zeroed_by_ref<'b>(&'b mut self) -> &'b mut IoSliceMut<'a, Init> {
        self.as_maybe_uninit_slice_mut().fill(MaybeUninit::new(0));
        unsafe { self.assume_init_mut() }
    }

    #[inline]
    fn __ptr(&self) -> *mut u8 {
        #[cfg(all(unix, feature = "libc"))]
        return self.inner.0.iov_base as *mut u8;

        #[cfg(all(windows, feature = "winapi"))]
        return self.inner.0.buf as *mut u8;

        #[cfg(not(any(all(unix, feature = "libc"), all(windows, feature = "winapi"))))]
        return self.inner.as_ptr() as *mut u8;
    }

    #[inline]
    fn __len(&self) -> usize {
        #[cfg(all(unix, feature = "libc"))]
        return self.inner.0.iov_len as usize;

        #[cfg(all(windows, feature = "winapi"))]
        return self.inner.0.len as usize;

        #[cfg(not(any(all(unix, feature = "libc"), all(windows, feature = "winapi"))))]
        return self.inner.len();
    }

    #[inline]
    unsafe fn __set_ptr(&mut self, ptr: *mut u8) {
        #[cfg(all(unix, feature = "libc"))]
        {
            self.inner.0.iov_base = ptr as *mut libc::c_void;
        }
        #[cfg(all(windows, feature = "winapi"))]
        {
            self.inner.0.buf = ptr as *mut CHAR;
        }
        #[cfg(not(any(all(unix, feature = "libc"), all(windows, feature = "winapi"))))]
        {
            self.inner =
                core::slice::from_raw_parts_mut(ptr as *mut I::DerefTargetItem, self.__len());
        }
    }

    #[inline]
    unsafe fn __set_len(&mut self, len: usize) {
        #[cfg(all(unix, feature = "libc"))]
        {
            self.inner.0.iov_len = len as usize;
        }
        #[cfg(all(windows, feature = "winapi"))]
        {
            use core::convert::TryInto;
            self.inner.0.len = len
                .try_into()
                .expect("length exceeding 2^32 bytes, which is the limit of WSABUF");
        }
        #[cfg(not(any(all(unix, feature = "libc"), all(windows, feature = "winapi"))))]
        {
            self.inner =
                core::slice::from_raw_parts_mut(self.__ptr() as *mut I::DerefTargetItem, len);
        }
    }

    #[inline]
    unsafe fn __construct(ptr: *mut u8, len: usize) -> Self {
        #[cfg(all(windows, feature = "winapi"))]
        use core::convert::TryInto;

        Self {
            #[cfg(all(unix, feature = "libc"))]
            inner: (
                libc::iovec {
                    iov_base: ptr as *mut libc::c_void,
                    iov_len: len as usize,
                },
                PhantomData,
            ),
            #[cfg(all(windows, feature = "winapi"))]
            inner: (
                WSABUF {
                    len: len.try_into().expect(
                        "constructing an IoSliceMut that is larger than the 2^32 limit of WSABUF",
                    ),
                    buf: ptr as *mut CHAR,
                },
                PhantomData,
            ),
            #[cfg(not(any(all(unix, feature = "libc"), all(windows, feature = "winapi"))))]
            inner: {
#[inline]
unsafe fn __construct(ptr: *mut u8, len: usize) -> Self {
    #[cfg(all(windows, feature = "winapi"))]
    use core::convert::TryInto;

    Self {
        #[cfg(all(unix, feature = "libc"))]
        inner: (
            libc::iovec {
                iov_base: ptr as *mut libc::c_void,
                iov_len: len as usize,
            },
            PhantomData,
        ),
        #[cfg(all(windows, feature = "winapi"))]
        inner: (
            WSABUF {
                len: len.try_into().expect(
                    "constructing an IoSliceMut that is larger than the 2^32 limit of WSABUF",
                ),
                buf: ptr as *mut CHAR,
            },
            PhantomData,
        ),
        #[cfg(not(any(all(unix, feature = "libc"), all(windows, feature = "winapi"))))]
        inner: { core::slice::from_raw_parts_mut(ptr as *mut I::DerefTargetItem, len) },
        _marker: PhantomData,
    }
}

/// Split the I/O slice into two non-overlapping halves at index `mid`, consuming `self`.
#[inline]
pub fn split_at(self, mid: usize) -> (Self, Self) {
    let (a, b) = self.into_inner_data().split_at_mut(mid);
    (Self::from_inner_data(a), Self::from_inner_data(b))
}
}

impl<'a> IoSliceMut<'a, Uninit> {
    /// Create an uninitialized mutable I/O slice from a regular uninitialized mutable byte slice.
    pub fn from_uninit(uninit: &'a mut [MaybeUninit<u8>]) -> Self {
        Self::from_inner_data(uninit)
    }
}

impl<'a> IoSliceMut<'a, Init> {
    /// Retrieve the inner slice immutably. This requires the I/O slice to be initialized.
    ///
    /// Unlike the immutable [`IoSlice`], this will require a secondary lifetime of `self`, to
    /// prevent aliasing when the data can be mutated. If it is necessary to obtain a byte slice
    /// with lifetime `'a`, use [`into_slice`].
    ///
    /// [`into_slice`]: #method.into_slice
    #[inline]
    pub fn as_slice(&self) -> &[u8] {
        unsafe { crate::cast_slice_same_layout(self.inner_data()) }
    }

    /// Take an [`IoSliceMut`] by value, turning it into an immutable byte slice of lifetime `'a`.
    #[inline]
    pub fn into_slice(self) -> &'a [u8] {
        &*self.into_slice_mut()
    }

    /// Retrieve the inner slice mutably. This requires the I/O slice to be initialized.
    ///
    /// Note that unlike [`into_slice_mut`], this will have the lifetime of `self`, not `'a`.
    ///
    /// [`into_slice_mut`]: #method.into_slice_mut
    #[inline]
    pub fn as_slice_mut(&mut self) -> &mut [u8] {
        unsafe { crate::cast_slice_same_layout_mut(self.inner_data_mut()) }
    }

    /// Take an [`IoSliceMut`] by value, turning it into a mutable byte slice of lifetime `'a`.
    #[inline]
    pub fn into_slice_mut(self) -> &'a mut [u8] {
        unsafe { core::slice::from_raw_parts_mut(self.__ptr(), self.__len()) }
    }

    // TODO: conversion by reference between std I/O slices

    /// Cast `&[IoSliceMut]` to `&[std::io::IoSlice]`.
    #[cfg(feature = "std")]
    #[inline]
    pub fn cast_to_std_ioslices<'b>(slices: &'b [Self]) -> &'b [std::io::IoSlice<'a>] {
        unsafe { crate::cast_slice_same_layout(slices) }
    }

    /// Cast `&[IoSliceMut]` to `&[std::io::IoSliceMut]`.
    #[cfg(feature = "std")]
    #[inline]
    pub fn cast_to_std_mut_ioslices<'b>(slices: &'b [Self]) -> &'b [std::io::IoSliceMut<'a>] {
        unsafe { crate::cast_slice_same_layout(slices) }
    }

    /// Cast `&mut [IoSliceMut]` to `&mut [std::io::IoSlice]`.
    #[cfg(feature = "std")]
    #[inline]
    pub fn cast_to_std_ioslices_mut<'b>(slices: &'b mut [Self]) -> &'b mut [std::io::IoSlice<'a>] {
        unsafe { crate::cast_slice_same_layout_mut(slices) }
    }

    /// Cast `&mut [IoSliceMut]` to `&mut [std::io::IoSliceMut]`.
    #[cfg(feature = "std")]
    #[inline]
    pub fn cast_to_std_mut_ioslices_mut<'b>(
        slices: &'b mut [Self],
    ) -> &'b mut [std::io::IoSliceMut<'a>] {
        unsafe { crate::cast_slice_same_layout_mut(slices) }
    }
}
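// Illustrative sketch (an addition, not original to the crate): the casts above
// make gather-writes through the standard library free of any per-call
// conversion. `writer` (a `std::io::Write` implementor) and the byte arrays
// `first`/`second` are assumed names for the example.
//
//     use std::io::Write;
//
//     let mut bufs = [IoSliceMut::new(&mut first), IoSliceMut::new(&mut second)];
//     let std_bufs = IoSliceMut::cast_to_std_ioslices(&bufs);
//     let written = writer.write_vectored(std_bufs)?;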
impl<'a, I: InitMarker> Borrow<[MaybeUninit<u8>]> for IoSliceMut<'a, I> { #[inline] fn borrow(&self) -> &[MaybeUninit<u8>] { self.as_maybe_uninit_slice() } } impl<'a> Borrow<[u8]> for IoSliceMut<'a, Init> { #[inline] fn borrow(&self) -> &[u8] { self.as_slice() } } impl<'a, I: InitMarker> Deref for IoSliceMut<'a, I> { type Target = [I::DerefTargetItem]; #[inline] fn deref(&self) -> &Self::Target { self.inner_data() } } impl<'a> AsMut<[u8]> for IoSliceMut<'a, Init> { #[inline] fn as_mut(&mut self) -> &mut [u8] { self.as_slice_mut() } } impl<'a, I: InitMarker> AsMut<[MaybeUninit<u8>]> for IoSliceMut<'a, I> { #[inline] fn as_mut(&mut self) -> &mut [MaybeUninit<u8>] { self.as_maybe_uninit_slice_mut() } } impl<'a, I: InitMarker> BorrowMut<[MaybeUninit<u8>]> for IoSliceMut<'a, I> { #[inline] fn borrow_mut(&mut self) -> &mut [MaybeUninit<u8>] { self.as_maybe_uninit_slice_mut() } } impl<'a> BorrowMut<[u8]> for IoSliceMut<'a, Init> { #[inline] fn borrow_mut(&mut self) -> &mut [u8] { self.as_slice_mut() } } impl<'a, I: InitMarker> DerefMut for IoSliceMut<'a, I> { #[inline] fn deref_mut(&mut self) -> &mut Self::Target { self.inner_data_mut() } } #[cfg(all(unix, feature = "libc"))] impl<'a, I: InitMarker> From<IoSliceMut<'a, I>> for libc::iovec { #[inline] fn from(slice: IoSliceMut<'a, I>) -> Self { slice.as_raw_iovec() } } impl<'a, I: InitMarker> fmt::Debug for IoSliceMut<'a, I> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { if I::IS_INITIALIZED { write!(f, "{:?}", self.inner_data()) } else { write!( f, "[possibly uninitialized mutable I/O slice at {:p}, len {} bytes]", self.as_maybe_uninit_slice().as_ptr(), self.as_maybe_uninit_slice().len() ) } } } impl<'a> PartialEq for IoSliceMut<'a, Init> { #[inline] fn eq(&self, other: &Self) -> bool { self.as_slice() == other.as_slice() } } impl<'a> PartialEq<[u8]> for IoSliceMut<'a, Init> { #[inline] fn eq(&self, other: &[u8]) -> bool { self.as_slice() == other } } impl<'a, 'b> PartialEq<&'b [u8]> for IoSliceMut<'a, Init> { #[inline] fn eq(&self, other: &&'b [u8]) -> bool { self.as_slice() == *other } } impl<'a, 'b> PartialEq<IoSlice<'b, Init>> for IoSliceMut<'a, Init> { #[inline] fn eq(&self, other: &IoSlice<'b>) -> bool { self.as_slice() == other.as_slice() } } #[cfg(feature = "nightly")] impl<'a, const N: usize> PartialEq<[u8; N]> for IoSliceMut<'a, Init> { #[inline] fn eq(&self, other: &[u8; N]) -> bool { self.as_slice() == &other[..] 
} } impl<'a> Eq for IoSliceMut<'a, Init> {} impl<'a> PartialOrd for IoSliceMut<'a, Init> { #[inline] fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> { Some(Ord::cmp(self, other)) } } impl<'a> PartialOrd<[u8]> for IoSliceMut<'a, Init> { #[inline] fn partial_cmp(&self, other: &[u8]) -> Option<core::cmp::Ordering> { PartialOrd::partial_cmp(self.as_slice(), other) } } impl<'a, 'b> PartialOrd<IoSlice<'b, Init>> for IoSliceMut<'a, Init> { #[inline] fn partial_cmp(&self, other: &IoSlice<'b, Init>) -> Option<core::cmp::Ordering> { PartialOrd::partial_cmp(self.as_slice(), other.as_slice()) } } #[cfg(feature = "nightly")] impl<'a, const N: usize> PartialOrd<[u8; N]> for IoSliceMut<'a, Init> { #[inline] fn partial_cmp(&self, other: &[u8; N]) -> Option<core::cmp::Ordering> { PartialOrd::partial_cmp(self.as_slice(), other) } } impl<'a> Ord for IoSliceMut<'a, Init> { #[inline] fn cmp(&self, other: &Self) -> core::cmp::Ordering { Ord::cmp(self.as_slice(), other.as_slice()) } } impl<'a> core::hash::Hash for IoSliceMut<'a, Init> { #[inline] fn hash<H: core::hash::Hasher>(&self, state: &mut H) { state.write(self.as_slice()) } } impl<'a, I: InitMarker> From<&'a mut [I::DerefTargetItem]> for IoSliceMut<'a, I> { #[inline] fn from(slice: &'a mut [I::DerefTargetItem]) -> Self { Self::from_inner_data(slice) } } impl<'a> From<&'a mut [u8]> for IoSliceMut<'a, Uninit> { #[inline] fn from(slice: &'a mut [u8]) -> Self { Self::new(slice) } } #[cfg(feature = "nightly")] impl<'a, I: InitMarker, const N: usize> From<&'a mut [I::DerefTargetItem; N]> for IoSliceMut<'a, I> { #[inline] fn from(slice: &'a mut [I::DerefTargetItem; N]) -> Self { Self::from_inner_data(slice) } } #[cfg(feature = "stable_deref_trait")] unsafe impl<'a> stable_deref_trait::StableDeref for IoSliceMut<'a> {} #[cfg(feature = "alloc")] mod io_box { use super::*; #[cfg(any(all(unix, feature = "libc"), all(windows, feature = "winapi")))] use alloc::alloc::dealloc as deallocate; use alloc::alloc::{alloc as allocate, alloc_zeroed as allocate_zeroed, Layout}; use alloc::boxed::Box; use alloc::vec::Vec; /// An owned chunk of memory, that is ABI-compatible with [`libc::iovec`] or `WSABUF`, /// depending on the platform and Cargo features used. /// /// This must be allocated via the system alloc; importing pointers from _malloc(2)_ is /// currently not possible. #[repr(transparent)] pub struct IoBox<I: InitMarker = Init> { #[cfg(all(unix, feature = "libc"))] inner: libc::iovec, #[cfg(all(windows, feature = "winapi"))] inner: WSABUF, #[cfg(not(any(all(unix, feature = "libc"), all(windows, feature = "winapi"))))] inner: Box<[I::DerefTargetItem]>, _marker: PhantomData<I>, } /// An error that may occur if allocating an I/O box fails. /// /// This will most likely never occur on real operating systems, but being able to handle this /// error is crucial when working in resource-limited environments, or in e.g. OS kernels. #[derive(Debug)] pub struct AllocationError(Layout); impl AllocationError { /// Retrieve the layout that the allocator failed to allocate. 
#[inline] pub fn layout(&self) -> &Layout { &self.0 } } impl fmt::Display for AllocationError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!( f, "failed to allocate {} bytes on a {}-byte alignment for buffer", self.layout().size(), self.layout().align() ) } } #[cfg(feature = "std")] impl std::error::Error for AllocationError {} impl<I: InitMarker> IoBox<I> { // TODO: While really niche (except maybe for O_DIRECT where buffers need to be // page-aligned?), one should also be able to directly specify a layout. fn try_alloc_inner<J: InitMarker>( length: usize, zeroed: bool, ) -> Result<IoBox<J>, AllocationError> { let layout = Layout::from_size_align( core::mem::size_of::<u8>() .checked_mul(length) .expect("overflow when multiplying length with size of u8"), core::mem::align_of::<u8>(), ) .expect("error when creating allocation layout"); #[cfg(all(windows, feature = "winapi"))] if length > u32::MAX as usize { panic!("IoBox (or any WSABUF-based I/O slice) cannot be larger in size than ULONG, which is 32 bits on Windows."); } let pointer = match zeroed { false => unsafe { allocate(layout) }, true => unsafe { allocate_zeroed(layout) }, }; if pointer.is_null() { return Err(AllocationError(layout)); } Ok(unsafe { IoBox::__construct(pointer as *mut J::DerefTargetItem, length) }) } /// Attempt to allocate `length` bytes, which are initially set to zero. /// /// This allocation may fail, but should not be used unless the global allocator actually /// does return null when there is no memory. This is generally not the case for userspace /// processes, where the kernel gives more memory than physically available, but is /// obviously useful in `#![no_std]`. /// /// Since the allocator may be able to already have zeroed blocks of memory, this should be /// preferred over manually initializing it using [`zeroed`]. /// /// # Panics /// /// This associated function will panic on Windows platforms when using the `winapi` /// feature, if the length exceeds the `WSABUF` limit of 2^32 bytes. Always check /// beforehand; this will never be returned as a regular allocation error. /// /// [`zeroed`]: #method.zeroed #[inline] pub fn try_alloc_zeroed(length: usize) -> Result<Self, AllocationError> { Self::try_alloc_inner(length, true) } /// Allocate `length` bytes, which are initially set to zero. /// /// # Panics /// /// This associated function will, like most other heap-allocated structures in the `alloc` /// crate, panic when there is no available memory left. On Windows platforms with using /// the `winapi` feature, this will also panic if the length exceeds the `WSABUF` limit of /// 2^32 bytes. #[inline] pub fn alloc_zeroed(length: usize) -> Self { match Self::try_alloc_zeroed(length) { Ok(boxed) => boxed, Err(AllocationError(layout)) => alloc::alloc::handle_alloc_error(layout), } } /// Turn the I/O box into the underlying pointer and size. #[inline] pub fn into_raw_parts(self) -> (*mut u8, usize) { let ptr = self.__ptr(); let len = self.__len(); core::mem::forget(self); (ptr as *mut u8, len) } /// Convert an underlying pointer and size, into an [`IoBox`]. /// /// # Safety /// /// For this to be safe, the validity and initialization invariants must be held. In /// addition to that, the pointer must be allocated using the system allocator. 
#[inline]
pub unsafe fn from_raw_parts(base: *mut I::DerefTargetItem, len: usize) -> Self {
    Self::__construct(base, len)
}

/// Turn the I/O box into its raw `iovec`, without deallocating the buffer.
#[cfg(all(unix, feature = "libc"))]
pub fn into_raw_iovec(self) -> libc::iovec {
    let iovec = self.inner;
    core::mem::forget(self);
    iovec
}

/// Turn the I/O box into its raw `WSABUF`, without deallocating the buffer.
#[cfg(all(windows, feature = "winapi"))]
pub fn into_raw_wsabuf(self) -> WSABUF {
    let wsabuf = self.inner;
    core::mem::forget(self);
    wsabuf
}

/// Convert the I/O box into a regular boxed slice.
#[inline]
pub fn into_box(self) -> Box<[I::DerefTargetItem]> {
    let (ptr, len) = self.into_raw_parts();

    unsafe {
        Box::from_raw(core::slice::from_raw_parts_mut(
            ptr as *mut I::DerefTargetItem,
            len,
        ))
    }
}

/// Borrow the I/O box as an immutable [`IoSlice`].
#[inline]
pub fn as_ioslice(&self) -> IoSlice<I> {
    IoSlice::from_inner_data(self.inner_data())
}

/// Borrow the I/O box as a mutable [`IoSliceMut`].
#[inline]
pub fn as_ioslice_mut(&mut self) -> IoSliceMut<I> {
    IoSliceMut::from_inner_data(self.inner_data_mut())
}

#[inline]
pub fn inner_data(&self) -> &[I::DerefTargetItem] {
    unsafe {
        core::slice::from_raw_parts(self.__ptr() as *const I::DerefTargetItem, self.__len())
    }
}

#[inline]
pub fn inner_data_mut(&mut self) -> &mut [I::DerefTargetItem] {
    unsafe {
        core::slice::from_raw_parts_mut(self.__ptr() as *mut I::DerefTargetItem, self.__len())
    }
}

/// Cast `&[IoBox]` to `&[IoSlice]`.
#[inline]
pub fn cast_to_ioslices(these: &[Self]) -> &[IoSlice<I>] {
    unsafe { crate::cast_slice_same_layout(these) }
}

/// Cast `&mut [IoBox]` to `&mut [IoSlice]`.
///
/// # Safety
///
/// Mutable access to the target slices makes it possible to change the pointers, which are
/// later passed to the deallocator in the `Drop` code. Unless the pointers are restored
/// before the boxes are dropped, that is undefined behavior, which is why this function is
/// marked "unsafe".
///
/// Refer to [`cast_to_mut_ioslices_mut`].
///
/// [`cast_to_mut_ioslices_mut`]: #method.cast_to_mut_ioslices_mut
#[inline]
pub unsafe fn cast_to_ioslices_mut(these: &mut [Self]) -> &mut [IoSlice<I>] {
    crate::cast_slice_same_layout_mut(these)
}

/// Cast `&[IoBox]` to `&[IoSliceMut]`.
#[inline]
pub fn cast_to_mut_ioslices(these: &[Self]) -> &[IoSliceMut<I>] {
    unsafe { crate::cast_slice_same_layout(these) }
}

/// Cast `&mut [IoBox]` to `&mut [IoSliceMut]`.
///
/// # Safety
///
/// A mutable slice that mirrors these boxes makes it possible to change the start pointers
/// and lengths, for example by advancing the slices or replacing them entirely, which would
/// make the `Drop` code deallocate with the wrong pointers and thus cause undefined
/// behavior. The caller must ensure that any pointers are changed back to what they were
/// previously, before the drop code is run.
#[inline]
pub unsafe fn cast_to_mut_ioslices_mut(these: &mut [Self]) -> &mut [IoSliceMut<I>] {
    crate::cast_slice_same_layout_mut(these)
}
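// Illustrative sketch (an addition, not original to the crate): the usual way
// to sidestep the safety contract above is to collect owned `IoSliceMut` views
// instead, which can be advanced freely without ever touching the pointers
// that `Drop` deallocates. `boxes: Vec<IoBox>` is an assumed name.
//
//     let mut views: Vec<IoSliceMut> =
//         boxes.iter_mut().map(|b| b.as_ioslice_mut()).collect();
//     // `views` may now be advanced or truncated; `boxes` itself stays intact.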
/// Convert `IoBox<_>` into `IoBox<Init>`, assuming that the data is initialized. /// /// # Safety /// /// __This shall not be used for initializing data. In that case, initialize it manually /// via [`as_maybe_uninit_slice_mut`], and then call this.__ /// /// While the validity invariant is already upheld when creating this type, the caller must /// ensure that the data be initialized. Refer to the [`std::mem::MaybeUninit`] docs. /// /// [`as_maybe_uninit_slice_mut`]: #method.as_maybe_uninit_slice_mut #[inline] pub unsafe fn assume_init(self) -> IoBox<Init> { let (ptr, len) = self.into_raw_parts(); IoBox::from_raw_parts(ptr, len) } #[inline] pub fn as_maybe_uninit_slice(&self) -> &[MaybeUninit<u8>] { unsafe { crate::cast_slice_same_layout(self.inner_data()) } } #[inline] pub fn as_maybe_uninit_slice_mut(&mut self) -> &mut [MaybeUninit<u8>] { unsafe { crate::cast_slice_same_layout_mut(self.inner_data_mut()) } } #[inline] pub fn into_uninit(self) -> IoBox<Uninit> { unsafe { let (ptr, len) = self.into_raw_parts(); IoBox::from_raw_parts(ptr as *mut MaybeUninit<u8>, len) } } #[inline] pub fn into_uninit_box(self) -> Box<[MaybeUninit<u8>]> { self.into_uninit().into_box() } fn __ptr(&self) -> *mut I::DerefTargetItem { #[cfg(all(unix, feature = "libc"))] { self.inner.iov_base as *mut I::DerefTargetItem } #[cfg(all(windows, feature = "winapi"))] { self.inner.buf as *mut I::DerefTargetItem } #[cfg(not(any(all(unix, feature = "libc"), all(windows, feature = "winapi"))))] { self.inner.as_ptr() as *mut I::DerefTargetItem } } #[inline] fn __len(&self) -> usize { #[cfg(all(unix, feature = "libc"))] { self.inner.iov_len as usize } #[cfg(all(windows, feature = "winapi"))] { self.inner.len as usize } #[cfg(not(any(all(unix, feature = "libc"), all(windows, feature = "winapi"))))] { self.inner.len() } } #[inline] unsafe fn __construct(ptr: *mut I::DerefTargetItem, len: usize) -> Self { Self { #[cfg(all(unix, feature = "libc"))] inner: libc::iovec { iov_base: ptr as *mut libc::c_void, iov_len: len, }, #[cfg(all(windows, feature = "winapi"))] inner: WSABUF { buf: ptr as *mut CHAR, len: len as ULONG, }, #[cfg(not(any(all(unix, feature = "libc"), all(windows, feature = "winapi"))))] inner: Box::from_raw(core::slice::from_raw_parts_mut(ptr, len)), _marker: PhantomData, } } } impl IoBox<Uninit> { /// Attempt to allocate `length` bytes, returning either an uninitialized heap-allocated /// buffer, or an error if the allocator was unable to allocate that many bytes. Note that /// unless called in an `#![no_std]` environment, the OS will likely give more memory than /// physically present, so prefer [`alloc_uninit`] instead in that case. /// /// # Panics /// /// This associated function will panic on Windows platforms when using the `winapi` /// feature, if the length exceeds the `WSABUF` limit of 2^32 bytes. Always check /// beforehand; this will never be returned as a regular allocation error. /// /// [`alloc_uninit`]: #method.alloc_uninit #[inline] pub fn try_alloc_uninit(length: usize) -> Result<IoBox<Uninit>, AllocationError> { Self::try_alloc_inner(length, false) } /// Allocate `length` uninitialized bytes. /// /// # Panics /// /// This associated function will panic if out of memory, or if the length is greater than /// 2^32 on Windows platforms. 
#[inline] pub fn alloc_uninit(length: usize) -> IoBox<Uninit> { match Self::try_alloc_uninit(length) { Ok(boxed) => boxed, Err(AllocationError(layout)) => alloc::alloc::handle_alloc_error(layout), } } } impl IoBox<Init> { #[inline] pub fn as_slice(&self) -> &[u8] { self.inner_data() } #[inline] pub fn as_slice_mut(&mut self) -> &mut [u8] { self.inner_data_mut() } } impl<I: InitMarker> Drop for IoBox<I> { fn drop(&mut self) { #[cfg(any(all(unix, feature = "libc"), all(windows, feature = "winapi")))] unsafe { deallocate( self.__ptr() as *mut u8, Layout::from_size_align( self.__len() .checked_mul(core::mem::size_of::<u8>()) .expect("overflow on multiplication that should be a no-op"), core::mem::align_of::<u8>(), ) .expect("failed to deallocate due to invalid layout"), ); } } } impl<I: InitMarker> From<Box<[I::DerefTargetItem]>> for IoBox<I> { #[inline] fn from(boxed: Box<[I::DerefTargetItem]>) -> Self { unsafe { let slice_ptr = Box::into_raw(boxed); // TODO: #![feature(slice_ptr_len)] //let iov_len = slice_ptr.len(); let len = (&*slice_ptr).len(); let base = (&*slice_ptr).as_ptr() as *mut I::DerefTargetItem; Self::from_raw_parts(base, len) } } } impl From<Box<[u8]>> for IoBox<Uninit> { #[inline] fn from(boxed: Box<[u8]>) -> Self { unsafe { let slice_ptr = Box::into_raw(boxed); // TODO: #![feature(slice_ptr_len)] //let iov_len = slice_ptr.len(); let len = (&*slice_ptr).len(); let base = (&*slice_ptr).as_ptr() as *mut MaybeUninit<u8>; Self::from_raw_parts(base, len) } } } impl<I: InitMarker> From<Vec<I::DerefTargetItem>> for IoBox<I> { #[inline] fn from(vector: Vec<I::DerefTargetItem>) -> Self { Self::from(vector.into_boxed_slice()) } } impl From<Vec<u8>> for IoBox<Uninit> { #[inline] fn from(vector: Vec<u8>) -> Self { Self::from(vector.into_boxed_slice()) } } impl<I: InitMarker> From<IoBox<I>> for Box<[I::DerefTargetItem]> { #[inline] fn from(io_box: IoBox<I>) -> Self { io_box.into_box() } } impl From<IoBox<Init>> for Box<[MaybeUninit<u8>]> { #[inline] fn from(io_box: IoBox<Init>) -> Self { io_box.into_uninit_box() } } impl<I: InitMarker> From<IoBox<I>> for Vec<I::DerefTargetItem> { #[inline] fn from(io_box: IoBox<I>) -> Self { Self::from(Box::from(io_box)) } } impl From<IoBox<Init>> for Vec<MaybeUninit<u8>> { #[inline] fn from(io_box: IoBox<Init>) -> Self { io_box.into_uninit_box().into() } } impl core::fmt::Debug for IoBox { #[inline] fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { write!(f, "{:?}", self.as_slice()) } } impl<I: InitMarker> Deref for IoBox<I> { type Target = [I::DerefTargetItem]; #[inline] fn deref(&self) -> &Self::Target { self.inner_data() } } impl<I: InitMarker> DerefMut for IoBox<I> { #[inline] fn deref_mut(&mut self) -> &mut Self::Target { self.inner_data_mut() } } impl<I: InitMarker> AsRef<[MaybeUninit<u8>]> for IoBox<I> { #[inline] fn as_ref(&self) -> &[MaybeUninit<u8>] { self.as_maybe_uninit_slice() } } impl<I: InitMarker> AsMut<[MaybeUninit<u8>]> for IoBox<I> { #[inline] fn as_mut(&mut self) -> &mut [MaybeUninit<u8>] { self.as_maybe_uninit_slice_mut() } } impl AsRef<[u8]> for IoBox<Init> { #[inline] fn as_ref(&self) -> &[u8] { self.as_slice() } } impl AsMut<[u8]> for IoBox<Init> { #[inline] fn as_mut(&mut self) -> &mut [u8] { self.as_slice_mut() } } impl<I: InitMarker> core::borrow::Borrow<[MaybeUninit<u8>]> for IoBox<I> { #[inline] fn borrow(&self) -> &[MaybeUninit<u8>] { self.as_maybe_uninit_slice() } } impl<I: InitMarker> core::borrow::BorrowMut<[MaybeUninit<u8>]> for IoBox<I> { #[inline] fn borrow_mut(&mut self) -> &mut [MaybeUninit<u8>] { 
self.as_maybe_uninit_slice_mut() } } impl core::borrow::Borrow<[u8]> for IoBox<Init> { #[inline] fn borrow(&self) -> &[u8] { self.as_slice() } } impl core::borrow::BorrowMut<[u8]> for IoBox<Init> { #[inline] fn borrow_mut(&mut self) -> &mut [u8] { self.as_slice_mut() } } impl PartialEq for IoBox<Init> { #[inline] fn eq(&self, other: &Self) -> bool { self.as_slice() == other.as_slice() } } impl PartialEq<[u8]> for IoBox<Init> { #[inline] fn eq(&self, other: &[u8]) -> bool { self.as_slice() == other } } impl<'a> PartialEq<IoSlice<'a, Init>> for IoBox<Init> { #[inline] fn eq(&self, other: &IoSlice<Init>) -> bool { self.as_slice() == other.as_slice() } } impl<'a> PartialEq<IoSliceMut<'a, Init>> for IoBox<Init> { #[inline] fn eq(&self, other: &IoSliceMut<Init>) -> bool { self.as_slice() == other.as_slice() } } impl Eq for IoBox<Init> {} // TODO: more impls } #[cfg(feature = "alloc")] pub use io_box::*; // TODO: Replace the million different methods for casting slices to and from system types of I/O // vectors, with these traits. /// A trait for casting slices of different types to and from each other, provided that they have /// the same memory layout. pub trait CastSlice<'a, T>: Sized { fn cast_slice(selves: &'a [Self]) -> &'a [T]; } /// A trait for casting slices of different types to and from each other, mutably, provided that /// they have the same memory layout. /// /// Any modifications to the target type must not be able to violate invariants of the source type. pub trait CastSliceMut<'a, T>: CastSlice<'a, T> { fn cast_slice_mut(selves: &'a mut [Self]) -> &'a mut [T]; } impl<T> CastSlice<'_, T> for T { #[inline] fn cast_slice(selves: &[Self]) -> &[T] { selves } } impl<T> CastSliceMut<'_, T> for T { #[inline] fn cast_slice_mut(selves: &mut [Self]) -> &mut [T] { selves } } #[cfg(feature = "alloc")] impl<'a, I: InitMarker> CastSlice<'a, IoSlice<'a, I>> for IoBox<I> { #[inline] fn cast_slice(selves: &'a [Self]) -> &'a [IoSlice<'a, I>] { IoBox::cast_to_ioslices(selves) } } #[cfg(feature = "alloc")] impl<'a, I: InitMarker> CastSlice<'a, IoSliceMut<'a, I>> for IoBox<I> { #[inline] fn cast_slice(selves: &'a [Self]) -> &'a [IoSliceMut<'a, I>] { IoBox::cast_to_mut_ioslices(selves) } } #[cfg(test)] mod tests { use super::*; const FIRST: &[u8] = b"this"; const SECOND: &[u8] = b"is"; const THIRD: &[u8] = b"FAL"; const FOURTH: &[u8] = b"-rs"; const SPACE: &[u8] = b" "; #[test] fn advance() { let original_slices = [FIRST, SPACE, SECOND, SPACE, THIRD, FOURTH]; let mut original_ioslices = original_slices .iter() .copied() .map(|slice| IoSlice::from(slice)) .collect::<Vec<_>>(); let original_slices = &original_slices[..]; let original_ioslices = &mut original_ioslices[..]; fn check_slices(ioslices: &[IoSlice], slice: &[&[u8]]) { assert!(ioslices .iter() .map(|ioslice| ioslice.as_slice()) .eq(slice.iter().copied())); } let mut ioslices = original_ioslices; check_slices(ioslices, original_slices); ioslices = IoSlice::advance_within(ioslices, 0).unwrap(); check_slices(ioslices, &[b"this", b" ", b"is", b" ", b"FAL", b"-rs"]); ioslices = IoSlice::advance_within(ioslices, 2).unwrap(); check_slices(ioslices, &[b"is", b" ", b"is", b" ", b"FAL", b"-rs"]); ioslices = IoSlice::advance_within(ioslices, 5).unwrap(); check_slices(ioslices, &[b" ", b"FAL", b"-rs"]); ioslices = IoSlice::advance_within(ioslices, 6).unwrap(); check_slices(ioslices, &[b"s"]); ioslices = IoSlice::advance_within(ioslices, 1).unwrap(); check_slices(ioslices, &[]); assert_eq!(IoSlice::advance_within(ioslices, 1), None); } macro_rules! 
splitting_inner( ($slice:ident) => {{ let mut buf: [u8; 13] = *b"Hello, world!"; let full = $slice::<Init>::new(&mut buf); let (first, remainder) = full.split_at(4); assert_eq!(&*first, b"Hell"); let (second, third) = remainder.split_at(5); assert_eq!(&*second, b"o, wo"); assert_eq!(&*third, b"rld!"); }} ); #[test] fn splitting_ioslice() { splitting_inner!(IoSlice) } #[test] fn splitting_ioslice_mut() { splitting_inner!(IoSliceMut) } #[test] #[cfg(all(windows, feature = "winapi"))] #[cfg_attr(target_pointer_width = "32", ignore)] #[should_panic = "IoBox (or any WSABUF-based I/O slice) cannot be larger in size than ULONG, which is 32 bits on Windows."] fn wsabuf_limit() { let _ = IoBox::try_alloc_uninit(u32::MAX as usize + 1); } #[test] #[cfg(feature = "std")] fn abi_compatibility_with_std() { assert_eq!( std::mem::size_of::<IoSlice>(), std::mem::size_of::<std::io::IoSlice>() ); assert_eq!( std::mem::align_of::<IoSlice>(), std::mem::align_of::<std::io::IoSlice>() ); let slices = [FIRST, SECOND, THIRD, FOURTH]; let mut ioslices = [ IoSlice::new(FIRST), IoSlice::new(SECOND), IoSlice::new(THIRD), IoSlice::new(FOURTH), ]; let std_ioslices = IoSlice::cast_to_std_ioslices(&ioslices); assert!(std_ioslices .iter() .map(|ioslice| ioslice.as_ref()) .eq(slices.iter().copied())); use std::io::prelude::*; let mut buffer = vec![0u8; slices.iter().copied().map(<[u8]>::len).sum()].into_boxed_slice(); let mut total = 0; let mut ioslices = &mut ioslices[..]; loop { let std_ioslices = IoSlice::cast_to_std_ioslices(&ioslices); match (&mut *buffer).write_vectored(std_ioslices) { Ok(0) => break, Ok(n) => { ioslices = IoSlice::advance_within(ioslices, n).unwrap(); total += n } Err(error) if error.kind() == std::io::ErrorKind::Interrupted => continue, Err(error) => Err(error).unwrap(), } } assert_eq!(total, buffer.len()); assert_eq!(&*buffer, b"thisisFAL-rs"); } #[test] #[cfg(all(unix, feature = "libc"))] #[cfg_attr(miri, ignore)] fn abi_compatibility_with_iovec() { use std::convert::TryInto; assert_eq!( std::mem::size_of::<IoSlice>(), std::mem::size_of::<libc::iovec>() ); assert_eq!( std::mem::align_of::<IoSlice>(), std::mem::align_of::<libc::iovec>() ); unsafe { let slice: &[u8] = b"Hello, world!"; let iov_base = slice.as_ptr() as *mut libc::c_void; let iov_len = slice.len(); let vec = libc::iovec { iov_base, iov_len }; let wrapped: IoSlice = std::mem::transmute::<libc::iovec, IoSlice>(vec); assert_eq!(wrapped.as_ptr(), iov_base as *const u8); assert_eq!(wrapped.len(), iov_len); } let ioslices = [ IoSlice::new(FIRST), IoSlice::new(SPACE), IoSlice::new(SECOND), IoSlice::new(SPACE), IoSlice::new(THIRD), IoSlice::new(FOURTH), ]; let iovecs = IoSlice::cast_to_raw_iovecs(&ioslices); let mut fds = [0; 2]; unsafe { libc::pipe(fds.as_mut_ptr()); } let [receiver_fd, sender_fd] = fds; let mut buffer = vec![0u8; ioslices.iter().map(|slice| slice.len()).sum()]; let buffer_parts = buffer .chunks_mut(4) .map(|slice| IoSliceMut::new(slice)) .collect::<Vec<_>>(); let buffer_parts_iovecs = IoSliceMut::cast_to_raw_iovecs(&*buffer_parts); unsafe { // TODO: Maybe repeat since writev and readv don't have to return everything? 
let result = libc::writev(sender_fd, iovecs.as_ptr(), iovecs.len().try_into().unwrap()); if result == -1 { panic!("failed to writev: {}", std::io::Error::last_os_error()); } let result = libc::readv( receiver_fd, buffer_parts_iovecs.as_ptr(), buffer_parts_iovecs.len().try_into().unwrap(), ); if result == -1 { panic!("failed to readv: {}", std::io::Error::last_os_error()); } } let src_iter = ioslices .iter() .flat_map(|ioslice| ioslice.as_slice()) .copied(); let dst_iter = buffer_parts .iter() .flat_map(|ioslice| ioslice.as_slice()) .copied(); assert!(Iterator::eq(src_iter, dst_iter)); } // TODO: Fix test. /* #[test] #[cfg(all(feature = "std"))] fn iobox() { use uninit_tools::initializer::BufferInitializer; use std::io::Write; let iobox = IoBox::alloc_uninit(1024); let initialized = BufferInitializer::uninit(iobox).finish_init_by_filling(0xFF).into(); let iobox2 = IoBox::alloc_zeroed(2048); let boxes = [initialized, iobox2]; let io_slices = IoBox::cast_to_ioslices(&boxes); let io_slices = IoSlice::cast_to_std_ioslices(io_slices); // NOTE: This test currently depends on the fact that the Write impl for slices, never // only writes part of the buffers. let mut original_buf = [0u8; 1024 + 2048]; let mut buf = &mut original_buf[..]; buf.write_vectored(io_slices).unwrap(); assert!(original_buf[..1024] .iter() .copied() .eq(std::iter::repeat(0xFF).take(1024))); assert!(original_buf[1024..1024 + 2048] .iter() .copied() .eq(std::iter::repeat(0x00).take(2048))); // TODO: Test more things. } */ // TODO: Make IoSlice compatible with WSABUF without std as well. }
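// A minimal, self-contained usage sketch of the `IoBox` API (an illustrative
// addition, not original to the crate). It is gated on both `alloc` and `std`
// so that it only builds where the types and the test harness are available.
#[cfg(all(test, feature = "alloc", feature = "std"))]
mod iobox_usage_sketch {
    use super::*;

    #[test]
    fn alloc_fill_and_convert() {
        // Allocate 16 zero-initialized bytes on the heap.
        let mut boxed = IoBox::alloc_zeroed(16);
        assert!(boxed.as_slice().iter().all(|&byte| byte == 0));

        // Mutate through an `IoSliceMut` view of the same allocation.
        boxed.as_ioslice_mut().as_slice_mut()[0] = 0xAB;

        // Converting into a `Vec` hands the allocation over; nothing is copied.
        let vec: Vec<u8> = boxed.into();
        assert_eq!(vec[0], 0xAB);
        assert_eq!(vec.len(), 16);
    }
}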
instance.go
// *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***

package ec2

import (
	"github.com/pkg/errors"
	"github.com/pulumi/pulumi/sdk/go/pulumi"
)

// Provides an EC2 instance resource. This allows instances to be created, updated,
// and deleted. Instances also support [provisioning](https://www.terraform.io/docs/provisioners/index.html).
type Instance struct {
	s *pulumi.ResourceState
} // NewInstance registers a new resource with the given unique name, arguments, and options. func NewInstance(ctx *pulumi.Context, name string, args *InstanceArgs, opts ...pulumi.ResourceOpt) (*Instance, error) { if args == nil || args.Ami == nil { return nil, errors.New("missing required argument 'Ami'") } if args == nil || args.InstanceType == nil { return nil, errors.New("missing required argument 'InstanceType'") } inputs := make(map[string]interface{}) if args == nil { inputs["ami"] = nil inputs["associatePublicIpAddress"] = nil inputs["availabilityZone"] = nil inputs["cpuCoreCount"] = nil inputs["cpuThreadsPerCore"] = nil inputs["creditSpecification"] = nil inputs["disableApiTermination"] = nil inputs["ebsBlockDevices"] = nil inputs["ebsOptimized"] = nil inputs["ephemeralBlockDevices"] = nil inputs["getPasswordData"] = nil inputs["hostId"] = nil inputs["iamInstanceProfile"] = nil inputs["instanceInitiatedShutdownBehavior"] = nil inputs["instanceType"] = nil inputs["ipv6AddressCount"] = nil inputs["ipv6Addresses"] = nil inputs["keyName"] = nil inputs["monitoring"] = nil inputs["networkInterfaces"] = nil inputs["placementGroup"] = nil inputs["privateIp"] = nil inputs["rootBlockDevice"] = nil inputs["securityGroups"] = nil inputs["sourceDestCheck"] = nil inputs["subnetId"] = nil inputs["tags"] = nil inputs["tenancy"] = nil inputs["userData"] = nil inputs["userDataBase64"] = nil inputs["volumeTags"] = nil inputs["vpcSecurityGroupIds"] = nil } else { inputs["ami"] = args.Ami inputs["associatePublicIpAddress"] = args.AssociatePublicIpAddress inputs["availabilityZone"] = args.AvailabilityZone inputs["cpuCoreCount"] = args.CpuCoreCount inputs["cpuThreadsPerCore"] = args.CpuThreadsPerCore inputs["creditSpecification"] = args.CreditSpecification inputs["disableApiTermination"] = args.DisableApiTermination inputs["ebsBlockDevices"] = args.EbsBlockDevices inputs["ebsOptimized"] = args.EbsOptimized inputs["ephemeralBlockDevices"] = args.EphemeralBlockDevices inputs["getPasswordData"] = args.GetPasswordData inputs["hostId"] = args.HostId inputs["iamInstanceProfile"] = args.IamInstanceProfile inputs["instanceInitiatedShutdownBehavior"] = args.InstanceInitiatedShutdownBehavior inputs["instanceType"] = args.InstanceType inputs["ipv6AddressCount"] = args.Ipv6AddressCount inputs["ipv6Addresses"] = args.Ipv6Addresses inputs["keyName"] = args.KeyName inputs["monitoring"] = args.Monitoring inputs["networkInterfaces"] = args.NetworkInterfaces inputs["placementGroup"] = args.PlacementGroup inputs["privateIp"] = args.PrivateIp inputs["rootBlockDevice"] = args.RootBlockDevice inputs["securityGroups"] = args.SecurityGroups inputs["sourceDestCheck"] = args.SourceDestCheck inputs["subnetId"] = args.SubnetId inputs["tags"] = args.Tags inputs["tenancy"] = args.Tenancy inputs["userData"] = args.UserData inputs["userDataBase64"] = args.UserDataBase64 inputs["volumeTags"] = args.VolumeTags inputs["vpcSecurityGroupIds"] = args.VpcSecurityGroupIds } inputs["arn"] = nil inputs["instanceState"] = nil inputs["passwordData"] = nil inputs["primaryNetworkInterfaceId"] = nil inputs["privateDns"] = nil inputs["publicDns"] = nil inputs["publicIp"] = nil s, err := ctx.RegisterResource("aws:ec2/instance:Instance", name, true, inputs, opts...) if err != nil { return nil, err } return &Instance{s: s}, nil } // GetInstance gets an existing Instance resource's state with the given name, ID, and optional // state properties that are used to uniquely qualify the lookup (nil if not required). 
func GetInstance(ctx *pulumi.Context, name string, id pulumi.ID, state *InstanceState, opts ...pulumi.ResourceOpt) (*Instance, error) { inputs := make(map[string]interface{}) if state != nil { inputs["ami"] = state.Ami inputs["arn"] = state.Arn inputs["associatePublicIpAddress"] = state.AssociatePublicIpAddress inputs["availabilityZone"] = state.AvailabilityZone inputs["cpuCoreCount"] = state.CpuCoreCount inputs["cpuThreadsPerCore"] = state.CpuThreadsPerCore inputs["creditSpecification"] = state.CreditSpecification inputs["disableApiTermination"] = state.DisableApiTermination inputs["ebsBlockDevices"] = state.EbsBlockDevices inputs["ebsOptimized"] = state.EbsOptimized inputs["ephemeralBlockDevices"] = state.EphemeralBlockDevices inputs["getPasswordData"] = state.GetPasswordData inputs["hostId"] = state.HostId inputs["iamInstanceProfile"] = state.IamInstanceProfile inputs["instanceInitiatedShutdownBehavior"] = state.InstanceInitiatedShutdownBehavior inputs["instanceState"] = state.InstanceState inputs["instanceType"] = state.InstanceType inputs["ipv6AddressCount"] = state.Ipv6AddressCount inputs["ipv6Addresses"] = state.Ipv6Addresses inputs["keyName"] = state.KeyName inputs["monitoring"] = state.Monitoring inputs["networkInterfaces"] = state.NetworkInterfaces inputs["passwordData"] = state.PasswordData inputs["placementGroup"] = state.PlacementGroup inputs["primaryNetworkInterfaceId"] = state.PrimaryNetworkInterfaceId inputs["privateDns"] = state.PrivateDns inputs["privateIp"] = state.PrivateIp inputs["publicDns"] = state.PublicDns inputs["publicIp"] = state.PublicIp inputs["rootBlockDevice"] = state.RootBlockDevice inputs["securityGroups"] = state.SecurityGroups inputs["sourceDestCheck"] = state.SourceDestCheck inputs["subnetId"] = state.SubnetId inputs["tags"] = state.Tags inputs["tenancy"] = state.Tenancy inputs["userData"] = state.UserData inputs["userDataBase64"] = state.UserDataBase64 inputs["volumeTags"] = state.VolumeTags inputs["vpcSecurityGroupIds"] = state.VpcSecurityGroupIds } s, err := ctx.ReadResource("aws:ec2/instance:Instance", name, id, inputs, opts...) if err != nil { return nil, err } return &Instance{s: s}, nil } // URN is this resource's unique name assigned by Pulumi. func (r *Instance) URN() *pulumi.URNOutput { return r.s.URN() } // ID is this resource's unique identifier assigned by its provider. func (r *Instance) ID() *pulumi.IDOutput { return r.s.ID() } // The AMI to use for the instance. func (r *Instance) Ami() *pulumi.StringOutput { return (*pulumi.StringOutput)(r.s.State["ami"]) } // The ARN of the instance. func (r *Instance) Arn() *pulumi.StringOutput { return (*pulumi.StringOutput)(r.s.State["arn"]) } // Associate a public ip address with an instance in a VPC. Boolean value. func (r *Instance) AssociatePublicIpAddress() *pulumi.BoolOutput { return (*pulumi.BoolOutput)(r.s.State["associatePublicIpAddress"]) } // The AZ to start the instance in. func (r *Instance) AvailabilityZone() *pulumi.StringOutput { return (*pulumi.StringOutput)(r.s.State["availabilityZone"]) } // Sets the number of CPU cores for an instance. This option is // only supported on creation of instance type that support CPU Options // [CPU Cores and Threads Per CPU Core Per Instance Type](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instance-optimize-cpu.html#cpu-options-supported-instances-values) - specifying this option for unsupported instance types will return an error from the EC2 API. 
func (r *Instance) CpuCoreCount() *pulumi.IntOutput { return (*pulumi.IntOutput)(r.s.State["cpuCoreCount"]) } // If set to to 1, hyperthreading is disabled on the launched instance. Defaults to 2 if not set. See [Optimizing CPU Options](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instance-optimize-cpu.html) for more information. func (r *Instance) CpuThreadsPerCore() *pulumi.IntOutput { return (*pulumi.IntOutput)(r.s.State["cpuThreadsPerCore"]) } // Customize the credit specification of the instance. See Credit Specification below for more details. func (r *Instance) CreditSpecification() *pulumi.Output { return r.s.State["creditSpecification"] } // If true, enables [EC2 Instance // Termination Protection](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/terminating-instances.html#Using_ChangingDisableAPITermination) func (r *Instance) DisableApiTermination() *pulumi.BoolOutput { return (*pulumi.BoolOutput)(r.s.State["disableApiTermination"]) } // Additional EBS block devices to attach to the // instance. Block device configurations only apply on resource creation. See Block Devices below for details on attributes and drift detection. func (r *Instance) EbsBlockDevices() *pulumi.ArrayOutput { return (*pulumi.ArrayOutput)(r.s.State["ebsBlockDevices"]) } // If true, the launched EC2 instance will be EBS-optimized. // Note that if this is not set on an instance type that is optimized by default then // this will show as disabled but if the instance type is optimized by default then // there is no need to set this and there is no effect to disabling it. // See the [EBS Optimized section](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSOptimized.html) of the AWS User Guide for more information. func (r *Instance) EbsOptimized() *pulumi.BoolOutput { return (*pulumi.BoolOutput)(r.s.State["ebsOptimized"]) } // Customize Ephemeral (also known as // "Instance Store") volumes on the instance. See Block Devices below for details. func (r *Instance) EphemeralBlockDevices() *pulumi.ArrayOutput { return (*pulumi.ArrayOutput)(r.s.State["ephemeralBlockDevices"]) } // If true, wait for password data to become available and retrieve it. Useful for getting the administrator password for instances running Microsoft Windows. The password data is exported to the `password_data` attribute. See [GetPasswordData](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_GetPasswordData.html) for more information. func (r *Instance) GetPasswordData() *pulumi.BoolOutput { return (*pulumi.BoolOutput)(r.s.State["getPasswordData"]) } // The Id of a dedicated host that the instance will be assigned to. Use when an instance is to be launched on a specific dedicated host. func (r *Instance) HostId() *pulumi.StringOutput { return (*pulumi.StringOutput)(r.s.State["hostId"]) } // The IAM Instance Profile to // launch the instance with. Specified as the name of the Instance Profile. Ensure your credentials have the correct permission to assign the instance profile according to the [EC2 documentation](http://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use_switch-role-ec2.html#roles-usingrole-ec2instance-permissions), notably `iam:PassRole`. // * `ipv6_address_count`- (Optional) A number of IPv6 addresses to associate with the primary network interface. Amazon EC2 chooses the IPv6 addresses from the range of your subnet. func (r *Instance) IamInstanceProfile() *pulumi.StringOutput { return (*pulumi.StringOutput)(r.s.State["iamInstanceProfile"]) } // Shutdown behavior for the // instance. 
Amazon defaults this to `stop` for EBS-backed instances and // `terminate` for instance-store instances. Cannot be set on instance-store // instances. See [Shutdown Behavior](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/terminating-instances.html#Using_ChangingInstanceInitiatedShutdownBehavior) for more information. func (r *Instance) InstanceInitiatedShutdownBehavior() *pulumi.StringOutput { return (*pulumi.StringOutput)(r.s.State["instanceInitiatedShutdownBehavior"]) } func (r *Instance) InstanceState() *pulumi.StringOutput { return (*pulumi.StringOutput)(r.s.State["instanceState"]) } // The type of instance to start. Updates to this field will trigger a stop/start of the EC2 instance. func (r *Instance) InstanceType() *pulumi.StringOutput { return (*pulumi.StringOutput)(r.s.State["instanceType"]) } func (r *Instance) Ipv6AddressCount() *pulumi.IntOutput { return (*pulumi.IntOutput)(r.s.State["ipv6AddressCount"]) } // Specify one or more IPv6 addresses from the range of the subnet to associate with the primary network interface func (r *Instance) Ipv6Addresses() *pulumi.ArrayOutput { return (*pulumi.ArrayOutput)(r.s.State["ipv6Addresses"]) } // The key name of the Key Pair to use for the instance; which can be managed using the `aws_key_pair` resource. func (r *Instance) KeyName() *pulumi.StringOutput { return (*pulumi.StringOutput)(r.s.State["keyName"]) } // If true, the launched EC2 instance will have detailed monitoring enabled. (Available since v0.6.0) func (r *Instance) Monitoring() *pulumi.BoolOutput { return (*pulumi.BoolOutput)(r.s.State["monitoring"]) } // Customize network interfaces to be attached at instance boot time. See Network Interfaces below for more details. func (r *Instance) NetworkInterfaces() *pulumi.ArrayOutput { return (*pulumi.ArrayOutput)(r.s.State["networkInterfaces"]) } // Base-64 encoded encrypted password data for the instance. // Useful for getting the administrator password for instances running Microsoft Windows. // This attribute is only exported if `get_password_data` is true. // Note that this encrypted value will be stored in the state file, as with all exported attributes. // See [GetPasswordData](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_GetPasswordData.html) for more information. func (r *Instance) PasswordData() *pulumi.StringOutput { return (*pulumi.StringOutput)(r.s.State["passwordData"]) } // The Placement Group to start the instance in. func (r *Instance) PlacementGroup() *pulumi.StringOutput { return (*pulumi.StringOutput)(r.s.State["placementGroup"]) } // The ID of the instance's primary network interface. func (r *Instance) PrimaryNetworkInterfaceId() *pulumi.StringOutput { return (*pulumi.StringOutput)(r.s.State["primaryNetworkInterfaceId"]) } // The private DNS name assigned to the instance. Can only be // used inside the Amazon EC2, and only available if you've enabled DNS hostnames // for your VPC func (r *Instance) PrivateDns() *pulumi.StringOutput { return (*pulumi.StringOutput)(r.s.State["privateDns"]) } // Private IP address to associate with the // instance in a VPC. func (r *Instance) PrivateIp() *pulumi.StringOutput { return (*pulumi.StringOutput)(r.s.State["privateIp"]) } // The public DNS name assigned to the instance. For EC2-VPC, this // is only available if you've enabled DNS hostnames for your VPC func (r *Instance) PublicDns() *pulumi.StringOutput { return (*pulumi.StringOutput)(r.s.State["publicDns"]) } // The public IP address assigned to the instance, if applicable. 
**NOTE**: If you are using an [`aws_eip`](https://www.terraform.io/docs/providers/aws/r/eip.html) with your instance, you should refer to the EIP's address directly and not use `public_ip`, as this field will change after the EIP is attached. func (r *Instance) PublicIp() *pulumi.StringOutput { return (*pulumi.StringOutput)(r.s.State["publicIp"]) } // Customize details about the root block // device of the instance. See Block Devices below for details. func (r *Instance) RootBlockDevice() *pulumi.Output { return r.s.State["rootBlockDevice"] } // A list of security group names (EC2-Classic) or IDs (default VPC) to associate with. func (r *Instance) SecurityGroups() *pulumi.ArrayOutput { return (*pulumi.ArrayOutput)(r.s.State["securityGroups"]) } // Controls if traffic is routed to the instance when // the destination address does not match the instance. Used for NAT or VPNs. Defaults true. func (r *Instance) SourceDestCheck() *pulumi.BoolOutput { return (*pulumi.BoolOutput)(r.s.State["sourceDestCheck"]) } // The VPC Subnet ID to launch in. func (r *Instance) SubnetId() *pulumi.StringOutput { return (*pulumi.StringOutput)(r.s.State["subnetId"]) } // A mapping of tags to assign to the resource. func (r *Instance) Tags() *pulumi.MapOutput { return (*pulumi.MapOutput)(r.s.State["tags"]) } // The tenancy of the instance (if the instance is running in a VPC). An instance with a tenancy of dedicated runs on single-tenant hardware. The host tenancy is not supported for the import-instance command. func (r *Instance) Tenancy() *pulumi.StringOutput { return (*pulumi.StringOutput)(r.s.State["tenancy"]) } // The user data to provide when launching the instance. Do not pass gzip-compressed data via this argument; see `user_data_base64` instead. func (r *Instance) UserData() *pulumi.StringOutput { return (*pulumi.StringOutput)(r.s.State["userData"]) } // Can be used instead of `user_data` to pass base64-encoded binary data directly. Use this instead of `user_data` whenever the value is not a valid UTF-8 string. For example, gzip-encoded user data must be base64-encoded and passed via this argument to avoid corruption. func (r *Instance) UserDataBase64() *pulumi.StringOutput { return (*pulumi.StringOutput)(r.s.State["userDataBase64"]) } // A mapping of tags to assign to the devices created by the instance at launch time. func (r *Instance) VolumeTags() *pulumi.MapOutput { return (*pulumi.MapOutput)(r.s.State["volumeTags"]) } // A list of security group IDs to associate with. func (r *Instance) VpcSecurityGroupIds() *pulumi.ArrayOutput { return (*pulumi.ArrayOutput)(r.s.State["vpcSecurityGroupIds"]) } // Input properties used for looking up and filtering Instance resources. type InstanceState struct { // The AMI to use for the instance. Ami interface{} // The ARN of the instance. Arn interface{} // Associate a public ip address with an instance in a VPC. Boolean value. AssociatePublicIpAddress interface{} // The AZ to start the instance in. AvailabilityZone interface{} // Sets the number of CPU cores for an instance. This option is // only supported on creation of instance type that support CPU Options // [CPU Cores and Threads Per CPU Core Per Instance Type](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instance-optimize-cpu.html#cpu-options-supported-instances-values) - specifying this option for unsupported instance types will return an error from the EC2 API. CpuCoreCount interface{} // If set to to 1, hyperthreading is disabled on the launched instance. Defaults to 2 if not set. 
See [Optimizing CPU Options](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instance-optimize-cpu.html) for more information. CpuThreadsPerCore interface{} // Customize the credit specification of the instance. See Credit Specification below for more details. CreditSpecification interface{} // If true, enables [EC2 Instance // Termination Protection](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/terminating-instances.html#Using_ChangingDisableAPITermination) DisableApiTermination interface{} // Additional EBS block devices to attach to the // instance. Block device configurations only apply on resource creation. See Block Devices below for details on attributes and drift detection. EbsBlockDevices interface{} // If true, the launched EC2 instance will be EBS-optimized. // Note that if this is not set on an instance type that is optimized by default then // this will show as disabled but if the instance type is optimized by default then // there is no need to set this and there is no effect to disabling it. // See the [EBS Optimized section](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSOptimized.html) of the AWS User Guide for more information. EbsOptimized interface{} // Customize Ephemeral (also known as // "Instance Store") volumes on the instance. See Block Devices below for details. EphemeralBlockDevices interface{} // If true, wait for password data to become available and retrieve it. Useful for getting the administrator password for instances running Microsoft Windows. The password data is exported to the `password_data` attribute. See [GetPasswordData](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_GetPasswordData.html) for more information. GetPasswordData interface{} // The Id of a dedicated host that the instance will be assigned to. Use when an instance is to be launched on a specific dedicated host. HostId interface{} // The IAM Instance Profile to // launch the instance with. Specified as the name of the Instance Profile. Ensure your credentials have the correct permission to assign the instance profile according to the [EC2 documentation](http://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use_switch-role-ec2.html#roles-usingrole-ec2instance-permissions), notably `iam:PassRole`. // * `ipv6_address_count`- (Optional) A number of IPv6 addresses to associate with the primary network interface. Amazon EC2 chooses the IPv6 addresses from the range of your subnet. IamInstanceProfile interface{} // Shutdown behavior for the // instance. Amazon defaults this to `stop` for EBS-backed instances and // `terminate` for instance-store instances. Cannot be set on instance-store // instances. See [Shutdown Behavior](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/terminating-instances.html#Using_ChangingInstanceInitiatedShutdownBehavior) for more information. InstanceInitiatedShutdownBehavior interface{} InstanceState interface{} // The type of instance to start. Updates to this field will trigger a stop/start of the EC2 instance. InstanceType interface{} Ipv6AddressCount interface{} // Specify one or more IPv6 addresses from the range of the subnet to associate with the primary network interface Ipv6Addresses interface{} // The key name of the Key Pair to use for the instance; which can be managed using the `aws_key_pair` resource. KeyName interface{} // If true, the launched EC2 instance will have detailed monitoring enabled. (Available since v0.6.0) Monitoring interface{} // Customize network interfaces to be attached at instance boot time. 
See Network Interfaces below for more details. NetworkInterfaces interface{} // Base-64 encoded encrypted password data for the instance. // Useful for getting the administrator password for instances running Microsoft Windows. // This attribute is only exported if `get_password_data` is true. // Note that this encrypted value will be stored in the state file, as with all exported attributes. // See [GetPasswordData](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_GetPasswordData.html) for more information. PasswordData interface{} // The Placement Group to start the instance in. PlacementGroup interface{} // The ID of the instance's primary network interface. PrimaryNetworkInterfaceId interface{} // The private DNS name assigned to the instance. Can only be // used inside the Amazon EC2, and only available if you've enabled DNS hostnames // for your VPC PrivateDns interface{} // Private IP address to associate with the // instance in a VPC. PrivateIp interface{} // The public DNS name assigned to the instance. For EC2-VPC, this // is only available if you've enabled DNS hostnames for your VPC PublicDns interface{} // The public IP address assigned to the instance, if applicable. **NOTE**: If you are using an [`aws_eip`](https://www.terraform.io/docs/providers/aws/r/eip.html) with your instance, you should refer to the EIP's address directly and not use `public_ip`, as this field will change after the EIP is attached. PublicIp interface{} // Customize details about the root block // device of the instance. See Block Devices below for details. RootBlockDevice interface{} // A list of security group names (EC2-Classic) or IDs (default VPC) to associate with. SecurityGroups interface{} // Controls if traffic is routed to the instance when // the destination address does not match the instance. Used for NAT or VPNs. Defaults true. SourceDestCheck interface{} // The VPC Subnet ID to launch in. SubnetId interface{} // A mapping of tags to assign to the resource. Tags interface{} // The tenancy of the instance (if the instance is running in a VPC). An instance with a tenancy of dedicated runs on single-tenant hardware. The host tenancy is not supported for the import-instance command. Tenancy interface{} // The user data to provide when launching the instance. Do not pass gzip-compressed data via this argument; see `user_data_base64` instead. UserData interface{} // Can be used instead of `user_data` to pass base64-encoded binary data directly. Use this instead of `user_data` whenever the value is not a valid UTF-8 string. For example, gzip-encoded user data must be base64-encoded and passed via this argument to avoid corruption. UserDataBase64 interface{} // A mapping of tags to assign to the devices created by the instance at launch time. VolumeTags interface{} // A list of security group IDs to associate with. VpcSecurityGroupIds interface{} } // The set of arguments for constructing a Instance resource. type InstanceArgs struct { // The AMI to use for the instance. Ami interface{} // Associate a public ip address with an instance in a VPC. Boolean value. AssociatePublicIpAddress interface{} // The AZ to start the instance in. AvailabilityZone interface{} // Sets the number of CPU cores for an instance. 
This option is // only supported on creation of instance type that support CPU Options // [CPU Cores and Threads Per CPU Core Per Instance Type](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instance-optimize-cpu.html#cpu-options-supported-instances-values) - specifying this option for unsupported instance types will return an error from the EC2 API. CpuCoreCount interface{} // If set to to 1, hyperthreading is disabled on the launched instance. Defaults to 2 if not set. See [Optimizing CPU Options](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instance-optimize-cpu.html) for more information. CpuThreadsPerCore interface{} // Customize the credit specification of the instance. See Credit Specification below for more details. CreditSpecification interface{} // If true, enables [EC2 Instance // Termination Protection](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/terminating-instances.html#Using_ChangingDisableAPITermination) DisableApiTermination interface{} // Additional EBS block devices to attach to the // instance. Block device configurations only apply on resource creation. See Block Devices below for details on attributes and drift detection. EbsBlockDevices interface{} // If true, the launched EC2 instance will be EBS-optimized. // Note that if this is not set on an instance type that is optimized by default then // this will show as disabled but if the instance type is optimized by default then // there is no need to set this and there is no effect to disabling it. // See the [EBS Optimized section](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSOptimized.html) of the AWS User Guide for more information. EbsOptimized interface{} // Customize Ephemeral (also known as // "Instance Store") volumes on the instance. See Block Devices below for details. EphemeralBlockDevices interface{} // If true, wait for password data to become available and retrieve it. Useful for getting the administrator password for instances running Microsoft Windows. The password data is exported to the `password_data` attribute. See [GetPasswordData](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_GetPasswordData.html) for more information. GetPasswordData interface{} // The Id of a dedicated host that the instance will be assigned to. Use when an instance is to be launched on a specific dedicated host. HostId interface{} // The IAM Instance Profile to // launch the instance with. Specified as the name of the Instance Profile. Ensure your credentials have the correct permission to assign the instance profile according to the [EC2 documentation](http://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use_switch-role-ec2.html#roles-usingrole-ec2instance-permissions), notably `iam:PassRole`. // * `ipv6_address_count`- (Optional) A number of IPv6 addresses to associate with the primary network interface. Amazon EC2 chooses the IPv6 addresses from the range of your subnet. IamInstanceProfile interface{} // Shutdown behavior for the // instance. Amazon defaults this to `stop` for EBS-backed instances and // `terminate` for instance-store instances. Cannot be set on instance-store // instances. See [Shutdown Behavior](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/terminating-instances.html#Using_ChangingInstanceInitiatedShutdownBehavior) for more information. InstanceInitiatedShutdownBehavior interface{} // The type of instance to start. Updates to this field will trigger a stop/start of the EC2 instance. 
InstanceType interface{} Ipv6AddressCount interface{} // Specify one or more IPv6 addresses from the range of the subnet to associate with the primary network interface Ipv6Addresses interface{} // The key name of the Key Pair to use for the instance; which can be managed using the `aws_key_pair` resource. KeyName interface{} // If true, the launched EC2 instance will have detailed monitoring enabled. (Available since v0.6.0) Monitoring interface{} // Customize network interfaces to be attached at instance boot time. See Network Interfaces below for more details. NetworkInterfaces interface{} // The Placement Group to start the instance in. PlacementGroup interface{} // Private IP address to associate with the // instance in a VPC. PrivateIp interface{} // Customize details about the root block // device of the instance. See Block Devices below for details. RootBlockDevice interface{} // A list of security group names (EC2-Classic) or IDs (default VPC) to associate with. SecurityGroups interface{} // Controls if traffic is routed to the instance when // the destination address does not match the instance. Used for NAT or VPNs. Defaults true. SourceDestCheck interface{} // The VPC Subnet ID to launch in. SubnetId interface{} // A mapping of tags to assign to the resource. Tags interface{} // The tenancy of the instance (if the instance is running in a VPC). An instance with a tenancy of dedicated runs on single-tenant hardware. The host tenancy is not supported for the import-instance command. Tenancy interface{} // The user data to provide when launching the instance. Do not pass gzip-compressed data via this argument; see `user_data_base64` instead. UserData interface{} // Can be used instead of `user_data` to pass base64-encoded binary data directly. Use this instead of `user_data` whenever the value is not a valid UTF-8 string. For example, gzip-encoded user data must be base64-encoded and passed via this argument to avoid corruption. UserDataBase64 interface{} // A mapping of tags to assign to the devices created by the instance at launch time. VolumeTags interface{} // A list of security group IDs to associate with. VpcSecurityGroupIds interface{} }
// and deleted. Instances also support [provisioning](https://www.terraform.io/docs/provisioners/index.html). type Instance struct { s *pulumi.ResourceState
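A hedged usage sketch for the generated resource above, not part of the original file: constructing an Instance with a few of the InstanceArgs documented in this module. The import paths, AMI ID, and tag values are assumptions for illustration; in this generation of the SDK the args fields are plain interface{} values.

package main

import (
	"github.com/pulumi/pulumi-aws/sdk/go/aws/ec2" // assumed import path for this SDK generation
	"github.com/pulumi/pulumi/sdk/go/pulumi"      // assumed import path for this SDK generation
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// Launch a small instance; args left unset fall back to provider defaults.
		_, err := ec2.NewInstance(ctx, "web", &ec2.InstanceArgs{
			Ami:          "ami-0c55b159cbfafe1f0", // hypothetical AMI ID
			InstanceType: "t2.micro",
			Tags:         map[string]interface{}{"Name": "web"},
		})
		return err
	})
}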
__init__.py
""" pyexcel.sheets ~~~~~~~~~~~~~~~~~~~ Core functionality of pyexcel, data model :copyright: (c) 2014-2017 by Onni Software Ltd.
:license: New BSD License, see LICENSE for more details """ # flake8: noqa from .sheet import Sheet from .matrix import Matrix, transpose, Row, Column
inv.go
// Copyright © 2011-12 Qtrac Ltd. // // This program or package and any associated files are licensed under the // Apache License, Version 2.0 (the "License"); you may not use these files // except in compliance with the License. You can get a copy of the License // at: http://www.apache.org/licenses/LICENSE-2.0. // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package main import ( "encoding/binary" "errors" "fmt" "io" "strconv" "time" ) type InvMarshaler struct{} const invDateFormat = "20060102" // This date must always be used. var byteOrder = binary.LittleEndian func (InvMarshaler) MarshalInvoices(writer io.Writer, invoices []*Invoice) error { var write invWriterFunc = func(x interface{}) error { return binary.Write(writer, byteOrder, x) } if err := write(uint32(magicNumber)); err != nil { return err } if err := write(uint16(fileVersion)); err != nil { return err } if err := write(int32(len(invoices))); err != nil { return err } for _, invoice := range invoices { if err := write.writeInvoice(invoice); err != nil { return err } } return nil } type invWriterFunc func(interface{}) error func (write invWriterFunc) writeInvoice(invoice *Invoice) error { for _, i := range []int{invoice.Id, invoice.CustomerId} { if err := write(int32(i)); err != nil { return err } } for _, date := range []time.Time{invoice.Raised, invoice.Due} { if err := write.writeDate(date); err != nil { return err } } if err := write.writeBool(invoice.Paid); err != nil { return err } if err := write.writeString(invoice.Note); err != nil { return err } if err := write(int32(len(invoice.Items))); err != nil { return err } for _, item := range invoice.Items { if err := write.writeItem(item); err != nil { return err } } return nil } func (write invWriterFunc) writeDate(date time.Time) error { i, err := strconv.Atoi(date.Format(invDateFormat)) if err != nil { return err } return write(int32(i)) } func (write invWriterFunc) writeBool(b bool) error { var v int8 if b { v = 1 } return write(v) } func (write invWriterFunc) writeString(s string) error { if err := write(int32(len(s))); err != nil { return err } return write([]byte(s)) } func (write invWriterFunc) writeItem(item *Item) error { if err := write.writeString(item.Id); err != nil { return err } if err := write(item.Price); err != nil { return err } if err := write(int16(item.Quantity)); err != nil { return err } return write.writeString(item.Note) } func (InvMarshaler) UnmarshalInvoices(reader io.Reader) ([]*Invoice, error) { if err := checkInvVersion(reader); err != nil { return nil, err } count, err := readIntFromInt32(reader) if err != nil { return nil, err } invoices := make([]*Invoice, 0, count) for i := 0; i < count; i++ { invoice, err := readInvInvoice(reader) if err != nil { return nil, err } invoices = append(invoices, invoice) } return invoices, nil } func readIntFromInt32(reader io.Reader) (int, error) { var i32 int32 err := binary.Read(reader, byteOrder, &i32) return int(i32), err } func readIntFromInt16(reader io.Reader) (int, error) { var i16 int16 err := binary.Read(reader, byteOrder, &i16) return int(i16), err } func readBoolFromInt8(reader io.Reader) (bool, error) { var i8 int8 err := binary.Read(reader, byteOrder, &i8) return i8 == 1, err } func c
reader io.Reader) error { var magic uint32 if err := binary.Read(reader, byteOrder, &magic); err != nil { return err } if magic != magicNumber { return errors.New("cannot read non-invoices inv file") } var version uint16 if err := binary.Read(reader, byteOrder, &version); err != nil { return err } if version > fileVersion { return fmt.Errorf("version %d is too new to read", version) } return nil } func readInvInvoice(reader io.Reader) (invoice *Invoice, err error) { invoice = &Invoice{} for _, pId := range []*int{&invoice.Id, &invoice.CustomerId} { if *pId, err = readIntFromInt32(reader); err != nil { return nil, err } } for _, pDate := range []*time.Time{&invoice.Raised, &invoice.Due} { if *pDate, err = readInvDate(reader); err != nil { return nil, err } } if invoice.Paid, err = readBoolFromInt8(reader); err != nil { return nil, err } if invoice.Note, err = readInvString(reader); err != nil { return nil, err } var count int if count, err = readIntFromInt32(reader); err != nil { return nil, err } invoice.Items, err = readInvItems(reader, count) return invoice, err } func readInvItems(reader io.Reader, count int) ([]*Item, error) { items := make([]*Item, 0, count) for i := 0; i < count; i++ { item, err := readInvItem(reader) if err != nil { return nil, err } items = append(items, item) } return items, nil } func readInvDate(reader io.Reader) (time.Time, error) { var n int32 if err := binary.Read(reader, byteOrder, &n); err != nil { return time.Time{}, err } return time.Parse(invDateFormat, fmt.Sprint(n)) } func readInvString(reader io.Reader) (string, error) { var length int32 if err := binary.Read(reader, byteOrder, &length); err != nil { return "", err } raw := make([]byte, length) if err := binary.Read(reader, byteOrder, &raw); err != nil { return "", err } return string(raw), nil } func readInvItem(reader io.Reader) (item *Item, err error) { item = &Item{} if item.Id, err = readInvString(reader); err != nil { return nil, err } if err = binary.Read(reader, byteOrder, &item.Price); err != nil { return nil, err } if item.Quantity, err = readIntFromInt16(reader); err != nil { return nil, err } item.Note, err = readInvString(reader) return item, err }
heckInvVersion(
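A minimal sketch, not from the file, of the length-prefixed string framing that writeString and readInvString implement: a little-endian int32 length followed by the raw bytes. All names here are local to the sketch.

package main

import (
	"bytes"
	"encoding/binary"
	"fmt"
	"io"
)

func main() {
	var buf bytes.Buffer
	s := "invoice note"
	// Write: int32 length prefix, then the raw bytes (mirrors writeString).
	binary.Write(&buf, binary.LittleEndian, int32(len(s)))
	buf.Write([]byte(s))

	// Read: recover the prefix, then exactly that many bytes (mirrors readInvString).
	var n int32
	binary.Read(&buf, binary.LittleEndian, &n)
	raw := make([]byte, n)
	io.ReadFull(&buf, raw)
	fmt.Println(string(raw) == s) // true
}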
run.py
import random from random import shuffle import numpy as np from datetime import datetime import time import queue import threading import logging from PIL import Image import itertools import re import os import glob import shutil import sys import copy import h5py from typing import Any, List, Tuple import torch import torch.nn as nn import torch.nn.functional as F import torch.optim as optim from torch.nn.parallel.data_parallel import data_parallel import torch.utils.checkpoint as cp from collections import OrderedDict from torch import Tensor target_city = 'ANTWERP' other_city_list = ['ANTWERP', 'BANGKOK', 'BARCELONA', 'MOSCOW', 'BERLIN', 'CHICAGO', 'ISTANBUL', 'MELBOURNE', ] input_train_data_folder_path = '../../0_data/' + target_city + '/' + 'training' input_static_data_path = '../../0_data/' + target_city + '/' + target_city + "_static.h5" out_dir = 'output' os.environ['CUDA_VISIBLE_DEVICES'] = '0' SEED = int(time.time()) num_train_file = 180 num_frame_per_day = 288 num_frame_before = 12 num_frame_sequence = 24 num_frame_out = 6 num_sequence_per_day = num_frame_per_day - num_frame_sequence + 1 height=495 width =436 num_channel=8 num_channel_out=8 num_channel_static = 9 visual_input_channels=105 visual_output_channels=48 vector_input_channels=1 num_epoch_to_train = 100000000 save_per_iteration = 5000 global_step_start = 0 initial_checkpoint = None initial_checkpoint_optimizer = None LEARNING_RATE = 3e-4 batch_size = 2 batch_size_val = 1 num_thread=2 num_groups = 8 EPS = 1e-12 np.set_printoptions(precision=8) NUM_INPUT_CHANNEL = visual_input_channels NUM_OUTPUT_CHANNEL = visual_output_channels def get_data_filepath_list_by_year(input_data_folder_path): data_filepath_list_1 = [] data_filepath_list_2 = [] for filename in os.listdir(input_data_folder_path): if filename.split('.')[-1] != 'h5': continue if filename.startswith('2019'): data_filepath_list_1.append(os.path.join(input_data_folder_path, filename)) elif filename.startswith('2020'): data_filepath_list_2.append(os.path.join(input_data_folder_path, filename)) else: print('Error - Unknown data year\t', filename) exit(-1) data_filepath_list_1 = sorted(data_filepath_list_1) data_filepath_list_2 = sorted(data_filepath_list_2) return data_filepath_list_1, data_filepath_list_2 class Deconv3x3Block(nn.Sequential): def __init__(self, in_size: int, h_size: int, ) -> None: super(Deconv3x3Block, self).__init__() self.add_module('deconv', nn.ConvTranspose2d(in_size, h_size, kernel_size=3, stride=2, padding=1, bias=True)) self.add_module('elu', nn.ELU(inplace=True)) self.add_module('norm', nn.GroupNorm(num_groups=num_groups, num_channels=h_size)) class Conv1x1Block(nn.Sequential): def __init__(self, in_size: int, h_size: int, ) -> None: super(Conv1x1Block, self).__init__() self.add_module('conv', nn.Conv2d(in_size, h_size, kernel_size=1, stride=1, padding=0, bias=True)) class
(nn.Sequential): def __init__(self, in_size: int, h_size: int, ) -> None: super(Conv3x3Block, self).__init__() self.add_module('conv', nn.Conv2d(in_size, h_size, kernel_size=3, stride=1, padding=1, bias=True)) self.add_module('elu', nn.ELU(inplace=True)) self.add_module('norm', nn.GroupNorm(num_groups=num_groups, num_channels=h_size)) class AvgBlock(nn.Sequential): def __init__(self, kernel_size: int, stride: int, padding: int) -> None: super(AvgBlock, self).__init__() self.add_module('pool', nn.AvgPool2d(kernel_size=kernel_size, stride=stride, padding=padding)) class MaxBlock(nn.Sequential): def __init__(self, kernel_size: int, stride: int, padding: int) -> None: super(MaxBlock, self).__init__() self.add_module('pool', nn.MaxPool2d(kernel_size=kernel_size, stride=stride, padding=padding)) class DownBlock(nn.Module): def __init__(self, in_size: int, h_size: int, out_size: int, do_pool: int = True): super(DownBlock, self).__init__() self.do_pool = do_pool in_size_cum = in_size self.conv_1 = Conv3x3Block( in_size=in_size_cum, h_size=h_size) in_size_cum += h_size self.conv_3 = Conv3x3Block( in_size=in_size_cum, h_size=h_size) in_size_cum += h_size self.conv_2 = Conv1x1Block( in_size=in_size_cum, h_size=out_size) def forward(self, x): batch_size = len(x) if self.do_pool: x = F.interpolate(x, scale_factor=0.7, mode='bilinear', align_corners=False, recompute_scale_factor=None) x_list = [] x_list.append(x) x = self.conv_1(x) x_list.append(x) x = torch.cat(x_list, 1) x = self.conv_3(x) x_list.append(x) x = torch.cat(x_list, 1) x = self.conv_2(x) return x def cuda(self, ): super(DownBlock, self).cuda() self.conv_1.cuda() self.conv_3.cuda() self.conv_2.cuda() return self class UpBlock(nn.Module): def __init__(self, in_size: int, in_size_2: int, h_size: int, out_size: int, ): super(UpBlock, self).__init__() self.deconv = Conv3x3Block( in_size=in_size, h_size=h_size) self.out_conv = Conv3x3Block( in_size=h_size + in_size_2, h_size=out_size) def forward(self, x1, x2): x1 = self.deconv(x1) x1 = F.interpolate(x1, size=x2.size()[2:4], scale_factor=None, mode='bilinear', align_corners=False, recompute_scale_factor=None) x = torch.cat([x2, x1], dim=1) return self.out_conv(x) def cuda(self, ): super(UpBlock, self).cuda() self.deconv.cuda() self.out_conv.cuda() return self class NetA(nn.Module): def __init__(self,): super(NetA, self).__init__() self.block0 = DownBlock(in_size=NUM_INPUT_CHANNEL, h_size=128, out_size=128, do_pool=False) self.block1 = DownBlock(in_size=128, h_size=128, out_size=128,) self.block2 = DownBlock(in_size=128, h_size=128, out_size=128, ) self.block3 = DownBlock(in_size=128, h_size=128, out_size=128, ) self.block4 = DownBlock(in_size=128, h_size=128, out_size=128, ) self.block5 = DownBlock(in_size=128, h_size=128, out_size=128, ) self.block6 = DownBlock(in_size=128, h_size=128, out_size=128,) self.block7 = DownBlock(in_size=128, h_size=128, out_size=128,) self.block20 = Conv3x3Block(in_size=128, h_size=128) self.block16 = UpBlock(in_size=128, in_size_2=128, h_size=128, out_size=128,) self.block15 = UpBlock(in_size=128, in_size_2=128, h_size=128, out_size=128,) self.block14 = UpBlock(in_size=128, in_size_2=128, h_size=128, out_size=128,) self.block13 = UpBlock(in_size=128, in_size_2=128, h_size=128, out_size=128,) self.block12 = UpBlock(in_size=128, in_size_2=128, h_size=128, out_size=128,) self.block11 = UpBlock(in_size=128, in_size_2=128 , h_size=128, out_size=128,) self.block10 = UpBlock(in_size=128, in_size_2=128 , h_size=128, out_size=128,) self.out_conv = 
nn.Sequential(nn.Conv2d(128*1, NUM_OUTPUT_CHANNEL, kernel_size=3, stride=1, padding=1, bias=True)) if 1: for name, m in self.named_modules(): if isinstance(m, nn.Conv2d) or isinstance(m, nn.ConvTranspose2d): nn.init.kaiming_normal_(m.weight) elif isinstance(m, nn.BatchNorm2d): nn.init.constant_(m.weight, 1) nn.init.constant_(m.bias, 0) elif isinstance(m, nn.GroupNorm): nn.init.constant_(m.weight, 1) nn.init.constant_(m.bias, 0) elif isinstance(m, nn.Linear): nn.init.constant_(m.bias, 0) def forward(self, x): batch_size = len(x) x0 = self.block0(x) x1 = self.block1(x0) x2 = self.block2(x1) x3 = self.block3(x2) x4 = self.block4(x3) x5 = self.block5(x4) x6 = self.block6(x5) x7 = self.block7(x6) x = self.block20(x7) x = self.block16(x, x6) x = self.block15(x, x5) x = self.block14(x, x4) x = self.block13(x, x3) x = self.block12(x, x2) x = self.block11(x, x1) x = self.block10(x, x0) x = self.out_conv(x) x = torch.sigmoid(x) return x def cuda(self, ): super(NetA, self).cuda() self.block0.cuda() self.block1.cuda() self.block2.cuda() self.block3.cuda() self.block4.cuda() self.block5.cuda() self.block6.cuda() self.block7.cuda() self.block20.cuda() self.block16.cuda() self.block15.cuda() self.block14.cuda() self.block13.cuda() self.block12.cuda() self.block11.cuda() self.block10.cuda() self.out_conv.cuda() return self if __name__ == '__main__': if initial_checkpoint == None: assert global_step_start == 0 else: assert global_step_start > 0 random.seed(SEED) np.random.seed(SEED) torch.manual_seed(SEED) torch.cuda.manual_seed_all(SEED) torch.backends.cudnn.enabled = True torch.backends.cudnn.benchmark = True torch.backends.cudnn.deterministic = False try: if not os.path.exists(out_dir): os.makedirs(out_dir) except Exception: print('out_dir not made') net = NetA().cuda() optimizer = optim.Adam(filter(lambda p: p.requires_grad, net.parameters()),lr=LEARNING_RATE) loss_func2 = nn.MSELoss() if initial_checkpoint is not None: print('Loading ', initial_checkpoint) state_dict = torch.load(initial_checkpoint, map_location=lambda storage, loc: storage) net.load_state_dict(state_dict, strict=True) optimizer_state_dict_ = torch.load(initial_checkpoint_optimizer, map_location=lambda storage, loc: storage) optimizer_state_dict = optimizer_state_dict_['optimizer'] optimizer.load_state_dict(optimizer_state_dict) static_data = None if 1: file_path = input_static_data_path fr = h5py.File(file_path, 'r') a_group_key = list(fr.keys())[0] data = np.asarray(fr[a_group_key], np.uint8) static_data = data[np.newaxis,:,:,:] static_data = static_data.astype(np.float32) static_data = static_data / 255.0 static_data_list = [] if 1: for other_city in other_city_list: file_path = '../../0_data/' + other_city + '/' + other_city + "_static.h5" fr = h5py.File(file_path, 'r') a_group_key = list(fr.keys())[0] data = np.asarray(fr[a_group_key], np.uint8) static_data_ = data[np.newaxis,:,:,:] static_data_ = static_data_.astype(np.float32) static_data_ = static_data_ / 255.0 static_data_list.append(static_data_) train_static_data_index_list = [] train_data_filepath_list, val_data_filepath_list = get_data_filepath_list_by_year(input_train_data_folder_path) target_city_i = other_city_list.index(target_city) for _ in range(len(train_data_filepath_list)): train_static_data_index_list.append(target_city_i) for o, other_city in enumerate(other_city_list): if o == target_city_i: continue train_data_filepath_list_one, _ = get_data_filepath_list_by_year('../../0_data/' + other_city + '/' + 'training') for _ in 
range(len(train_data_filepath_list_one)): train_static_data_index_list.append(o) train_data_filepath_list += train_data_filepath_list_one train_set = [] for i in range(len(train_data_filepath_list)): for j in range(num_sequence_per_day): train_set.append( (i,j) ) num_iteration_per_epoch = int(len(train_set) / batch_size) print('num_iteration_per_epoch:', num_iteration_per_epoch) assert num_iteration_per_epoch > 10 val_set = [] val_skip_k = 0 val_skip_ratio = 5 for i in range(len(val_data_filepath_list)): for j in range(0, num_sequence_per_day, num_frame_sequence): val_skip_k += 1 if val_skip_k % val_skip_ratio == 0: val_set.append( (i,j) ) num_val_iteration_per_epoch = int(len(val_set) / batch_size_val) print('num_val_iteration_per_epoch:', num_val_iteration_per_epoch) train_input_queue = queue.Queue() train_output_queue = queue.Queue() def load_train_multithread(): while True: if train_input_queue.empty() or train_output_queue.qsize() > 8: time.sleep(0.1) continue i_j_list = train_input_queue.get() train_orig_data_batch_list = [] train_data_batch_list = [] train_data_mask_list = [] train_stat_batch_list = [] train_static_data_batch_list = [] for train_i_j in i_j_list: (i,j) = train_i_j file_path = train_data_filepath_list[i] train_static_data_batch_list.append(static_data_list[train_static_data_index_list[i]]) fr = h5py.File(file_path, 'r') a_group_key = list(fr.keys())[0] data = fr[a_group_key] train_data_batch_list.append(data[j:j+num_frame_sequence,:,:,:][np.newaxis,:,:,:,:]) train_data_batch = np.concatenate(train_data_batch_list, axis=0) train_static_data_batch = np.concatenate(train_static_data_batch_list,axis=0) input_data = train_data_batch[:,:num_frame_before ,:,:,:] orig_label = train_data_batch[:, num_frame_before:,:,:,:num_channel_out] true_label = np.concatenate((orig_label[:, 0:3, :,:,:], orig_label[:, 5::3,:,:,:] ), axis=1) input_data = input_data.astype(np.float32) true_label = true_label.astype(np.float32) input_data = input_data / 255.0 true_label = true_label / 255.0 flip_dr = np.random.randint(0,2) if flip_dr == 1: input_data_flipped = copy.deepcopy(input_data) input_data_flipped[:,:,:,:,4:8] = input_data[:,:,:,:,0:4] input_data_flipped[:,:,:,:,0:4] = input_data[:,:,:,:,4:8] input_data = input_data_flipped[:,:,::-1,::-1,:] true_label_flipped = copy.deepcopy(true_label) true_label_flipped[:,:,:,:,4:8] = true_label[:,:,:,:,0:4] true_label_flipped[:,:,:,:,0:4] = true_label[:,:,:,:,4:8] true_label = true_label_flipped[:,:,::-1,::-1,:] train_static_data_batch_flipped = copy.deepcopy(train_static_data_batch) train_static_data_batch_flipped[:,5:9,:,:] = train_static_data_batch[:,1:5,:,:] train_static_data_batch_flipped[:,1:5,:,:] = train_static_data_batch[:,5:9,:,:] train_static_data_batch = train_static_data_batch_flipped[:,:,::-1,::-1] input_data = np.moveaxis(input_data, -1, 2).reshape((batch_size, -1, height, width)) true_label = np.moveaxis(true_label, -1, 2).reshape((batch_size, -1, height, width)) input_data = np.concatenate((input_data, train_static_data_batch), axis=1) train_output_queue.put( (input_data, true_label) ) thread_list = [] assert num_thread > 0 for i in range(num_thread): t = threading.Thread(target=load_train_multithread) t.start() net.train() sum_train_loss = 0.0 sum_train_iter = 0 global_step = global_step_start for epoch in range(num_epoch_to_train): np.random.shuffle(train_set) for a in range(num_iteration_per_epoch): i_j_list = [] for train_i_j in train_set[a * batch_size : (a+1) * batch_size]: i_j_list.append(train_i_j) 
train_input_queue.put(i_j_list) for a in range(num_iteration_per_epoch): if global_step % save_per_iteration == 0: net.eval() state_dict_0 = copy.deepcopy(net.state_dict()) torch.save(state_dict_0, out_dir + '/%09d_model.pth' % (global_step)) torch.save( { 'optimizer': optimizer.state_dict(), 'global_step': global_step, 'epoch': epoch, }, out_dir + '/%09d_optimizer.pth' % (global_step)) eval_loss_list = list() eval_loss_list = [0] with torch.no_grad(): for a in range(num_val_iteration_per_epoch): val_orig_data_batch_list = [] val_data_batch_list = [] val_data_mask_list = [] val_stat_batch_list = [] for i_j in val_set[a * batch_size_val : (a+1) * batch_size_val]: (i,j) = i_j file_path = val_data_filepath_list[i] fr = h5py.File(file_path, 'r') a_group_key = list(fr.keys())[0] data = fr[a_group_key] val_data_batch_list.append(data[j:j+num_frame_sequence,:,:,:][np.newaxis,:,:,:,:]) val_data_batch = np.concatenate(val_data_batch_list, axis=0) input_data = val_data_batch[:,:num_frame_before ,:,:,:] orig_label = val_data_batch[:, num_frame_before:,:,:,:num_channel_out] true_label = np.concatenate((orig_label[:, 0:3, :,:,:], orig_label[:, 5::3,:,:,:]), axis=1) input_data = input_data.astype(np.float32) true_label = true_label.astype(np.float32) input_data = input_data / 255.0 true_label = true_label / 255.0 input_data = np.moveaxis(input_data, -1, 2).reshape((batch_size_val, -1, height, width)) true_label = np.moveaxis(true_label, -1, 2).reshape((batch_size_val, -1, height, width)) input_data = np.concatenate((input_data,np.repeat(static_data, batch_size_val, axis=0)), axis=1) input = torch.from_numpy(input_data).float().cuda() target = torch.from_numpy(true_label).float().cuda() prediction = net(input) loss = loss_func2(prediction, target) eval_loss_list.append(loss.item()) avg_train_loss = sum_train_loss / (float(sum_train_iter)+EPS) sum_train_loss = 0.0 sum_train_iter = 0 print('global_step:', global_step, '\t', 'epoch:', epoch, \ '\t', 'train_loss:', avg_train_loss, \ '\t', 'eval_loss:', np.mean(eval_loss_list), \ '\t', datetime.now(), ) debug_out = open('res.txt', 'a') debug_out.write(str(global_step)) debug_out.write('\t') debug_out.write('%.8f' % float(avg_train_loss)) debug_out.write('\t') debug_out.write('%.8f' % float(np.mean(eval_loss_list))) debug_out.write('\n') debug_out.close() net.train() while train_output_queue.empty(): time.sleep(0.1) (input_data, true_label) = train_output_queue.get() optimizer.zero_grad() input = torch.from_numpy(input_data).float().cuda() target = torch.from_numpy(true_label).float().cuda() prediction = net(input) loss = loss_func2(prediction, target) sum_train_iter += 1 sum_train_loss += loss.item() loss.backward() optimizer.step() global_step += 1
Conv3x3Block
util_test.go
// Copyright 2017 CoreOS, Inc. //
// you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package dbtester import ( "reflect" "testing" ) func Test_assignRequest(t *testing.T) { ranges := []int64{1, 10, 50, 100, 300, 500, 700, 1000} total := int64(1000000) rs := assignRequest(ranges, total) expected := []int64{120000, 120000, 120000, 120000, 120000, 120000, 120000, 160000} if !reflect.DeepEqual(rs, expected) { t.Fatalf("expected %+v, got %+v", expected, rs) } cur := int64(0) for _, v := range expected { cur += v } if cur != total { t.Fatalf("sum must be %d, got %d", total, cur) } }
// Licensed under the Apache License, Version 2.0 (the "License");
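A hedged sketch of one assignRequest implementation that is consistent with the expected values in the test above; the real function may differ. Here each range gets total/len(ranges) rounded down to the nearest 10,000 requests, and the last range absorbs the remainder so the assignments sum to total.

package main

import "fmt"

func assignRequestSketch(ranges []int64, total int64) []int64 {
	unit := total / int64(len(ranges)) / 10000 * 10000 // floor to a round unit
	rs := make([]int64, len(ranges))
	var sum int64
	for i := 0; i < len(rs)-1; i++ {
		rs[i] = unit
		sum += unit
	}
	rs[len(rs)-1] = total - sum // remainder goes to the last range
	return rs
}

func main() {
	fmt.Println(assignRequestSketch([]int64{1, 10, 50, 100, 300, 500, 700, 1000}, 1000000))
	// Output: [120000 120000 120000 120000 120000 120000 120000 160000]
}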
party.ts
import {Component} from '@angular/core'; import {PartyService} from '../services/party'; @Component({ selector: 'party',
</li> </ul> ` }) export class Party { constructor(public partyService:PartyService) { } }
template: `<ul> <li *ngFor="let person of partyService.attendees"> {{person.name}}
index.js
/* eslint-disable no-console, no-process-exit, global-require */ const fs = require('fs') const path = require('path') const args = process.argv let action = '' let config = {}
const zanata = require('./zanata') const configFile = path.join(__dirname, '../zanata.local.json') const availableLanguages = require('../client/gettext/json/available_languages.json') const languages = setLanguages(availableLanguages) /** * Sample Zanata config file * { "ZANATA_API_KEY":"", "ZANATA_PROJECT":"", "ZANATA_USER":"" } */ if (fs.existsSync(configFile)) { config = require(configFile) } else { if (!process.env.ZANATA_API_KEY) { console.log('Please set env variables or create config file') process.exit(1) } else { config.apiKey = process.env.ZANATA_API_KEY config.project = process.env.ZANATA_PROJECT config.version = (process.env.TRAVIS_BRANCH ? process.env.TRAVIS_BRANCH : process.env.zanataVersion) config.user = process.env.ZANATA_USER } } zanata.setConfig(config) if (args.length < 3) { console.log('Did not specify the correct number of args') process.exit(1) } else { action = args[2] } if (action === 'upload') { const potFile = path.join(__dirname, '../client/gettext/po/manageiq-ui-service.pot') zanata.upload(potFile) } else if (action === 'download') { console.log('Downloading Files') const outputDir = path.join(__dirname, '../client/gettext/po/') zanata.download(languages, outputDir) } function setLanguages (languageList) { const tmpLanguages = [] for (var key in languageList) { tmpLanguages.push(key) } return tmpLanguages }
wondershaper.go
package main import ( "flag" "log" "os" "github.com/mysteriumnetwork/go-wondershaper/wondershaper" ) func
() { adapter := flag.String("a", "", "set the adapter") down := flag.Int("d", 0, "set maximum download rate (in Kbps)") up := flag.Int("u", 0, "set maximum upload rate (in Kbps)") clear := flag.Bool("c", false, "clear the limits from adapter") status := flag.Bool("s", false, "show the current status of adapter") flag.Parse() if *adapter == "" { log.Fatalln("Please supply the adapter name") } shaper := wondershaper.New() shaper.Stdout = os.Stdout shaper.Stderr = os.Stderr if *clear { shaper.Clear(*adapter) } if *down != 0 { err := shaper.LimitDownlink(*adapter, *down) if err != nil { log.Fatalln("Could not limit downlink", err) } } if *up != 0 { err := shaper.LimitUplink(*adapter, *up) if err != nil { log.Fatalln("Could not limit uplink", err) } } if *status { err := shaper.Status(*adapter) if err != nil { log.Fatalln("Could not query adapter status", err) } } }
main
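A hedged sketch showing the wondershaper package driven directly rather than through the CLI flags above, using only the calls visible in main; the adapter name and rates are placeholders.

package main

import (
	"log"
	"os"

	"github.com/mysteriumnetwork/go-wondershaper/wondershaper"
)

func main() {
	shaper := wondershaper.New()
	shaper.Stdout = os.Stdout
	shaper.Stderr = os.Stderr

	shaper.Clear("eth0") // drop any existing limits first
	if err := shaper.LimitDownlink("eth0", 1024); err != nil { // 1024 Kbps down
		log.Fatalln("Could not limit downlink", err)
	}
	if err := shaper.LimitUplink("eth0", 512); err != nil { // 512 Kbps up
		log.Fatalln("Could not limit uplink", err)
	}
}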
gitlab.service.spec.ts
import { Test, TestingModule } from '@nestjs/testing'; import { GitlabService } from './gitlab.service'; import { HttpModule } from '@nestjs/common'; import { ConfigService } from '@nestjs/config'; describe('GitlabService', () => { let service: GitlabService; beforeEach(async () => { const module: TestingModule = await Test.createTestingModule({ imports: [HttpModule], providers: [GitlabService, ConfigService], }).compile(); service = module.get<GitlabService>(GitlabService); }); it('should be defined', () => {
expect(service).toBeDefined(); }); });
client.go
// Copyright (C) 2019-2020 Zilliz. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software distributed under the License // is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express // or implied. See the License for the specific language governing permissions and limitations under the License. package grpcrootcoordclient import ( "context" "fmt" "sync" "time" grpc_middleware "github.com/grpc-ecosystem/go-grpc-middleware" grpc_retry "github.com/grpc-ecosystem/go-grpc-middleware/retry" grpc_opentracing "github.com/grpc-ecosystem/go-grpc-middleware/tracing/opentracing" "github.com/milvus-io/milvus/internal/log" "github.com/milvus-io/milvus/internal/proto/commonpb" "github.com/milvus-io/milvus/internal/proto/datapb" "github.com/milvus-io/milvus/internal/proto/internalpb" "github.com/milvus-io/milvus/internal/proto/milvuspb" "github.com/milvus-io/milvus/internal/proto/proxypb" "github.com/milvus-io/milvus/internal/proto/rootcoordpb" "github.com/milvus-io/milvus/internal/types" "github.com/milvus-io/milvus/internal/util/retry" "github.com/milvus-io/milvus/internal/util/sessionutil" "github.com/milvus-io/milvus/internal/util/trace" "github.com/milvus-io/milvus/internal/util/typeutil" "go.uber.org/zap" "google.golang.org/grpc" "google.golang.org/grpc/codes" ) // Base is an interface that embeds types.RootCoord and contains some other methods. type Base interface { types.RootCoord Init() error Start() error Stop() error Register() error } // GrpcClient grpc client type GrpcClient struct { ctx context.Context cancel context.CancelFunc grpcClient rootcoordpb.RootCoordClient conn *grpc.ClientConn grpcClientMtx sync.RWMutex sess *sessionutil.Session addr string getGrpcClient func() (rootcoordpb.RootCoordClient, error) } func getRootCoordAddr(sess *sessionutil.Session) (string, error) { key := typeutil.RootCoordRole msess, _, err := sess.GetSessions(key) if err != nil { log.Debug("RootCoordClient GetSessions failed", zap.Any("key", key)) return "", err } log.Debug("RootCoordClient GetSessions success") ms, ok := msess[key] if !ok { log.Debug("RootCoordClient mess key not exist", zap.Any("key", key)) return "", fmt.Errorf("number of RootCoord is incorrect, %d", len(msess)) } return ms.Address, nil } // NewClient creates a root coordinator client with the specified etcd info // ctx execution control context // metaRoot is the path in etcd for root coordinator registration // etcdEndpoints are the address list for etcd end points func
(ctx context.Context, metaRoot string, etcdEndpoints []string) (*GrpcClient, error) { sess := sessionutil.NewSession(ctx, metaRoot, etcdEndpoints) if sess == nil { err := fmt.Errorf("new session error, maybe can not connect to etcd") log.Debug("RootCoordClient NewClient failed", zap.Error(err)) return nil, err } ctx, cancel := context.WithCancel(ctx) client := &GrpcClient{ ctx: ctx, cancel: cancel, sess: sess, } client.setGetGrpcClientFunc() return client, nil } // Init initialize grpc parameters func (c *GrpcClient) Init() error { Params.Init() return nil } func (c *GrpcClient) connect(retryOptions ...retry.Option) error { var err error connectRootCoordAddrFn := func() error { c.addr, err = getRootCoordAddr(c.sess) if err != nil { log.Debug("RootCoordClient getRootCoordAddr failed", zap.Error(err)) return err } opts := trace.GetInterceptorOpts() log.Debug("RootCoordClient try reconnect ", zap.String("address", c.addr)) ctx, cancel := context.WithTimeout(c.ctx, 15*time.Second) defer cancel() conn, err := grpc.DialContext(ctx, c.addr, grpc.WithInsecure(), grpc.WithBlock(), grpc.WithDefaultCallOptions( grpc.MaxCallRecvMsgSize(Params.ClientMaxRecvSize), grpc.MaxCallSendMsgSize(Params.ClientMaxSendSize)), grpc.WithUnaryInterceptor( grpc_middleware.ChainUnaryClient( grpc_retry.UnaryClientInterceptor( grpc_retry.WithMax(3), grpc_retry.WithCodes(codes.Aborted, codes.Unavailable), ), grpc_opentracing.UnaryClientInterceptor(opts...), )), grpc.WithStreamInterceptor( grpc_middleware.ChainStreamClient( grpc_retry.StreamClientInterceptor(grpc_retry.WithMax(3), grpc_retry.WithCodes(codes.Aborted, codes.Unavailable), ), grpc_opentracing.StreamClientInterceptor(opts...), )), ) if err != nil { return err } if c.conn != nil { _ = c.conn.Close() } c.conn = conn return nil } err = retry.Do(c.ctx, connectRootCoordAddrFn, retryOptions...) if err != nil { log.Debug("RootCoordClient try reconnect failed", zap.Error(err)) return err } log.Debug("RootCoordClient try reconnect success") c.grpcClient = rootcoordpb.NewRootCoordClient(c.conn) return nil } func (c *GrpcClient) setGetGrpcClientFunc() { c.getGrpcClient = c.getGrpcClientFunc } func (c *GrpcClient) getGrpcClientFunc() (rootcoordpb.RootCoordClient, error) { c.grpcClientMtx.RLock() if c.grpcClient != nil { defer c.grpcClientMtx.RUnlock() return c.grpcClient, nil } c.grpcClientMtx.RUnlock() c.grpcClientMtx.Lock() defer c.grpcClientMtx.Unlock() if c.grpcClient != nil { return c.grpcClient, nil } // FIXME(dragondriver): how to handle error here? 
// if we return nil here, then we should check if client is nil outside, err := c.connect(retry.Attempts(20)) if err != nil { return nil, err } return c.grpcClient, nil } // Start dummy func (c *GrpcClient) Start() error { return nil } // Stop terminate grpc connection func (c *GrpcClient) Stop() error { c.cancel() c.grpcClientMtx.Lock() defer c.grpcClientMtx.Unlock() if c.conn != nil { return c.conn.Close() } return nil } // Register dummy func (c *GrpcClient) Register() error { return nil } func (c *GrpcClient) resetConnection() { c.grpcClientMtx.Lock() defer c.grpcClientMtx.Unlock() if c.conn != nil { _ = c.conn.Close() } c.conn = nil c.grpcClient = nil } func (c *GrpcClient) recall(caller func() (interface{}, error)) (interface{}, error) { ret, err := caller() if err == nil { return ret, nil } log.Debug("RootCoord Client grpc error", zap.Error(err)) c.resetConnection() ret, err = caller() if err == nil { return ret, nil } return ret, err } // GetComponentStates TODO: timeout need to be propagated through ctx func (c *GrpcClient) GetComponentStates(ctx context.Context) (*internalpb.ComponentStates, error) { ret, err := c.recall(func() (interface{}, error) { client, err := c.getGrpcClient() if err != nil { return nil, err } return client.GetComponentStates(ctx, &internalpb.GetComponentStatesRequest{}) }) if err != nil || ret == nil { return nil, err } return ret.(*internalpb.ComponentStates), err } // GetTimeTickChannel get timetick channel name func (c *GrpcClient) GetTimeTickChannel(ctx context.Context) (*milvuspb.StringResponse, error) { ret, err := c.recall(func() (interface{}, error) { client, err := c.getGrpcClient() if err != nil { return nil, err } return client.GetTimeTickChannel(ctx, &internalpb.GetTimeTickChannelRequest{}) }) if err != nil || ret == nil { return nil, err } return ret.(*milvuspb.StringResponse), err } // GetStatisticsChannel just define a channel, not used currently func (c *GrpcClient) GetStatisticsChannel(ctx context.Context) (*milvuspb.StringResponse, error) { ret, err := c.recall(func() (interface{}, error) { client, err := c.getGrpcClient() if err != nil { return nil, err } return client.GetStatisticsChannel(ctx, &internalpb.GetStatisticsChannelRequest{}) }) if err != nil || ret == nil { return nil, err } return ret.(*milvuspb.StringResponse), err } // CreateCollection create collection func (c *GrpcClient) CreateCollection(ctx context.Context, in *milvuspb.CreateCollectionRequest) (*commonpb.Status, error) { ret, err := c.recall(func() (interface{}, error) { client, err := c.getGrpcClient() if err != nil { return nil, err } return client.CreateCollection(ctx, in) }) if err != nil || ret == nil { return nil, err } return ret.(*commonpb.Status), err } // DropCollection drop collection func (c *GrpcClient) DropCollection(ctx context.Context, in *milvuspb.DropCollectionRequest) (*commonpb.Status, error) { ret, err := c.recall(func() (interface{}, error) { client, err := c.getGrpcClient() if err != nil { return nil, err } return client.DropCollection(ctx, in) }) if err != nil || ret == nil { return nil, err } return ret.(*commonpb.Status), err } // HasCollection check collection existence func (c *GrpcClient) HasCollection(ctx context.Context, in *milvuspb.HasCollectionRequest) (*milvuspb.BoolResponse, error) { ret, err := c.recall(func() (interface{}, error) { client, err := c.getGrpcClient() if err != nil { return nil, err } return client.HasCollection(ctx, in) }) if err != nil || ret == nil { return nil, err } return ret.(*milvuspb.BoolResponse), err } // 
DescribeCollection return collection info func (c *GrpcClient) DescribeCollection(ctx context.Context, in *milvuspb.DescribeCollectionRequest) (*milvuspb.DescribeCollectionResponse, error) { ret, err := c.recall(func() (interface{}, error) { client, err := c.getGrpcClient() if err != nil { return nil, err } return client.DescribeCollection(ctx, in) }) if err != nil || ret == nil { return nil, err } return ret.(*milvuspb.DescribeCollectionResponse), err } // ShowCollections list all collection names func (c *GrpcClient) ShowCollections(ctx context.Context, in *milvuspb.ShowCollectionsRequest) (*milvuspb.ShowCollectionsResponse, error) { ret, err := c.recall(func() (interface{}, error) { client, err := c.getGrpcClient() if err != nil { return nil, err } return client.ShowCollections(ctx, in) }) if err != nil || ret == nil { return nil, err } return ret.(*milvuspb.ShowCollectionsResponse), err } // CreatePartition create partition func (c *GrpcClient) CreatePartition(ctx context.Context, in *milvuspb.CreatePartitionRequest) (*commonpb.Status, error) { ret, err := c.recall(func() (interface{}, error) { client, err := c.getGrpcClient() if err != nil { return nil, err } return client.CreatePartition(ctx, in) }) if err != nil || ret == nil { return nil, err } return ret.(*commonpb.Status), err } // DropPartition drop partition func (c *GrpcClient) DropPartition(ctx context.Context, in *milvuspb.DropPartitionRequest) (*commonpb.Status, error) { ret, err := c.recall(func() (interface{}, error) { client, err := c.getGrpcClient() if err != nil { return nil, err } return client.DropPartition(ctx, in) }) if err != nil || ret == nil { return nil, err } return ret.(*commonpb.Status), err } // HasPartition check partition existence func (c *GrpcClient) HasPartition(ctx context.Context, in *milvuspb.HasPartitionRequest) (*milvuspb.BoolResponse, error) { ret, err := c.recall(func() (interface{}, error) { client, err := c.getGrpcClient() if err != nil { return nil, err } return client.HasPartition(ctx, in) }) if err != nil || ret == nil { return nil, err } return ret.(*milvuspb.BoolResponse), err } // ShowPartitions list all partitions in collection func (c *GrpcClient) ShowPartitions(ctx context.Context, in *milvuspb.ShowPartitionsRequest) (*milvuspb.ShowPartitionsResponse, error) { ret, err := c.recall(func() (interface{}, error) { client, err := c.getGrpcClient() if err != nil { return nil, err } return client.ShowPartitions(ctx, in) }) if err != nil || ret == nil { return nil, err } return ret.(*milvuspb.ShowPartitionsResponse), err } // CreateIndex create index func (c *GrpcClient) CreateIndex(ctx context.Context, in *milvuspb.CreateIndexRequest) (*commonpb.Status, error) { ret, err := c.recall(func() (interface{}, error) { client, err := c.getGrpcClient() if err != nil { return nil, err } return client.CreateIndex(ctx, in) }) if err != nil || ret == nil { return nil, err } return ret.(*commonpb.Status), err } // DropIndex drop index func (c *GrpcClient) DropIndex(ctx context.Context, in *milvuspb.DropIndexRequest) (*commonpb.Status, error) { ret, err := c.recall(func() (interface{}, error) { client, err := c.getGrpcClient() if err != nil { return nil, err } return client.DropIndex(ctx, in) }) if err != nil || ret == nil { return nil, err } return ret.(*commonpb.Status), err } // DescribeIndex return index info func (c *GrpcClient) DescribeIndex(ctx context.Context, in *milvuspb.DescribeIndexRequest) (*milvuspb.DescribeIndexResponse, error) { ret, err := c.recall(func() (interface{}, error) { client, 
err := c.getGrpcClient() if err != nil { return nil, err } return client.DescribeIndex(ctx, in) }) if err != nil || ret == nil { return nil, err } return ret.(*milvuspb.DescribeIndexResponse), err } // AllocTimestamp global timestamp allocator func (c *GrpcClient) AllocTimestamp(ctx context.Context, in *rootcoordpb.AllocTimestampRequest) (*rootcoordpb.AllocTimestampResponse, error) { ret, err := c.recall(func() (interface{}, error) { client, err := c.getGrpcClient() if err != nil { return nil, err } return client.AllocTimestamp(ctx, in) }) if err != nil || ret == nil { return nil, err } return ret.(*rootcoordpb.AllocTimestampResponse), err } // AllocID global ID allocator func (c *GrpcClient) AllocID(ctx context.Context, in *rootcoordpb.AllocIDRequest) (*rootcoordpb.AllocIDResponse, error) { ret, err := c.recall(func() (interface{}, error) { client, err := c.getGrpcClient() if err != nil { return nil, err } return client.AllocID(ctx, in) }) if err != nil || ret == nil { return nil, err } return ret.(*rootcoordpb.AllocIDResponse), err } // UpdateChannelTimeTick used to handle ChannelTimeTickMsg func (c *GrpcClient) UpdateChannelTimeTick(ctx context.Context, in *internalpb.ChannelTimeTickMsg) (*commonpb.Status, error) { ret, err := c.recall(func() (interface{}, error) { client, err := c.getGrpcClient() if err != nil { return nil, err } return client.UpdateChannelTimeTick(ctx, in) }) if err != nil || ret == nil { return nil, err } return ret.(*commonpb.Status), err } // DescribeSegment returns segment info func (c *GrpcClient) DescribeSegment(ctx context.Context, in *milvuspb.DescribeSegmentRequest) (*milvuspb.DescribeSegmentResponse, error) { ret, err := c.recall(func() (interface{}, error) { client, err := c.getGrpcClient() if err != nil { return nil, err } return client.DescribeSegment(ctx, in) }) if err != nil || ret == nil { return nil, err } return ret.(*milvuspb.DescribeSegmentResponse), err } // ShowSegments list all segments func (c *GrpcClient) ShowSegments(ctx context.Context, in *milvuspb.ShowSegmentsRequest) (*milvuspb.ShowSegmentsResponse, error) { ret, err := c.recall(func() (interface{}, error) { client, err := c.getGrpcClient() if err != nil { return nil, err } return client.ShowSegments(ctx, in) }) if err != nil || ret == nil { return nil, err } return ret.(*milvuspb.ShowSegmentsResponse), err } // ReleaseDQLMessageStream release DQL msgstream func (c *GrpcClient) ReleaseDQLMessageStream(ctx context.Context, in *proxypb.ReleaseDQLMessageStreamRequest) (*commonpb.Status, error) { ret, err := c.recall(func() (interface{}, error) { client, err := c.getGrpcClient() if err != nil { return nil, err } return client.ReleaseDQLMessageStream(ctx, in) }) if err != nil || ret == nil { return nil, err } return ret.(*commonpb.Status), err } // SegmentFlushCompleted check whether segment flush is completed func (c *GrpcClient) SegmentFlushCompleted(ctx context.Context, in *datapb.SegmentFlushCompletedMsg) (*commonpb.Status, error) { ret, err := c.recall(func() (interface{}, error) { client, err := c.getGrpcClient() if err != nil { return nil, err } return client.SegmentFlushCompleted(ctx, in) }) if err != nil || ret == nil { return nil, err } return ret.(*commonpb.Status), err } // GetMetrics get metrics func (c *GrpcClient) GetMetrics(ctx context.Context, in *milvuspb.GetMetricsRequest) (*milvuspb.GetMetricsResponse, error) { ret, err := c.recall(func() (interface{}, error) { client, err := c.getGrpcClient() if err != nil { return nil, err }
return client.GetMetrics(ctx, in) }) if err != nil || ret == nil { return nil, err } return ret.(*milvuspb.GetMetricsResponse), err } // CreateAlias create collection alias func (c *GrpcClient) CreateAlias(ctx context.Context, req *milvuspb.CreateAliasRequest) (*commonpb.Status, error) { ret, err := c.recall(func() (interface{}, error) { client, err := c.getGrpcClient() if err != nil { return nil, err } return client.CreateAlias(ctx, req) }) if err != nil || ret == nil { return nil, err } return ret.(*commonpb.Status), err } // DropAlias drop collection alias func (c *GrpcClient) DropAlias(ctx context.Context, req *milvuspb.DropAliasRequest) (*commonpb.Status, error) { ret, err := c.recall(func() (interface{}, error) { client, err := c.getGrpcClient() if err != nil { return nil, err } return client.DropAlias(ctx, req) }) if err != nil || ret == nil { return nil, err } return ret.(*commonpb.Status), err } // AlterAlias alter collection alias func (c *GrpcClient) AlterAlias(ctx context.Context, req *milvuspb.AlterAliasRequest) (*commonpb.Status, error) { ret, err := c.recall(func() (interface{}, error) { client, err := c.getGrpcClient() if err != nil { return nil, err } return client.AlterAlias(ctx, req) }) if err != nil || ret == nil { return nil, err } return ret.(*commonpb.Status), err }
NewClient
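A hedged usage sketch for the client above; the import path, metaRoot, and etcd endpoint are assumptions for illustration, and errors are simply fatal-logged.

package main

import (
	"context"
	"log"

	grpcrootcoordclient "github.com/milvus-io/milvus/internal/distributed/rootcoord/client" // assumed import path
)

func main() {
	ctx := context.Background()
	client, err := grpcrootcoordclient.NewClient(ctx, "by-dev/meta", []string{"localhost:2379"})
	if err != nil {
		log.Fatal(err)
	}
	if err = client.Init(); err != nil {
		log.Fatal(err)
	}
	if err = client.Start(); err != nil {
		log.Fatal(err)
	}
	defer client.Stop()

	states, err := client.GetComponentStates(ctx)
	if err != nil {
		log.Fatal(err)
	}
	log.Println(states)
}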
init.go
/******************************************************************************* * Copyright 2018 Dell Inc. * Copyright (c) 2019 Intel Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. *******************************************************************************/ package config import ( "fmt" "io/ioutil" "net/http" "sync" "time" "github.com/edgexfoundry/go-mod-core-contracts/clients/logger" "github.com/edgexfoundry/go-mod-registry/pkg/types" "github.com/edgexfoundry/go-mod-registry/registry" "github.com/edgexfoundry/edgex-go/internal" "github.com/edgexfoundry/edgex-go/internal/pkg/config" ) // Global variables var Configuration *ConfigurationStruct var LoggingClient logger.LoggingClient var Registry registry.Client // The purpose of Retry is different here than in other services. In this case, we use a retry in order // to initialize the RegistryClient that will be used to write configuration information. Other services // use Retry to read their information. Config-seed writes information. func
(useProfile string, timeout int, wait *sync.WaitGroup, ch chan error) { until := time.Now().Add(time.Millisecond * time.Duration(timeout)) for time.Now().Before(until) { var err error //When looping, only handle configuration if it hasn't already been set. if Configuration == nil { Configuration, err = initializeConfiguration(useProfile) if err != nil { ch <- err } else { // Setup Logging logTarget := setLoggingTarget() LoggingClient = logger.NewClient(internal.ConfigSeedServiceKey, Configuration.EnableRemoteLogging, logTarget, Configuration.LoggingLevel) } } //Check to verify Registry connectivity if Registry == nil { Registry, err = initRegistryClient("") if err != nil { ch <- err } } else { if !Registry.IsAlive() { ch <- fmt.Errorf("Registry (%s) is not running", Configuration.Registry.Type) } else { break } } time.Sleep(time.Second * time.Duration(1)) } close(ch) wait.Done() return } func Init() bool { if Configuration != nil && Registry != nil { return true } return false } func initializeConfiguration(useProfile string) (*ConfigurationStruct, error) { conf := &ConfigurationStruct{} err := config.LoadFromFile(useProfile, conf) if err != nil { return nil, err } return conf, nil } func initRegistryClient(serviceKey string) (registry.Client, error) { registryConfig := types.Config{ Host: Configuration.Registry.Host, Port: Configuration.Registry.Port, Type: Configuration.Registry.Type, ServiceKey: serviceKey, } registryClient, err := registry.NewRegistryClient(registryConfig) if err != nil { return nil, fmt.Errorf("unable to create New Registry: %v", err) } if !registryClient.IsAlive() { return nil, fmt.Errorf("registry is not available") } return registryClient, nil } // Helper method to get the body from the response after making the request func getBody(resp *http.Response) ([]byte, error) { body, err := ioutil.ReadAll(resp.Body) return body, err } func setLoggingTarget() string { logTarget := Configuration.LoggingRemoteURL if !Configuration.EnableRemoteLogging { return Configuration.LoggingFile } return logTarget }
Retry
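A hedged sketch of how a service entry point might drive Retry, inferred only from the signature above: run it in a goroutine, drain the error channel that Retry closes, then join the WaitGroup. The import path, profile name, and timeout are placeholders.

package main

import (
	"log"
	"sync"

	"github.com/edgexfoundry/edgex-go/internal/seed/config" // assumed import path
)

func main() {
	var wg sync.WaitGroup
	wg.Add(1)
	ch := make(chan error)
	go config.Retry("docker", 30000, &wg, ch) // hypothetical profile and 30s timeout

	for err := range ch { // channel is closed by Retry when it finishes
		if err != nil {
			log.Println(err)
		}
	}
	wg.Wait()

	if !config.Init() {
		log.Fatal("configuration or registry failed to initialize")
	}
}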
youtube_contest.py
# import the libraries from PyQt5.QtCore import Qt from PyQt5.QtWidgets import QApplication, QWidget, QPushButton, QLabel,QVBoxLayout,QHBoxLayout, QMessageBox, QRadioButton # create the application and the main window app=QApplication([]) main_win =QWidget() main_win.setWindowTitle('Конкурс от Crazy People') question =QLabel("В каком году канал получил золотую кнопку от YouTube?") btn_answer1 =QRadioButton('2005') btn_answer2 =QRadioButton('2010') btn_answer3 =QRadioButton('2015') btn_answer4 =QRadioButton('2020') layout_main=QVBoxLayout() h1=QHBoxLayout() h2=QHBoxLayout() h3=QHBoxLayout() h1.addWidget(question,alignment =Qt.AlignCenter) h2.addWidget(btn_answer1,alignment =Qt.AlignCenter) h2.addWidget(btn_answer2,alignment =Qt.AlignCenter) h3.addWidget(btn_answer3,alignment =Qt.AlignCenter) h3.addWidget(btn_answer4,alignment =Qt.AlignCenter) layout_main.addLayout(h1) layout_main.addLayout(h2) layout_main.addLayout(h3) main_win.setLayout(layout_main) def win(): win =QMessageBox() win.setText('Верно!') win.exec_() def lose(): lose =QMessageBox() lose.setText('Нет, в 2015 году. Вы выиграли фирменный плакат') lose.exec_
e) main_win.show() app.exec_()
() btn_answer1.clicked.connect(lose) btn_answer2.clicked.connect(lose) btn_answer3.clicked.connect(win) btn_answer4.clicked.connect(los
DecodeAuthorizationMessageCommand.js
import { __extends } from "tslib"; import { DecodeAuthorizationMessageRequest, DecodeAuthorizationMessageResponse } from "../models/models_0"; import { deserializeAws_queryDecodeAuthorizationMessageCommand, serializeAws_queryDecodeAuthorizationMessageCommand, } from "../protocols/Aws_query"; import { getSerdePlugin } from "@aws-sdk/middleware-serde"; import { getAwsAuthPlugin } from "@aws-sdk/middleware-signing";
* <p>For example, if a user is not authorized to perform an operation that he or she has * requested, the request returns a <code>Client.UnauthorizedOperation</code> response (an * HTTP 403 response). Some AWS operations additionally return an encoded message that can * provide details about this authorization failure. </p> * <note> * <p>Only certain AWS operations return an encoded authorization message. The * documentation for an individual operation indicates whether that operation returns an * encoded message in addition to returning an HTTP code.</p> * </note> * <p>The message is encoded because the details of the authorization status can constitute * privileged information that the user who requested the operation should not see. To decode * an authorization status message, a user must be granted permissions via an IAM policy to * request the <code>DecodeAuthorizationMessage</code> * (<code>sts:DecodeAuthorizationMessage</code>) action. </p> * <p>The decoded message includes the following type of information:</p> * <ul> * <li> * <p>Whether the request was denied due to an explicit deny or due to the absence of an * explicit allow. For more information, see <a href="https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_evaluation-logic.html#policy-eval-denyallow">Determining Whether a Request is Allowed or Denied</a> in the * <i>IAM User Guide</i>. </p> * </li> * <li> * <p>The principal who made the request.</p> * </li> * <li> * <p>The requested action.</p> * </li> * <li> * <p>The requested resource.</p> * </li> * <li> * <p>The values of condition keys in the context of the user's request.</p> * </li> * </ul> */ var DecodeAuthorizationMessageCommand = /** @class */ (function (_super) { __extends(DecodeAuthorizationMessageCommand, _super); // Start section: command_properties // End section: command_properties function DecodeAuthorizationMessageCommand(input) { var _this = // Start section: command_constructor _super.call(this) || this; _this.input = input; return _this; // End section: command_constructor } /** * @internal */ DecodeAuthorizationMessageCommand.prototype.resolveMiddleware = function (clientStack, configuration, options) { this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); this.middlewareStack.use(getAwsAuthPlugin(configuration)); var stack = clientStack.concat(this.middlewareStack); var logger = configuration.logger; var clientName = "STSClient"; var commandName = "DecodeAuthorizationMessageCommand"; var handlerExecutionContext = { logger: logger, clientName: clientName, commandName: commandName, inputFilterSensitiveLog: DecodeAuthorizationMessageRequest.filterSensitiveLog, outputFilterSensitiveLog: DecodeAuthorizationMessageResponse.filterSensitiveLog, }; var requestHandler = configuration.requestHandler; return stack.resolve(function (request) { return requestHandler.handle(request.request, options || {}); }, handlerExecutionContext); }; DecodeAuthorizationMessageCommand.prototype.serialize = function (input, context) { return serializeAws_queryDecodeAuthorizationMessageCommand(input, context); }; DecodeAuthorizationMessageCommand.prototype.deserialize = function (output, context) { return deserializeAws_queryDecodeAuthorizationMessageCommand(output, context); }; return DecodeAuthorizationMessageCommand; }($Command)); export { DecodeAuthorizationMessageCommand }; //# sourceMappingURL=DecodeAuthorizationMessageCommand.js.map
import { Command as $Command } from "@aws-sdk/smithy-client"; /** * <p>Decodes additional information about the authorization status of a request from an * encoded message returned in response to an AWS request.</p>
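A hedged sketch of the same operation made from Go with aws-sdk-go v1, rather than the JS v3 command above; the encoded message value is a placeholder.

package main

import (
	"fmt"
	"log"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/sts"
)

func main() {
	sess := session.Must(session.NewSession())
	svc := sts.New(sess)

	// Requires the sts:DecodeAuthorizationMessage permission on the caller.
	out, err := svc.DecodeAuthorizationMessage(&sts.DecodeAuthorizationMessageInput{
		EncodedMessage: aws.String("<encoded message from a Client.UnauthorizedOperation response>"),
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(aws.StringValue(out.DecodedMessage))
}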
router_5_6_3.py
#!/usr/bin/env python # # Copyright (c) 2016, Nest Labs, Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # 3. Neither the name of the copyright holder nor the # names of its contributors may be used to endorse or promote products # derived from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # from autothreadharness.harness_case import HarnessCase import unittest class Router_5_6_3(HarnessCase): suite = 2 case = '5 6 3' golden_devices_required = 3 def
(self, dialog, title): pass if __name__ == '__main__': unittest.main()
on_dialog
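As a sketch of the pattern above: each harness test is a thin HarnessCase subclass that declares only its suite, case id, required golden devices, and optional dialog hooks. The case number below is illustrative, not a real test-plan entry.

from autothreadharness.harness_case import HarnessCase
import unittest


class Router_5_6_4(HarnessCase):
    # Hypothetical sibling case, same shape as Router_5_6_3 above.
    suite = 2
    case = '5 6 4'
    golden_devices_required = 3

    def on_dialog(self, dialog, title):
        # Dismiss harness dialogs without user interaction.
        pass


if __name__ == '__main__':
    unittest.main()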
resource_user.go
package looker import ( "context" "fmt" "strconv" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" v4 "github.com/looker-open-source/sdk-codegen/go/sdk/v4" ) func resourceUser() *schema.Resource { return &schema.Resource{ CreateContext: resourceUserCreate, ReadContext: resourceUserRead, UpdateContext: resourceUserUpdate, DeleteContext: resourceUserDelete, Schema: map[string]*schema.Schema{ "credentials_email": { Type: schema.TypeList, Required: true, MaxItems: 1, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "email": { Type: schema.TypeString, Required: true, }, "forced_password_reset_at_next_login": { Type: schema.TypeBool, Optional: true, }, }, }, }, "first_name": { Type: schema.TypeString, Optional: true, }, "last_name": { Type: schema.TypeString, Optional: true, }, "display_name": { Type: schema.TypeString, Computed: true, }, "email": { Type: schema.TypeString, Computed: true, }, "locale": { Type: schema.TypeString, Optional: true, Computed: true, }, "home_folder_id": { Type: schema.TypeInt, Optional: true, Computed: true, }, "personal_folder_id": { Type: schema.TypeInt, Computed: true, }, "is_disabled": { Type: schema.TypeBool, Optional: true, Computed: true, }, "models_dir_validated": { Type: schema.TypeBool, Optional: true, Computed: true, }, "ui_state": { Type: schema.TypeMap, Optional: true, Computed: true, Elem: &schema.Schema{Type: schema.TypeString}, }, "group_ids": { Type: schema.TypeSet, Optional: true, Computed: true, Elem: &schema.Schema{Type: schema.TypeInt}, }, "role_ids": { Type: schema.TypeSet, Optional: true, Computed: true, Elem: &schema.Schema{Type: schema.TypeInt}, }, }, } } func resourceUserCreate(ctx context.Context, d *schema.ResourceData, m interface{}) (diags diag.Diagnostics) { config := m.(*Config) sdk := config.sdk user, err := sdk.CreateUser(makeWriteUser(d), "", nil) if err != nil { return diag.FromErr(err) } userId := *user.Id d.SetId(strconv.Itoa(int(userId))) // create email credentials creds := d.Get("credentials_email").([]interface{})[0] _, err = sdk.CreateUserCredentialsEmail(userId, makeCredentialsEmail(creds), "", nil) if err != nil { // delete user if unable to create email credential sdk.DeleteUser(userId, nil) return diag.FromErr(err) } // add user to group(s) groups := d.Get("group_ids").(*schema.Set) for _, g := range groups.List() { u := v4.GroupIdForGroupUserInclusion{ UserId: &userId, } sdk.AddGroupUser(int64(g.(int)), u, nil) } // add user to role(s) roles := convertIntSlice(d.Get("role_ids").(*schema.Set).List()) sdk.SetUserRoles(userId, roles, "", nil) return resourceUserRead(ctx, d, m) } func resourceUserRead(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics { sdk := m.(*Config).sdk // Warning or errors can be collected in a slice type var diags diag.Diagnostics userId, err := strconv.ParseInt(d.Id(), 10, 64) if err != nil { return diag.FromErr(err) } user, err := sdk.User(userId, "", nil) if err != nil { return diag.FromErr(err) } if err := d.Set("ui_state", user.UiState); err != nil { return diag.FromErr(err) } d.Set("first_name", user.FirstName) d.Set("last_name", user.LastName) d.Set("display_name", user.DisplayName) d.Set("email", user.Email) d.Set("locale", user.Locale) d.Set("is_disabled", user.IsDisabled) d.Set("home_folder_id", user.HomeFolderId) d.Set("personal_folder_id", user.PersonalFolderId) d.Set("models_dir_validated", user.ModelsDirValidated) d.Set("group_ids", user.GroupIds) d.Set("role_ids", user.RoleIds) return 
diags } func resourceUserUpdate(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics { sdk := m.(*Config).sdk userId, err := strconv.ParseInt(d.Id(), 10, 64) if err != nil { return diag.FromErr(err) } // update user if d.HasChanges("first_name", "last_name")
// update email credentials if d.HasChange("credentials_email") { creds := d.Get("credentials_email").([]interface{})[0] _, err = sdk.UpdateUserCredentialsEmail(userId, makeCredentialsEmail(creds), "", nil) if err != nil { return diag.FromErr(err) } } // add user to role(s) if d.HasChange("role_ids") { roles := convertIntSlice(d.Get("role_ids").([]interface{})) sdk.SetUserRoles(userId, roles, "", nil) } return resourceUserRead(ctx, d, m) } func resourceUserDelete(ctx context.Context, d *schema.ResourceData, m interface{}) (diags diag.Diagnostics) { sdk := m.(*Config).sdk userId, err := strconv.ParseInt(d.Id(), 10, 64) if err != nil { return diag.FromErr(err) } _, err = sdk.DeleteUser(userId, nil) if err != nil { return diag.FromErr(err) } d.SetId("") return diags } func makeCredentialsEmail(creds interface{}) v4.WriteCredentialsEmail { credentials := creds.(map[string]interface{}) email := credentials["email"].(string) forcedReset := credentials["forced_password_reset_at_next_login"].(bool) return v4.WriteCredentialsEmail{ Email: &email, ForcedPasswordResetAtNextLogin: &forcedReset, } } func makeWriteUser(d *schema.ResourceData) v4.WriteUser { firstName := d.Get("first_name").(string) lastName := d.Get("last_name").(string) locale := d.Get("locale").(string) isDisabled := d.Get("is_disabled").(bool) homeFolderID := fmt.Sprint(d.Get("home_folder_id").(int)) modelsDirValidated := d.Get("models_dir_validated").(bool) uiState := d.Get("ui_state").(map[string]interface{}) user := v4.WriteUser{ FirstName: &firstName, LastName: &lastName, Locale: &locale, IsDisabled: &isDisabled, HomeFolderId: &homeFolderID, ModelsDirValidated: &modelsDirValidated, UiState: &uiState, } return user }
{
	user := makeWriteUser(d)
	_, err = sdk.UpdateUser(userId, user, "", nil)
	if err != nil {
		return diag.FromErr(err)
	}
}
write_packet.rs
// Copyright (c) 2017 Anatoly Ikorsky // // Licensed under the Apache License, Version 2.0 // <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT // license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. All files in the project carrying such notice may not be copied, // modified, or distributed except according to those terms. use futures_core::ready; use futures_sink::Sink; use std::{ future::Future, pin::Pin, task::{Context, Poll}, }; use crate::{connection_like::Connection, error::IoError}; /// Writes a packet. #[derive(Debug)] #[must_use = "futures do nothing unless you `.await` or poll them"] pub struct WritePacket<'a, 't> { conn: Connection<'a, 't>, data: Option<Vec<u8>>, } impl<'a, 't> WritePacket<'a, 't> { pub(crate) fn
<T: Into<Connection<'a, 't>>>(conn: T, data: Vec<u8>) -> Self { Self { conn: conn.into(), data: Some(data), } } } impl Future for WritePacket<'_, '_> { type Output = std::result::Result<(), IoError>; fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> { if self.data.is_some() { let codec = Pin::new(self.conn.stream_mut().codec.as_mut().expect("must be here")); ready!(codec.poll_ready(cx))?; } if let Some(data) = self.data.take() { let codec = Pin::new(self.conn.stream_mut().codec.as_mut().expect("must be here")); // to get here, stream must be ready codec.start_send(data)?; } let codec = Pin::new(self.conn.stream_mut().codec.as_mut().expect("must be here")); ready!(codec.poll_flush(cx))?; Poll::Ready(Ok(())) } }
new
api.go
package main

// TODO: This was also copied from dssdc; consider moving the shared parts
// into a common package.

import (
	"encoding/json"
	"fmt"
	"net/http"

	"github.com/ddosakura/ds-watcher-simple-dev/repo"
)

type CommonResponse struct {
	Code int         `json:"code"`
	Msg  string      `json:"msg"`
	Data interface{} `json:"data"`
}

var successMsg = "SUCCESS"

type apiHandler struct {
	api func(r *http.Request) (interface{}, error)
}

func (h *apiHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	r.ParseForm()
	data, e := h.api(r)
	w.Header().Set("Content-Type", "application/json;charset=utf-8")
	var rep CommonResponse
	if e == nil {
		rep = CommonResponse{
			Code: 0,
			Msg:  successMsg,
			Data: data,
		}
	} else {
		rep = CommonResponse{
			Code: -1,
			Msg:  e.Error(),
			Data: data,
		}
	}
	d, e := json.Marshal(rep)
	// fmt.Println(rep)
	if e != nil {
		d, _ := json.Marshal(CommonResponse{
			Code: -1,
			Msg:  e.Error(),
			Data: nil,
		})
		fmt.Fprintln(w, string(d))
		return
	}
	fmt.Fprintln(w, string(d))
}

func apiDeveloper(*http.Request) (interface{}, error) {
	return repo.Developers(), nil
}

func apiDetail(r *http.Request) (interface{}, error) {
	name := r.FormValue("name")
	// fmt.Println(r.Form, name)
	return *repo.Detail(name), nil
}

func apiNote(r *http.Request) (interface{}, err
// result, _ := ioutil.ReadAll(r.Body) // r.Body.Close() // fmt.Printf("%s\n", result) // var f interface{} // json.Unmarshal(result, &f) // fmt.Println(f) // m := f.(map[string]interface{}) // fmt.Println(r, r.Method) // fmt.Println(r.Form) data := r.FormValue("data") d := repo.Notes{} json.Unmarshal([]byte(data), &d) // fmt.Println(d) repo.Note(&d) /* repo.Note(&repo.Notes{ Developer: r.FormValue(""), Project: r.FormValue(""), File: r.FormValue(""), Ext: r.FormValue(""), // ChangeTime: nil, }) */ return "", nil }
or) {
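Every handler above is wrapped by apiHandler into the CommonResponse envelope (code/msg/data). A minimal Python client sketch; the base URL and the /detail route binding are assumptions, since the mux setup is not shown in this file.

import requests

# Hypothetical base URL and route; only the handler functions are visible above.
resp = requests.get("http://localhost:8080/detail", params={"name": "alice"})
body = resp.json()

# code == 0 with msg == "SUCCESS" signals success; otherwise msg carries the error.
if body["code"] == 0:
    print(body["data"])
else:
    print("request failed:", body["msg"])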
time-reports.tsx
import React from 'react';
import googleSheetsApi from 'lib/google-sheets-api'; import { Container, Grid, Cell } from 'elements'; import { Controls } from './controls'; import { Progress } from './progress'; import { Calendar } from './calendar'; export const TimeReports = () => { const settings = parseSettings(sessionStorage.config); toggleApi.email = sessionStorage.email; toggleApi.token = settings.togglToken; toggleApi.workspace = settings.togglWorkspace; googleSheetsApi.sheetId = settings.googleSpreatsheet; return ( <> <NavBar /> <Container> <Grid> <Cell> <Controls /> </Cell> <Cell> <Progress /> </Cell> </Grid> <Calendar /> </Container> </> ); };
import { NavBar } from 'components/nav-bar';
import { parseSettings } from 'lib/settings-parser';
import toggleApi from 'lib/toggl-api';
reactor_test.go
package mempool import ( "net" "sync" "testing" "time" "github.com/fortytw2/leaktest" "github.com/go-kit/kit/log/term" "github.com/pkg/errors" "github.com/stretchr/testify/assert" "github.com/tendermint/tendermint/abci/example/kvstore" cfg "github.com/tendermint/tendermint/config" "github.com/tendermint/tendermint/libs/log" "github.com/tendermint/tendermint/p2p" "github.com/tendermint/tendermint/p2p/mock" "github.com/tendermint/tendermint/proxy" "github.com/tendermint/tendermint/types" ) type peerState struct { height int64 } func (ps peerState) GetHeight() int64 { return ps.height } // mempoolLogger is a TestingLogger which uses a different // color for each validator ("validator" key must exist). func mempoolLogger() log.Logger { return log.TestingLoggerWithColorFn(func(keyvals ...interface{}) term.FgBgColor { for i := 0; i < len(keyvals)-1; i += 2 { if keyvals[i] == "validator" { return term.FgBgColor{Fg: term.Color(uint8(keyvals[i+1].(int) + 1))} } } return term.FgBgColor{} }) } // connect N mempool reactors through N switches func makeAndConnectReactors(config *cfg.Config, n int) []*Reactor { reactors := make([]*Reactor, n) logger := mempoolLogger() for i := 0; i < n; i++ { app := kvstore.NewKVStoreApplication() cc := proxy.NewLocalClientCreator(app) mempool, cleanup := newMempoolWithApp(cc) defer cleanup() reactors[i] = NewReactor(config.Mempool, mempool) // so we dont start the consensus states reactors[i].SetLogger(logger.With("validator", i)) } p2p.MakeConnectedSwitches(config.P2P, n, func(i int, s *p2p.Switch) *p2p.Switch { s.AddReactor("MEMPOOL", reactors[i]) return s }, p2p.Connect2Switches) return reactors } func waitForTxsOnReactors(t *testing.T, txs types.Txs, reactors []*Reactor) { // wait for the txs in all mempools wg := new(sync.WaitGroup) for i, reactor := range reactors { wg.Add(1) go func(r *Reactor, reactorIndex int) { defer wg.Done() waitForTxsOnReactor(t, txs, r, reactorIndex) }(reactor, i) } done := make(chan struct{}) go func() { wg.Wait() close(done) }() timer := time.After(TIMEOUT) select { case <-timer: t.Fatal("Timed out waiting for txs") case <-done: } } func waitForTxsOnReactor(t *testing.T, txs types.Txs, reactor *Reactor, reactorIndex int) { mempool := reactor.mempool for mempool.Size() < len(txs) { time.Sleep(time.Millisecond * 100) } reapedTxs := mempool.ReapMaxTxs(len(txs)) for i, tx := range txs { assert.Equalf(t, tx, reapedTxs[i], "txs at index %d on reactor %d don't match: %v vs %v", i, reactorIndex, tx, reapedTxs[i]) } } // ensure no txs on reactor after some timeout func
(t *testing.T, reactor *Reactor, timeout time.Duration) { time.Sleep(timeout) // wait for the txs in all mempools assert.Zero(t, reactor.mempool.Size()) } const ( NUM_TXS = 1000 TIMEOUT = 120 * time.Second // ridiculously high because CircleCI is slow ) func TestReactorBroadcastTxMessage(t *testing.T) { config := cfg.TestConfig() const N = 4 reactors := makeAndConnectReactors(config, N) defer func() { for _, r := range reactors { r.Stop() } }() for _, r := range reactors { for _, peer := range r.Switch.Peers().List() { peer.Set(types.PeerStateKey, peerState{1}) } } // send a bunch of txs to the first reactor's mempool // and wait for them all to be received in the others txs := checkTxs(t, reactors[0].mempool, NUM_TXS, UnknownPeerID) waitForTxsOnReactors(t, txs, reactors) } func TestReactorNoBroadcastToSender(t *testing.T) { config := cfg.TestConfig() const N = 2 reactors := makeAndConnectReactors(config, N) defer func() { for _, r := range reactors { r.Stop() } }() // send a bunch of txs to the first reactor's mempool, claiming it came from peer // ensure peer gets no txs checkTxs(t, reactors[0].mempool, NUM_TXS, 1) ensureNoTxs(t, reactors[1], 100*time.Millisecond) } func TestBroadcastTxForPeerStopsWhenPeerStops(t *testing.T) { if testing.Short() { t.Skip("skipping test in short mode.") } config := cfg.TestConfig() const N = 2 reactors := makeAndConnectReactors(config, N) defer func() { for _, r := range reactors { r.Stop() } }() // stop peer sw := reactors[1].Switch sw.StopPeerForError(sw.Peers().List()[0], errors.New("some reason")) // check that we are not leaking any go-routines // i.e. broadcastTxRoutine finishes when peer is stopped leaktest.CheckTimeout(t, 10*time.Second)() } func TestBroadcastTxForPeerStopsWhenReactorStops(t *testing.T) { if testing.Short() { t.Skip("skipping test in short mode.") } config := cfg.TestConfig() const N = 2 reactors := makeAndConnectReactors(config, N) // stop reactors for _, r := range reactors { r.Stop() } // check that we are not leaking any go-routines // i.e. broadcastTxRoutine finishes when reactor is stopped leaktest.CheckTimeout(t, 10*time.Second)() } func TestMempoolIDsBasic(t *testing.T) { ids := newMempoolIDs() peer := mock.NewPeer(net.IP{127, 0, 0, 1}) ids.ReserveForPeer(peer) assert.EqualValues(t, 1, ids.GetForPeer(peer)) ids.Reclaim(peer) ids.ReserveForPeer(peer) assert.EqualValues(t, 2, ids.GetForPeer(peer)) ids.Reclaim(peer) } func TestMempoolIDsPanicsIfNodeRequestsOvermaxActiveIDs(t *testing.T) { if testing.Short() { return } // 0 is already reserved for UnknownPeerID ids := newMempoolIDs() for i := 0; i < maxActiveIDs-1; i++ { peer := mock.NewPeer(net.IP{127, 0, 0, 1}) ids.ReserveForPeer(peer) } assert.Panics(t, func() { peer := mock.NewPeer(net.IP{127, 0, 0, 1}) ids.ReserveForPeer(peer) }) } func TestDontExhaustMaxActiveIDs(t *testing.T) { config := cfg.TestConfig() const N = 1 reactors := makeAndConnectReactors(config, N) defer func() { for _, r := range reactors { r.Stop() } }() reactor := reactors[0] for i := 0; i < maxActiveIDs+1; i++ { peer := mock.NewPeer(nil) reactor.Receive(MempoolChannel, peer, []byte{0x1, 0x2, 0x3}) reactor.AddPeer(peer) } }
ensureNoTxs
plugin.py
import logging from dataclasses import asdict from decimal import Decimal from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Union from urllib.parse import urljoin import opentracing import opentracing.tags from django.core.exceptions import ValidationError from prices import Money, TaxedMoney, TaxedMoneyRange from ...checkout import base_calculations from ...checkout.fetch import fetch_checkout_lines from ...core.taxes import TaxError, TaxType, charge_taxes_on_shipping, zero_taxed_money from ...discount import DiscountInfo from ...product.models import ProductType from ..base_plugin import BasePlugin, ConfigurationTypeField from ..error_codes import PluginErrorCode from . import ( DEFAULT_TAX_CODE, DEFAULT_TAX_DESCRIPTION, META_CODE_KEY, META_DESCRIPTION_KEY, AvataxConfiguration, CustomerErrors, TransactionType, _validate_checkout, _validate_order, api_get_request, api_post_request, generate_request_data_from_checkout, get_api_url, get_cached_tax_codes_or_fetch, get_checkout_tax_data, get_order_request_data, get_order_tax_data, ) from .tasks import api_post_request_task if TYPE_CHECKING: # flake8: noqa from ...account.models import Address from ...channel.models import Channel from ...checkout.fetch import CheckoutInfo, CheckoutLineInfo from ...checkout.models import Checkout, CheckoutLine from ...order.models import Order, OrderLine from ...product.models import Product, ProductVariant from ..models import PluginConfiguration logger = logging.getLogger(__name__) class AvataxPlugin(BasePlugin): PLUGIN_NAME = "Avalara" PLUGIN_ID = "mirumee.taxes.avalara" DEFAULT_CONFIGURATION = [ {"name": "Username or account", "value": None}, {"name": "Password or license", "value": None}, {"name": "Use sandbox", "value": True}, {"name": "Company name", "value": "DEFAULT"}, {"name": "Autocommit", "value": False}, ] CONFIG_STRUCTURE = { "Username or account": { "type": ConfigurationTypeField.STRING, "help_text": "Provide user or account details", "label": "Username or account", }, "Password or license": { "type": ConfigurationTypeField.PASSWORD, "help_text": "Provide password or license details", "label": "Password or license", }, "Use sandbox": { "type": ConfigurationTypeField.BOOLEAN, "help_text": "Determines if Saleor should use Avatax sandbox API.", "label": "Use sandbox", }, "Company name": { "type": ConfigurationTypeField.STRING, "help_text": "Avalara needs to receive company code. 
Some more " "complicated systems can use more than one company " "code, in that case, this variable should be changed " "based on data from Avalara's admin panel", "label": "Company name", }, "Autocommit": { "type": ConfigurationTypeField.BOOLEAN, "help_text": "Determines, if all transactions sent to Avalara " "should be committed by default.", "label": "Autocommit", }, } def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) # Convert to dict to easier take config elements configuration = {item["name"]: item["value"] for item in self.configuration} self.config = AvataxConfiguration( username_or_account=configuration["Username or account"], password_or_license=configuration["Password or license"], use_sandbox=configuration["Use sandbox"], company_name=configuration["Company name"], autocommit=configuration["Autocommit"], ) def _skip_plugin( self, previous_value: Union[TaxedMoney, TaxedMoneyRange, Decimal] ) -> bool: if not (self.config.username_or_account and self.config.password_or_license): return True if not self.active: return True # The previous plugin already calculated taxes so we can skip our logic if isinstance(previous_value, TaxedMoneyRange): start = previous_value.start stop = previous_value.stop return start.net != start.gross and stop.net != stop.gross if isinstance(previous_value, TaxedMoney): return previous_value.net != previous_value.gross return False def _append_prices_of_not_taxed_lines( self, price: TaxedMoney, lines: Iterable["CheckoutLineInfo"], channel: "Channel", discounts: Iterable[DiscountInfo], ): for line_info in lines: if line_info.variant.product.charge_taxes: continue line_price = base_calculations.base_checkout_line_total( line_info, channel, discounts, ) price.gross.amount += line_price.gross.amount price.net.amount += line_price.net.amount return price def calculate_checkout_total( self, checkout_info: "CheckoutInfo", lines: Iterable["CheckoutLineInfo"], address: Optional["Address"], discounts: Iterable[DiscountInfo], previous_value: TaxedMoney, ) -> TaxedMoney: if self._skip_plugin(previous_value): return previous_value checkout_total = previous_value if not _validate_checkout(checkout_info, lines): return checkout_total response = get_checkout_tax_data(checkout_info, lines, discounts, self.config) if not response or "error" in response: return checkout_total currency = response.get("currencyCode") tax = Decimal(response.get("totalTax", 0.0)) total_net = Decimal(response.get("totalAmount", 0.0)) total_gross = Money(amount=total_net + tax, currency=currency) total_net = Money(amount=total_net, currency=currency) taxed_total = TaxedMoney(net=total_net, gross=total_gross) total = self._append_prices_of_not_taxed_lines( taxed_total, lines, checkout_info.channel, discounts ) voucher_value = checkout_info.checkout.discount if voucher_value: total -= voucher_value return max(total, zero_taxed_money(total.currency)) def _calculate_checkout_shipping( self, currency: str, lines: List[Dict], shipping_price: TaxedMoney ) -> TaxedMoney: shipping_tax = Decimal(0.0) shipping_net = shipping_price.net.amount for line in lines: if line["itemCode"] == "Shipping": shipping_net = Decimal(line["lineAmount"]) shipping_tax = Decimal(line["tax"]) break shipping_gross = Money(amount=shipping_net + shipping_tax, currency=currency) shipping_net = Money(amount=shipping_net, currency=currency) return TaxedMoney(net=shipping_net, gross=shipping_gross) def calculate_checkout_shipping( self, checkout_info: "CheckoutInfo", lines: Iterable["CheckoutLineInfo"], address: 
Optional["Address"], discounts: Iterable[DiscountInfo], previous_value: TaxedMoney, ) -> TaxedMoney: base_shipping_price = previous_value if not charge_taxes_on_shipping(): return base_shipping_price if self._skip_plugin(previous_value): return base_shipping_price if not _validate_checkout(checkout_info, lines): return base_shipping_price response = get_checkout_tax_data(checkout_info, lines, discounts, self.config) if not response or "error" in response: return base_shipping_price currency = str(response.get("currencyCode")) return self._calculate_checkout_shipping( currency, response.get("lines", []), base_shipping_price ) def preprocess_order_creation( self, checkout_info: "CheckoutInfo", discounts: Iterable[DiscountInfo], lines: Optional[Iterable["CheckoutLineInfo"]], previous_value: Any, ): """Ensure all the data is correct and we can proceed with creation of order. Raise an error when can't receive taxes. """ if lines is None: lines = fetch_checkout_lines(checkout_info.checkout) if self._skip_plugin(previous_value): return previous_value data = generate_request_data_from_checkout( checkout_info, lines, self.config, transaction_token=str(checkout_info.checkout.token), transaction_type=TransactionType.ORDER, discounts=discounts, ) if not data.get("createTransactionModel", {}).get("lines"): return previous_value transaction_url = urljoin( get_api_url(self.config.use_sandbox), "transactions/createoradjust" ) with opentracing.global_tracer().start_active_span( "avatax.transactions.crateoradjust" ) as scope: span = scope.span span.set_tag(opentracing.tags.COMPONENT, "tax") span.set_tag("service.name", "avatax") response = api_post_request(transaction_url, data, self.config) if not response or "error" in response: msg = response.get("error", {}).get("message", "") error_code = response.get("error", {}).get("code", "") logger.warning( "Unable to calculate taxes for checkout %s, error_code: %s, " "error_msg: %s", checkout_info.checkout.token, error_code, msg, ) customer_msg = CustomerErrors.get_error_msg(response.get("error", {})) raise TaxError(customer_msg) return previous_value def order_created(self, order: "Order", previous_value: Any) -> Any: if not self.active: return previous_value request_data = get_order_request_data(order, self.config) transaction_url = urljoin( get_api_url(self.config.use_sandbox), "transactions/createoradjust" ) api_post_request_task.delay( transaction_url, request_data, asdict(self.config), order.id ) return previous_value def calculate_checkout_line_total( self, checkout_info: "CheckoutInfo", lines: Iterable["CheckoutLineInfo"], checkout_line_info: "CheckoutLineInfo", address: Optional["Address"], discounts: Iterable["DiscountInfo"], previous_value: TaxedMoney, ) -> TaxedMoney: if self._skip_plugin(previous_value): return previous_value base_total = previous_value if not checkout_line_info.product.charge_taxes: return base_total if not _validate_checkout(checkout_info, lines): return base_total taxes_data = get_checkout_tax_data(checkout_info, lines, discounts, self.config) if not taxes_data or "error" in taxes_data: return base_total currency = taxes_data.get("currencyCode") for line in taxes_data.get("lines", []): if line.get("itemCode") == checkout_line_info.variant.sku: tax = Decimal(line.get("tax", 0.0)) line_net = Decimal(line["lineAmount"]) line_gross = Money(amount=line_net + tax, currency=currency) line_net = Money(amount=line_net, currency=currency) return TaxedMoney(net=line_net, gross=line_gross) return base_total def 
calculate_checkout_line_unit_price( self, checkout_info: "CheckoutInfo", lines: Iterable["CheckoutLineInfo"], checkout_line_info: "CheckoutLineInfo", address: Optional["Address"], discounts: Iterable["DiscountInfo"], previous_value: TaxedMoney, ): if not checkout_line_info.product.charge_taxes: return previous_value return self._calculate_unit_price( checkout_info, checkout_line_info.line, lines, checkout_line_info.variant, previous_value, discounts, is_order=False, ) def calculate_order_line_unit( self, order: "Order", order_line: "OrderLine", variant: "ProductVariant", product: "Product", previous_value: TaxedMoney, ) -> TaxedMoney: if not variant or (variant and not product.charge_taxes): return previous_value return self._calculate_unit_price( order, order_line, [], variant, previous_value, is_order=True ) def _calculate_unit_price( self, instance: Union["CheckoutInfo", "Order"], line: Union["CheckoutLine", "OrderLine"], lines_info: Iterable["CheckoutLineInfo"], variant: "ProductVariant", base_value: TaxedMoney, discounts: Optional[Iterable[DiscountInfo]] = [], *, is_order: bool, ): taxes_data = self._get_tax_data( instance, base_value, is_order, discounts, lines_info ) if taxes_data is None: return base_value currency = taxes_data.get("currencyCode") for line_data in taxes_data.get("lines", []): if line_data.get("itemCode") == variant.sku: tax = Decimal(line_data.get("tax", 0.0)) / line.quantity net = Decimal(line_data.get("lineAmount", 0.0)) / line.quantity gross = Money(amount=net + tax, currency=currency) net = Money(amount=net, currency=currency) return TaxedMoney(net=net, gross=gross) return base_value def calculate_order_shipping( self, order: "Order", previous_value: TaxedMoney ) -> TaxedMoney: if self._skip_plugin(previous_value): return previous_value if not charge_taxes_on_shipping(): return previous_value if not _validate_order(order): return zero_taxed_money(order.total.currency) taxes_data = get_order_tax_data(order, self.config, False) currency = taxes_data.get("currencyCode") for line in taxes_data.get("lines", []): if line["itemCode"] == "Shipping": tax = Decimal(line.get("tax", 0.0)) net = Decimal(line.get("lineAmount", 0.0)) gross = Money(amount=net + tax, currency=currency) net = Money(amount=net, currency=currency) return TaxedMoney(net=net, gross=gross) return TaxedMoney( # Ignore typing checks because it is checked in _validate_order net=order.shipping_method.price, # type: ignore gross=order.shipping_method.price, # type: ignore ) def get_tax_rate_type_choices(self, previous_value: Any) -> List[TaxType]: if not self.active: return previous_value return [ TaxType(code=tax_code, description=desc) for tax_code, desc in get_cached_tax_codes_or_fetch(self.config).items() ] def get_checkout_line_tax_rate( self, checkout_info: "CheckoutInfo", lines: Iterable["CheckoutLineInfo"], checkout_line_info: "CheckoutLineInfo", address: Optional["Address"], discounts: Iterable[DiscountInfo], previous_value: Decimal, ) -> Decimal: return self._get_unit_tax_rate( checkout_info, previous_value, False, discounts, lines ) def get_order_line_tax_rate( self, order: "Order", product: "Product", address: Optional["Address"], previous_value: Decimal, ) -> Decimal: return self._get_unit_tax_rate(order, previous_value, True) def get_checkout_shipping_tax_rate( self, checkout_info: "CheckoutInfo", lines: Iterable["CheckoutLineInfo"], address: Optional["Address"], discounts: Iterable[DiscountInfo], previous_value: Decimal, ): return self._get_shipping_tax_rate( checkout_info, previous_value, 
False, discounts, lines, ) def get_order_shipping_tax_rate(self, order: "Order", previous_value: Decimal): return self._get_shipping_tax_rate(order, previous_value, True) def _get_unit_tax_rate( self, instance: Union["Order", "CheckoutInfo"], base_rate: Decimal, is_order: bool, discounts: Optional[Iterable[DiscountInfo]] = None, lines_info: Iterable["CheckoutLineInfo"] = [], ): response = self._get_tax_data( instance, base_rate, is_order, discounts, lines_info ) if response is None: return base_rate rate = None response_summary = response.get("summary") if response_summary: rate = Decimal(response_summary[0].get("rate", 0.0)) return rate or base_rate def _get_shipping_tax_rate( self, instance: Union["Order", "CheckoutInfo"], base_rate: Decimal, is_order: bool, discounts: Optional[Iterable[DiscountInfo]] = None, lines_info: Iterable["CheckoutLineInfo"] = [], ): response = self._get_tax_data( instance, base_rate, is_order, discounts, lines_info ) if response is None: return base_rate lines_data = response.get("lines", []) for line in lines_data: if line["itemCode"] == "Shipping": line_details = line.get("details") if not line_details: return return Decimal(line_details[0].get("rate", 0.0)) return base_rate def _get_tax_data( self, instance: Union["Order", "CheckoutInfo"], base_value: Decimal, is_order: bool, discounts: Optional[Iterable[DiscountInfo]] = None, lines_info: Iterable["CheckoutLineInfo"] = [], ): if self._skip_plugin(base_value): return None valid = ( _validate_order(instance) # type: ignore if is_order else _validate_checkout(instance, lines_info) # type: ignore ) if not valid: return None response = ( get_order_tax_data(instance, self.config, False) # type: ignore if is_order else get_checkout_tax_data(instance, lines_info, discounts, self.config) # type: ignore ) if not response or "error" in response: return None return response def assign_tax_code_to_object_meta( self, obj: Union["Product", "ProductType"], tax_code: Optional[str], previous_value: Any, ): if not self.active: return previous_value if tax_code is None and obj.pk: obj.delete_value_from_metadata(META_CODE_KEY) obj.delete_value_from_metadata(META_DESCRIPTION_KEY) return previous_value codes = get_cached_tax_codes_or_fetch(self.config) if tax_code not in codes: return previous_value tax_description = codes.get(tax_code) tax_item = {META_CODE_KEY: tax_code, META_DESCRIPTION_KEY: tax_description} obj.store_value_in_metadata(items=tax_item) return previous_value def get_tax_code_from_object_meta( self, obj: Union["Product", "ProductType"], previous_value: Any ) -> TaxType: if not self.active: return previous_value # Product has None as it determines if we overwrite taxes for the product default_tax_code = None default_tax_description = None if isinstance(obj, ProductType): default_tax_code = DEFAULT_TAX_CODE default_tax_description = DEFAULT_TAX_DESCRIPTION tax_code = obj.get_value_from_metadata(META_CODE_KEY, default_tax_code) tax_description = obj.get_value_from_metadata( META_DESCRIPTION_KEY, default_tax_description ) return TaxType( code=tax_code, description=tax_description, ) def show_taxes_on_storefront(self, previous_value: bool) -> bool: if not self.active: return previous_value return False def fetch_taxes_data(self, previous_value): if not self.active: return previous_value get_cached_tax_codes_or_fetch(self.config) return True @classmethod def validate_authentication(cls, plugin_configuration: "PluginConfiguration"): conf = { data["name"]: data["value"] for data in plugin_configuration.configuration } url 
= urljoin(get_api_url(conf["Use sandbox"]), "utilities/ping") with opentracing.global_tracer().start_active_span( "avatax.utilities.ping" ) as scope: span = scope.span span.set_tag(opentracing.tags.COMPONENT, "tax") span.set_tag("service.name", "avatax") response = api_get_request( url, username_or_account=conf["Username or account"], password_or_license=conf["Password or license"], ) if not response.get("authenticated"): raise ValidationError( "Authentication failed. Please check provided data.", code=PluginErrorCode.PLUGIN_MISCONFIGURED.value, ) @classmethod def validate_plugin_configuration(cls, plugin_configuration: "PluginConfiguration"): """Validate if provided configuration is correct.""" missing_fields = [] configuration = plugin_configuration.configuration configuration = {item["name"]: item["value"] for item in configuration} if not configuration["Username or account"]: missing_fields.append("Username or account") if not configuration["Password or license"]:
if plugin_configuration.active: if missing_fields: error_msg = ( "To enable a plugin, you need to provide values for the " "following fields: " ) raise ValidationError( error_msg + ", ".join(missing_fields), code=PluginErrorCode.PLUGIN_MISCONFIGURED.value, ) cls.validate_authentication(plugin_configuration)
missing_fields.append("Password or license")
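A standalone sketch of the configuration handling in AvataxPlugin.__init__ above, which flattens the stored list of {name, value} pairs into a dict before building AvataxConfiguration; the credential values are made up.

# Hypothetical stored configuration, shaped like PluginConfiguration.configuration.
configuration = [
    {"name": "Username or account", "value": "avatax-account"},
    {"name": "Password or license", "value": "avatax-license"},
    {"name": "Use sandbox", "value": True},
    {"name": "Company name", "value": "DEFAULT"},
    {"name": "Autocommit", "value": False},
]

# The same list-to-dict flattening performed in AvataxPlugin.__init__.
flat = {item["name"]: item["value"] for item in configuration}
assert flat["Use sandbox"] is True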
AImap.py
#!/opt/local/bin/python
#-*- coding: utf-8 -*-
import numpy as np
import matplotlib.pyplot as plt
import datetime
from matplotlib.colors import LogNorm
from mpl_toolkits.axes_grid1 import make_axes_locatable
import matplotlib.cm as cm
import sys
from mpl_toolkits.basemap import Basemap
import os
import calendar
#assim_out="assim_out_E2O_womc"
#assim_out="assim_out_E2O_wmc"
#assim_out="assim_out_biased_womc"
#assim_out="assim_out_ECMWF_womc_baised_if_fixed1.10"
#sys.path.append('../'+assim_out+'/')
os.system("ln -sf ../gosh/params.py params.py")
import params as pm
experiment="E2O_wmc_06"
#assim_out=pm.DA_dir()+"/out/"+pm.experiment()+"/assim_out"
assim_out=pm.DA_dir()+"/out/"+experiment+"/assim_out"
print assim_out
#----
def mk_dir(sdir):
    try:
        os.makedirs(sdir)
    except:
        pass
#----
mk_dir(assim_out+"/fig")
mk_dir(assim_out+"/fig/AI")
#----
#argvs = sys.argv
year=2004
month=1
date=1
start_dt=datetime.date(year,month,date)
size=60
south= -90 north= 90 west= -180 east= 180 land="#FFFFFF" water="#C0C0C0" londiff=(east-west)*4 latdiff=(north-south)*4 npix=(90-north)*4 spix=(90-south)*4 wpix=(180+west)*4 epix=(180+east)*4 #lastday=int(argvs[1]) lastday=365 #int(argvs[1]) if calendar.isleap(year): lastday=366 else: lastday=365 N=lastday #-- # run calc_stat.py to create the statistical maps ratio=np.fromfile(assim_out+"/stat/annualmeanAI.bin",np.float32).reshape(720,1440) ###ratio=np.zeros((spix-npix)*(epix-wpix)).reshape([spix-npix,epix-wpix]) ###count=np.zeros((spix-npix)*(epix-wpix)).reshape([spix-npix,epix-wpix]) #******************************************************* ###if os.path.exists("../"+assim_out+"/img/AImap/annualmeanAI.bin"): ### ratio=np.fromfile("../"+assim_out+"/img/AImap/annualmeanAI.bin",np.float32).reshape(720,1440) ###else: ### for day in np.arange(0,lastday): ### #for day in np.arange(100,110): ### # analyse date ### target_dt=start_dt+datetime.timedelta(days=day) ### yyyy='%04d' % (target_dt.year) ### mm='%02d' % (target_dt.month) ### dd='%02d' % (target_dt.day) ### print yyyy,mm,dd ### ### # next day name ### next_dt=start_dt+datetime.timedelta(days=day+1) ### nxt_yyyy='%04d' % (next_dt.year) ### nxt_mm='%02d' % (next_dt.month) ### nxt_dd='%02d' % (next_dt.day) ### ### # True Discharge ### fname="../assim_out/rivout/true/rivout"+yyyy+mm+dd+".bin" ### org=np.fromfile(fname,np.float32).reshape([720,1440]) ### ### # open loop ### opn=[] ### for num in np.arange(1,pm.ens_mem()+1): ### numch = "%03d" % num ### fname = "../assim_out/rivout/open/rivout"+yyyy+mm+dd+"_"+numch+".bin" ### opn.append(np.fromfile(fname,np.float32).reshape([720,1440])) ### opn = np.array(opn) ### opn_mean=np.mean(opn,axis=0) ### ### # assimilated ### asm=[] ### for num in np.arange(1,pm.ens_mem()+1): ### numch = "%03d" % num ### fname = "../assim_out/rivout/assim/rivout"+yyyy+mm+dd+"_"+numch+".bin" ### asm.append(np.fromfile(fname,np.float32).reshape([720,1440])) ### asm = np.array(asm) ### asm_mean=np.mean(asm,axis=0) ### ### # assimilation index 計算 ### #ai=1-abs((asm_mean[npix:spix,wpix:epix]-opn_mean[npix:spix,wpix:epix])/(org[npix:spix,wpix:epix]-opn_mean[npix:spix,wpix:epix]+1e-20)-1) ### ai=1.- np.absolute((asm_mean[npix:spix,wpix:epix]-opn_mean[npix:spix,wpix:epix])/((org[npix:spix,wpix:epix]-opn_mean[npix:spix,wpix:epix])+1e-20)-1.) ### # read restart file for making ocean mask ### #fname = "../CaMa_in/restart/true/restart" + nxt_yyyy + nxt_mm + nxt_dd + "T.bin" ### fname = pm.CaMa_dir()+"/map/global_15min/rivout.bin" ### trueforo = np.fromfile(fname,np.float32).reshape([2,720,1440]) ### # ocean [ 0:ocean, 1:not ocean ] ### #ocean = (trueforo[0,npix:spix,wpix:epix]<1e18) * 1 ### ### # river [ 0:not river, 1:river ] ### river = (trueforo[0,npix:spix,wpix:epix]>500.) * 1 ### ### # error < 10% ### error=((np.absolute(org[npix:spix,wpix:epix]-opn_mean[npix:spix,wpix:epix])/(org[npix:spix,wpix:epix]+1e-20))>0.1)*(1) ### error=np.nan_to_num(error) ### #-- ### river = river*error ### ### # ratio ### ratio_n = ai * river #* ocean ### ratio_n = np.ma.fix_invalid(ratio_n,fill_value=0.0) ### ratio = ratio + (ratio_n<0)*0+(ratio_n>1)*1+(ratio_n>=0)*(ratio_n<=1)*ratio_n ### count = count + river #* ocean ### ### ratio = ratio / (count.astype(np.float32)+1.0e-20) ### river = (trueforo[0,npix:spix,wpix:epix]>500.) 
* 1 ### ratio = ratio * river ### ### ai = ai * river # assim/true plt.close() cmap=cm.viridis_r cmap.set_under("w",alpha=0) resol=1 plt.figure(figsize=(7*resol,3*resol)) m = Basemap(projection='cyl',llcrnrlat=south,urcrnrlat=north,llcrnrlon=west,urcrnrlon=east, lat_ts=0,resolution='c') #m.drawcoastlines( linewidth=0.3, color='k' ) m.fillcontinents(color=land,lake_color=water) m.drawmapboundary(fill_color=water) m.drawparallels(np.arange(south,north+0.1,20), labels = [1,0,0,0], fontsize=10,linewidth=0.1) m.drawmeridians(np.arange(west,east+0.1,40), labels = [0,0,0,1], fontsize=10,linewidth=0.1) fname = pm.CaMa_dir()+"/map/glb_15min/outclm.bin" trueforo = np.fromfile(fname,np.float32).reshape([2,720,1440])[0] river=(trueforo>100.)*1.0 ratio=np.ma.fix_invalid(ratio).data ratio=ratio*river data=ratio[npix:spix,wpix:epix] im = m.imshow(np.flipud(data),vmin=1e-20, vmax=1,interpolation="nearest",cmap=cmap,zorder=100) #im = m.imshow(np.flipud(ai),vmin=1e-20, vmax=1,interpolation="nearest",cmap=cmap,zorder=100) cbar=m.colorbar(im,"right",size="2%") cbar.set_label("annual mean AI") plt.title("annual mean Assimilation Index ")#+yyyy+"-"+mm+"-"+dd) plt.savefig(assim_out+"/fig/AI/AImap.png",dpi=300,bbox_inches="tight", pad_inches=0.05)
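The commented-out block above computes the assimilation index as AI = 1 - |(asm - opn) / (org - opn) - 1|, which is 1 when the assimilated mean matches the truth and drops toward 0 as it reverts to the open-loop mean. A toy check with made-up discharge values:

import numpy as np

org = np.array([100.0, 100.0])  # truth
opn = np.array([60.0, 60.0])    # open-loop ensemble mean
asm = np.array([100.0, 60.0])   # assimilated ensemble mean

ai = 1.0 - np.absolute((asm - opn) / ((org - opn) + 1e-20) - 1.0)
print(ai)  # [1. 0.] -> perfect assimilation, then no improvement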
jwt.go
package middleware

import (
	"net/http"
	"time"

	"github.com/appleboy/gin-jwt/v2"
	"github.com/gin-gonic/gin"
	"github.com/spf13/viper"

	"zendea/form"
	"zendea/model"
	"zendea/service"
	"zendea/util/log"
)

// login type
var (
	LoginStandard = 1
	LoginOAuth    = 2
)

type LoginDto struct {
	Username string `form:"username" json:"username" binding:"required"`
	Password string `form:"password" json:"password" binding:"required"`
	Code     string `form:"code" json:"code"`
}

// LoginOAuthDto - oauth login
type LoginOAuthDto struct {
	Code  string `form:"code" binding:"required"`
	State string `form:"state" binding:"required"`
}

// TODO: use a dedicated claims model so the user model can be dropped here
func JwtAuth(LoginType int) *jwt.GinJWTMiddleware {
	jwtMiddle
Response(c *gin.Context, code int, token string, expire time.Time) { c.JSON(http.StatusOK, gin.H{ "code": code, "data": map[string]interface{}{ "token": token, "expire": expire, }, "success": true, "message": "success", }) } func Authenticator(c *gin.Context) (interface{}, error) { var loginDto LoginDto if err := form.Bind(c, &loginDto); err != nil { return "", err } log.Info("loginDto.Username: %s", loginDto.Username) ok, err, u := service.UserService.VerifyAndReturnUserInfo(loginDto.Username, loginDto.Password) // Standard login if ok { return model.UserClaims{ ID: u.ID, Name: u.Username.String, }, nil } return nil, err } func AuthenticatorOAuth(c *gin.Context) (interface{}, error) { provider := c.Param("provider") var oauthDto LoginOAuthDto if err := form.Bind(c, &oauthDto); err != nil { return "", err } account, err := service.LoginSourceService.GetOrCreate(provider, oauthDto.Code, oauthDto.State) if err != nil { return nil, err } u, err := service.UserService.SignInByLoginSource(account) if err == nil { return model.UserClaims{ ID: u.ID, Name: u.Username.String, }, nil } log.Info("oauthDto.Code: %s", oauthDto.Code) log.Info("oauthDto.State: %s", oauthDto.State) return nil, err }
ware, err := jwt.New(&jwt.GinJWTMiddleware{ Realm: "Jwt", // SigningAlgorithm: "RS256", // PubKeyFile: "keys/jwt_private_key.pem", // PrivKeyFile: "keys/jwt_public_key.pem", Key: []byte(viper.GetString("jwt.key")), Timeout: time.Hour * 24, MaxRefresh: time.Hour * 24 * 90, IdentityKey: viper.GetString("jwt.identity_key"), LoginResponse: LoginResponse, PayloadFunc: func(data interface{}) jwt.MapClaims { if v, ok := data.(model.UserClaims); ok { return jwt.MapClaims{ "id": v.ID, "name": v.Name, "uid": v.ID, "uname": v.Name, } } return jwt.MapClaims{} }, IdentityHandler: func(c *gin.Context) interface{} { claims := jwt.ExtractClaims(c) return model.UserClaims{ Name: claims["name"].(string), ID: int64(claims["id"].(float64)), } }, Authenticator: func(c *gin.Context) (interface{}, error) { if LoginType == LoginOAuth { //OAuth return AuthenticatorOAuth(c) } return Authenticator(c) }, Authorizator: func(data interface{}, c *gin.Context) bool { if _, ok := data.(model.UserClaims); ok { return true } return false }, Unauthorized: func(c *gin.Context, code int, message string) { c.JSON(200, gin.H{ "code": 200, "success": false, "message": message, }) }, TokenLookup: "header: Authorization, query: token, cookie: jwt", TokenHeadName: "Bearer", TimeFunc: time.Now, }) if err != nil { log.Error(err.Error()) } return jwtMiddleware } func Login
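The PayloadFunc above embeds id/name (duplicated as uid/uname) claims, signed with the HMAC key read from viper's jwt.key. A Python sketch of decoding such a token with PyJWT, assuming gin-jwt's default HS256 signing; the key and token values are placeholders.

import jwt  # PyJWT

secret = "<value of jwt.key from the viper config>"  # placeholder
token = "<token issued by the middleware above>"     # placeholder

claims = jwt.decode(token, secret, algorithms=["HS256"])
# Mirrors PayloadFunc: id/name plus the duplicated uid/uname entries.
print(claims["id"], claims["name"])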
worker_app.ts
/** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ import {CommonModule, DOCUMENT, ViewportScroller, ɵNullViewportScroller as NullViewportScroller, ɵPLATFORM_WORKER_APP_ID as PLATFORM_WORKER_APP_ID} from '@angular/common'; import {APP_INITIALIZER, ApplicationModule, ErrorHandler, NgModule, NgZone, PLATFORM_ID, PlatformRef, RendererFactory2, StaticProvider, createPlatformFactory, platformCore, ɵINJECTOR_SCOPE as INJECTOR_SCOPE} from '@angular/core'; import {ɵBROWSER_SANITIZATION_PROVIDERS as BROWSER_SANITIZATION_PROVIDERS} from '@angular/platform-browser'; import {ON_WEB_WORKER} from './web_workers/shared/api'; import {ClientMessageBrokerFactory} from './web_workers/shared/client_message_broker'; import {MessageBus} from './web_workers/shared/message_bus'; import {PostMessageBus, PostMessageBusSink, PostMessageBusSource} from './web_workers/shared/post_message_bus'; import {RenderStore} from './web_workers/shared/render_store'; import {Serializer} from './web_workers/shared/serializer'; import {ServiceMessageBrokerFactory} from './web_workers/shared/service_message_broker'; import {WebWorkerRendererFactory2} from './web_workers/worker/renderer'; import {WorkerDomAdapter} from './web_workers/worker/worker_adapter'; /** * @publicApi * @deprecated platform-webworker is deprecated in Angular and will be removed in version 10 */ export const platformWorkerApp: (extraProviders?: StaticProvider[] | undefined) => PlatformRef = createPlatformFactory( platformCore, 'workerApp', [{provide: PLATFORM_ID, useValue: PLATFORM_WORKER_APP_ID}]); export function errorHandler(): ErrorHandler { return new ErrorHandler(); } // TODO(jteplitz602): remove this and compile with lib.webworker.d.ts (#3492) const _postMessage = { postMessage: (message: any, transferrables: [Transferable]) => { (<any>postMessage)(message, transferrables); } }; export function createMessageBus(zone: NgZone): MessageBus { const sink = new PostMessageBusSink(_postMessage); const source = new PostMessageBusSource(); const bus = new PostMessageBus(sink, source); bus.attachToZone(zone); return bus; } export function setupWebWorker(): void { WorkerDomAdapter.makeCurrent(); }
* * @publicApi * @deprecated platform-webworker is deprecated in Angular and will be removed in version 10 */ @NgModule({ providers: [ BROWSER_SANITIZATION_PROVIDERS, {provide: INJECTOR_SCOPE, useValue: 'root'}, Serializer, {provide: DOCUMENT, useValue: null}, ClientMessageBrokerFactory, ServiceMessageBrokerFactory, WebWorkerRendererFactory2, {provide: RendererFactory2, useExisting: WebWorkerRendererFactory2}, {provide: ON_WEB_WORKER, useValue: true}, RenderStore, {provide: ErrorHandler, useFactory: errorHandler, deps: []}, {provide: MessageBus, useFactory: createMessageBus, deps: [NgZone]}, {provide: APP_INITIALIZER, useValue: setupWebWorker, multi: true}, {provide: ViewportScroller, useClass: NullViewportScroller, deps: []}, ], exports: [ CommonModule, ApplicationModule, ] }) export class WorkerAppModule { }
/** * The ng module for the worker app side.
coupling_orders.py
NLOT = CouplingOrder(name = 'NLOT',  # ggS triangle nlo couplings
                     expansion_order = 1,
                     hierarchy = 2)

NLOTHL = CouplingOrder(name = 'NLOTHL',  # ggS triangle nlo couplings for HL
                       expansion_order = 1,
                       hierarchy = 2)

NLOTHH = CouplingOrder(name = 'NLOTHH',  # ggS triangle nlo couplings for HH
                       expansion_order = 1,
                       hierarchy = 2)

NLOTHA = CouplingOrder(name = 'NLOTHA',  # ggS triangle nlo couplings for HA
                       expansion_order = 1,
                       hierarchy = 2)
NLOB = CouplingOrder(name = 'NLOB',  # ggSS box nlo couplings
                     expansion_order = 1,
                     hierarchy = 2)

NLOZ = CouplingOrder(name = 'NLOZ',  # ggZ nlo couplings
                     expansion_order = 1,
                     hierarchy = 2)

NLOEW = CouplingOrder(name = 'NLOEW',  # gagaS nlo couplings
                      expansion_order = 1,
                      hierarchy = 2)

NLOEWHL = CouplingOrder(name = 'NLOEWHL',  # gagaS nlo couplings for HL
                        expansion_order = 1,
                        hierarchy = 2)

NLOEWHH = CouplingOrder(name = 'NLOEWHH',  # gagaS nlo couplings for HH
                        expansion_order = 1,
                        hierarchy = 2)

NLOEWHA = CouplingOrder(name = 'NLOEWHA',  # gagaS nlo couplings for HA
                        expansion_order = 1,
                        hierarchy = 2)
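In a UFO model such as this, each CouplingOrder caps how many powers of the tagged coupling may appear in a generated amplitude (expansion_order) and ranks its importance relative to other orders (hierarchy). A hypothetical further order would follow the same pattern:

NLOW = CouplingOrder(name = 'NLOW',  # illustrative extra NLO coupling order
                     expansion_order = 1,
                     hierarchy = 2)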
compute_flavor_groups_info.py
# coding: utf-8

import re
import six

from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization


class ComputeFlavorGroupsInfo:

    """
    Attributes:
        openapi_types (dict): The key is attribute name
                              and the value is attribute type.
        attribute_map (dict): The key is attribute name
                              and the value is json key in definition.
    """

    sensitive_list = []

    openapi_types = {
        'group_type': 'str',
        'compute_flavors': 'list[ComputeFlavors]',
        'offset': 'int',
        'limit': 'int',
        'total': 'int'
    }

    attribute_map = {
        'group_type': 'groupType',
        'compute_flavors': 'computeFlavors',
        'offset': 'offset',
        'limit': 'limit',
        'total': 'total'
    }

    def __init__(self, group_type=None, compute_flavors=None, offset=None, limit=None, total=None):
        """ComputeFlavorGroupsInfo - a model defined in huaweicloud sdk"""

        self._group_type = None
        self._compute_flavors = None
        self._offset = None
        self._limit = None
        self._total = None
        self.discriminator = None

        if group_type is not None:
            self.group_type = group_type
        if compute_flavors is not None:
            self.compute_flavors = compute_flavors
        if offset is not None:
            self.offset = offset
        if limit is not None:
            self.limit = limit
        if total is not None:
            self.total = total

    @property
    def group_type(self):
        """Gets the group_type of this ComputeFlavorGroupsInfo.

        Compute resource architecture type; currently either X86 or ARM.

        :return: The group_type of this ComputeFlavorGroupsInfo.
        :rtype: str
        """
        return self._group_type

    @group_type.setter
    def group_type(self, group_type):
        """Sets the group_type of this ComputeFlavorGroupsInfo.

        Compute resource architecture type; currently either X86 or ARM.

        :param group_type: The group_type of this ComputeFlavorGroupsInfo.
        :type: str
        """
        self._group_type = group_type

    @property
    def compute_flavors(self):
        """Gets the compute_flavors of this ComputeFlavorGroupsInfo.

        Details of the compute flavors.

        :return: The compute_flavors of this ComputeFlavorGroupsInfo.
        :rtype: list[ComputeFlavors]
        """
        return self._compute_flavors

    @compute_flavors.setter
    def compute_flavors(self, compute_flavors):
        """Sets the compute_flavors of this ComputeFlavorGroupsInfo.

        Details of the compute flavors.

        :param compute_flavors: The compute_flavors of this ComputeFlavorGroupsInfo.
        :type: list[ComputeFlavors]
        """
        self._compute_flavors = compute_flavors

    @property
    def offset(self):
        """Gets the offset of this ComputeFlavorGroupsInfo.

        Pagination parameter: start value.

        :return: The offset of this ComputeFlavorGroupsInfo.
        :rtype: int
        """
        return self._offset

    @offset.setter
    def offset(self, offset):
        """Sets the offset of this ComputeFlavorGroupsInfo.

        Pagination parameter: start value.

        :param offset: The offset of this ComputeFlavorGroupsInfo.
        :type: int
        """
        self._offset = offset

    @property
    def limit(self):
        """Gets the limit of this ComputeFlavorGroupsInfo.

        Pagination parameter: number of items per page.

        :return: The limit of this ComputeFlavorGroupsInfo.
        :rtype: int
        """
        return self._limit

    @limit.setter
    def limit(self, limit):
        """Sets the limit of this ComputeFlavorGroupsInfo.

        Pagination parameter: number of items per page.

        :param limit: The limit of this ComputeFlavorGroupsInfo.
        :type: int
        """
        self._limit = limit

    @property
    def total(self):
        """Gets the total of this ComputeFlavorGroupsInfo.

        Total number of compute flavors.

        :return: The total of this ComputeFlavorGroupsInfo.
        :rtype: int
        """
        return self._total

    @total.setter
    def total(self, total):
        """Sets the total of this ComputeFlavorGroupsInfo.

        Total number of compute flavors.

        :param total: The total of this ComputeFlavorGroupsInfo.
        :type: int
        """
        self._total = total

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                if attr in self.sensitive_list:
                    result[attr] = "****"
                else:
                    result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        import simplejson as json
        if six.PY2:
            import sys
            reload(sys)
            sys.setdefaultencoding("utf-8")
        return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)

    def __repr__(self):
        """For `print`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, ComputeFlavorGroupsInfo):
            return False

        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
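The generated model above follows the standard huaweicloudsdk shape: keyword construction, property accessors, and to_dict/to_str for serialization. A usage sketch with made-up values (the compute_flavors list is omitted for brevity):

info = ComputeFlavorGroupsInfo(group_type="X86", offset=0, limit=10)
info.total = 42  # property setter backs the private _total attribute
print(info.to_dict())  # plain-dict form, masking any sensitive_list fields
print(info)            # __repr__ delegates to to_str()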
Rock Spock Paper Lizard Scissor.py
import random def name_to_number(name): if(name=='rock'): return 0 elif(name=='Spock'): return 1 elif(name=='paper'): return 2 elif(name=='lizard'): return 3 elif(name=='scissors'): return 4 else: return name,"is an invalid name" def number_to_name(number): if(number == 0): return 'rock' elif(number == 1): return 'Spock' elif(number == 2): return 'paper' elif(number == 3): return 'lizard' elif(number == 4): return 'scissors' else: return number,"is an invalid number" def rpsls(player_choice): print "" print "Player chooses",player_choice player_number = name_to_number(player_choice) comp_number = random.randrange(0,5) comp_choice = number_to_name(comp_number) print "Computer chooses",comp_choice difference = (comp_number-player_number)%5 if(difference == 0): print "Player and computer tie!" elif(difference == 1 or difference == 2 ): print "Computer wins!" elif(difference == 3 or difference == 4 ):
else: print "Incorrect input" rpsls("rock") rpsls("Spock") rpsls("paper") rpsls("lizard") rpsls("scissors")
print "Player wins!"
shipper_e2e_test.go
// Copyright (c) The Thanos Authors. // Licensed under the Apache License 2.0. package shipper import ( "bytes" "context" "encoding/json" "io/ioutil" "math/rand" "os" "path" "path/filepath" "strings" "testing" "time" "github.com/thanos-io/thanos/pkg/testutil/e2eutil" "github.com/go-kit/kit/log" "github.com/oklog/ulid" "github.com/prometheus/client_golang/prometheus" promtest "github.com/prometheus/client_golang/prometheus/testutil" "github.com/prometheus/prometheus/pkg/labels" "github.com/prometheus/prometheus/pkg/timestamp" "github.com/prometheus/prometheus/tsdb" "github.com/thanos-io/thanos/pkg/block" "github.com/thanos-io/thanos/pkg/block/metadata" "github.com/thanos-io/thanos/pkg/objstore" "github.com/thanos-io/thanos/pkg/objstore/objtesting" "github.com/thanos-io/thanos/pkg/testutil" ) func
(t *testing.T) { objtesting.ForeachStore(t, func(t *testing.T, bkt objstore.Bucket) { // TODO(GiedriusS): consider switching to BucketWithMetrics() everywhere? metrics := prometheus.NewRegistry() metricsBucket := objstore.BucketWithMetrics("test", bkt, metrics) dir, err := ioutil.TempDir("", "shipper-e2e-test") testutil.Ok(t, err) defer func() { testutil.Ok(t, os.RemoveAll(dir)) }() extLset := labels.FromStrings("prometheus", "prom-1") shipper := New(log.NewLogfmtLogger(os.Stderr), nil, dir, metricsBucket, func() labels.Labels { return extLset }, metadata.TestSource, false, false) ctx, cancel := context.WithCancel(context.Background()) defer cancel() // Create 10 new blocks. 9 of them (non compacted) should be actually uploaded. var ( expBlocks = map[ulid.ULID]struct{}{} expFiles = map[string][]byte{} randr = rand.New(rand.NewSource(0)) now = time.Now() ids = []ulid.ULID{} maxSyncSoFar int64 ) for i := 0; i < 10; i++ { id := ulid.MustNew(uint64(i), randr) bdir := filepath.Join(dir, id.String()) tmp := bdir + ".tmp" testutil.Ok(t, os.Mkdir(tmp, 0777)) meta := metadata.Meta{ BlockMeta: tsdb.BlockMeta{ Version: 1, ULID: id, Stats: tsdb.BlockStats{ NumSamples: 1, }, MinTime: timestamp.FromTime(now.Add(time.Duration(i) * time.Hour)), MaxTime: timestamp.FromTime(now.Add((time.Duration(i) * time.Hour) + 1)), Compaction: tsdb.BlockMetaCompaction{ Level: 1, }, }, Thanos: metadata.Thanos{ Source: metadata.TestSource, }, } // Sixth block is compacted one. if i == 5 { meta.Compaction.Level = 2 } metab, err := json.Marshal(&meta) testutil.Ok(t, err) testutil.Ok(t, ioutil.WriteFile(tmp+"/meta.json", metab, 0666)) testutil.Ok(t, ioutil.WriteFile(tmp+"/index", []byte("indexcontents"), 0666)) // Running shipper while a block is being written to temp dir should not trigger uploads. b, err := shipper.Sync(ctx) testutil.Ok(t, err) testutil.Equals(t, 0, b) shipMeta, err := ReadMetaFile(dir) testutil.Ok(t, err) if len(shipMeta.Uploaded) == 0 { shipMeta.Uploaded = []ulid.ULID{} } testutil.Equals(t, &Meta{Version: MetaVersion1, Uploaded: ids}, shipMeta) testutil.Ok(t, os.MkdirAll(tmp+"/chunks", 0777)) testutil.Ok(t, ioutil.WriteFile(tmp+"/chunks/0001", []byte("chunkcontents1"), 0666)) testutil.Ok(t, ioutil.WriteFile(tmp+"/chunks/0002", []byte("chunkcontents2"), 0666)) testutil.Ok(t, os.Rename(tmp, bdir)) // After rename sync should upload the block. b, err = shipper.Sync(ctx) testutil.Ok(t, err) if i != 5 { ids = append(ids, id) maxSyncSoFar = meta.MaxTime testutil.Equals(t, 1, b) } else { // 5 blocks uploaded so far - 5 existence checks & 25 uploads (5 files each). testutil.Ok(t, promtest.GatherAndCompare(metrics, strings.NewReader(` # HELP thanos_objstore_bucket_operations_total Total number of all attempted operations against a bucket. # TYPE thanos_objstore_bucket_operations_total counter thanos_objstore_bucket_operations_total{bucket="test",operation="attributes"} 0 thanos_objstore_bucket_operations_total{bucket="test",operation="delete"} 0 thanos_objstore_bucket_operations_total{bucket="test",operation="exists"} 5 thanos_objstore_bucket_operations_total{bucket="test",operation="get"} 0 thanos_objstore_bucket_operations_total{bucket="test",operation="get_range"} 0 thanos_objstore_bucket_operations_total{bucket="test",operation="iter"} 0 thanos_objstore_bucket_operations_total{bucket="test",operation="upload"} 25 `), `thanos_objstore_bucket_operations_total`)) testutil.Equals(t, 0, b) } // The external labels must be attached to the meta file on upload. 
meta.Thanos.Labels = extLset.Map() meta.Thanos.SegmentFiles = []string{"0001", "0002"} meta.Thanos.Files = []metadata.File{ {RelPath: "chunks/0001", SizeBytes: 14}, {RelPath: "chunks/0002", SizeBytes: 14}, {RelPath: "index", SizeBytes: 13}, {RelPath: "meta.json"}, } buf := bytes.Buffer{} testutil.Ok(t, meta.Write(&buf)) // We will delete the fifth block and do not expect it to be re-uploaded later. if i != 4 && i != 5 { expBlocks[id] = struct{}{} expFiles[id.String()+"/meta.json"] = buf.Bytes() expFiles[id.String()+"/index"] = []byte("indexcontents") expFiles[id.String()+"/chunks/0001"] = []byte("chunkcontents1") expFiles[id.String()+"/chunks/0002"] = []byte("chunkcontents2") } if i == 4 { testutil.Ok(t, block.Delete(ctx, log.NewNopLogger(), bkt, ids[4])) } // The shipper meta file should show all blocks as uploaded except the compacted one. shipMeta, err = ReadMetaFile(dir) testutil.Ok(t, err) testutil.Equals(t, &Meta{Version: MetaVersion1, Uploaded: ids}, shipMeta) // Verify timestamps were updated correctly. minTotal, maxSync, err := shipper.Timestamps() testutil.Ok(t, err) testutil.Equals(t, timestamp.FromTime(now), minTotal) testutil.Equals(t, maxSyncSoFar, maxSync) } for id := range expBlocks { ok, _ := bkt.Exists(ctx, path.Join(id.String(), block.MetaFilename)) testutil.Assert(t, ok, "block %s was not uploaded", id) } for fn, exp := range expFiles { rc, err := bkt.Get(ctx, fn) testutil.Ok(t, err) act, err := ioutil.ReadAll(rc) testutil.Ok(t, err) testutil.Ok(t, rc.Close()) testutil.Equals(t, string(exp), string(act)) } // Verify the fifth block is still deleted by the end. ok, err := bkt.Exists(ctx, ids[4].String()+"/meta.json") testutil.Ok(t, err) testutil.Assert(t, ok == false, "fifth block was reuploaded") }) } func TestShipper_SyncBlocksWithMigrating_e2e(t *testing.T) { e2eutil.ForeachPrometheus(t, func(t testing.TB, p *e2eutil.Prometheus) { dir, err := ioutil.TempDir("", "shipper-e2e-test") testutil.Ok(t, err) defer func() { testutil.Ok(t, os.RemoveAll(dir)) }() bkt := objstore.NewInMemBucket() ctx, cancel := context.WithCancel(context.Background()) defer cancel() extLset := labels.FromStrings("prometheus", "prom-1") testutil.Ok(t, p.Start()) upctx, upcancel := context.WithTimeout(ctx, 10*time.Second) defer upcancel() testutil.Ok(t, p.WaitPrometheusUp(upctx)) p.DisableCompaction() testutil.Ok(t, p.Restart()) upctx2, upcancel2 := context.WithTimeout(ctx, 10*time.Second) defer upcancel2() testutil.Ok(t, p.WaitPrometheusUp(upctx2)) shipper := New(log.NewLogfmtLogger(os.Stderr), nil, dir, bkt, func() labels.Labels { return extLset }, metadata.TestSource, true, false) // Create 10 new blocks. 9 of them (non compacted) should be actually uploaded. var ( expBlocks = map[ulid.ULID]struct{}{} expFiles = map[string][]byte{} randr = rand.New(rand.NewSource(0)) now = time.Now() ids = []ulid.ULID{} ) for i := 0; i < 10; i++ { id := ulid.MustNew(uint64(i), randr) bdir := filepath.Join(dir, id.String()) tmp := bdir + ".tmp" testutil.Ok(t, os.Mkdir(tmp, 0777)) meta := metadata.Meta{ BlockMeta: tsdb.BlockMeta{ Version: 1, ULID: id, Stats: tsdb.BlockStats{ NumSamples: 1, }, MinTime: timestamp.FromTime(now.Add(time.Duration(i) * time.Hour)), MaxTime: timestamp.FromTime(now.Add((time.Duration(i) * time.Hour) + 1)), Compaction: tsdb.BlockMetaCompaction{ Level: 1, }, }, Thanos: metadata.Thanos{ Source: metadata.TestSource, }, } // Fifth block is compacted one. 
if i == 4 { meta.Compaction.Level = 2 } metab, err := json.Marshal(&meta) testutil.Ok(t, err) testutil.Ok(t, ioutil.WriteFile(tmp+"/meta.json", metab, 0666)) testutil.Ok(t, ioutil.WriteFile(tmp+"/index", []byte("indexcontents"), 0666)) // Running shipper while a block is being written to temp dir should not trigger uploads. b, err := shipper.Sync(ctx) testutil.Ok(t, err) testutil.Equals(t, 0, b) shipMeta, err := ReadMetaFile(dir) testutil.Ok(t, err) if len(shipMeta.Uploaded) == 0 { shipMeta.Uploaded = []ulid.ULID{} } testutil.Equals(t, &Meta{Version: MetaVersion1, Uploaded: ids}, shipMeta) testutil.Ok(t, os.MkdirAll(tmp+"/chunks", 0777)) testutil.Ok(t, ioutil.WriteFile(tmp+"/chunks/0001", []byte("chunkcontents1"), 0666)) testutil.Ok(t, ioutil.WriteFile(tmp+"/chunks/0002", []byte("chunkcontents2"), 0666)) testutil.Ok(t, os.Rename(tmp, bdir)) // After rename sync should upload the block. b, err = shipper.Sync(ctx) testutil.Ok(t, err) testutil.Equals(t, 1, b) ids = append(ids, id) // The external labels must be attached to the meta file on upload. meta.Thanos.Labels = extLset.Map() meta.Thanos.SegmentFiles = []string{"0001", "0002"} meta.Thanos.Files = []metadata.File{ {RelPath: "chunks/0001", SizeBytes: 14}, {RelPath: "chunks/0002", SizeBytes: 14}, {RelPath: "index", SizeBytes: 13}, {RelPath: "meta.json"}, } buf := bytes.Buffer{} testutil.Ok(t, meta.Write(&buf)) // We will delete the fifth block and do not expect it to be re-uploaded later. if i != 4 { expBlocks[id] = struct{}{} expFiles[id.String()+"/meta.json"] = buf.Bytes() expFiles[id.String()+"/index"] = []byte("indexcontents") expFiles[id.String()+"/chunks/0001"] = []byte("chunkcontents1") expFiles[id.String()+"/chunks/0002"] = []byte("chunkcontents2") } if i == 4 { testutil.Ok(t, block.Delete(ctx, log.NewNopLogger(), bkt, ids[4])) } // The shipper meta file should show all blocks as uploaded except the compacted one. shipMeta, err = ReadMetaFile(dir) testutil.Ok(t, err) testutil.Equals(t, &Meta{Version: MetaVersion1, Uploaded: ids}, shipMeta) // Verify timestamps were updated correctly. minTotal, maxSync, err := shipper.Timestamps() testutil.Ok(t, err) testutil.Equals(t, timestamp.FromTime(now), minTotal) testutil.Equals(t, meta.MaxTime, maxSync) } for id := range expBlocks { ok, _ := bkt.Exists(ctx, path.Join(id.String(), block.MetaFilename)) testutil.Assert(t, ok, "block %s was not uploaded", id) } for fn, exp := range expFiles { rc, err := bkt.Get(ctx, fn) testutil.Ok(t, err) act, err := ioutil.ReadAll(rc) testutil.Ok(t, err) testutil.Ok(t, rc.Close()) testutil.Equals(t, string(exp), string(act)) } // Verify the fifth block is still deleted by the end. ok, err := bkt.Exists(ctx, ids[4].String()+"/meta.json") testutil.Ok(t, err) testutil.Assert(t, ok == false, "fifth block was reuploaded") }) }
TestShipper_SyncBlocks_e2e
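// A minimal standalone sketch (not part of the test above; the directory names
// and the rename-based visibility rule are assumptions drawn from the test) of
// the write-to-tmp-then-rename pattern the shipper relies on: a block directory
// only becomes eligible for upload once it is atomically renamed into place.
package main

import (
	"io/ioutil"
	"os"
	"path/filepath"
)

func main() {
	dir, err := ioutil.TempDir("", "shipper-sketch")
	if err != nil {
		panic(err)
	}
	defer os.RemoveAll(dir)

	bdir := filepath.Join(dir, "01ARZ3NDEKTSV4RRFFQ69G5FAV") // hypothetical ULID
	tmp := bdir + ".tmp"

	// Write everything under the ".tmp" suffix first; a concurrent Sync scanning
	// dir skips it because meta.json does not yet live at its final path.
	if err := os.MkdirAll(tmp, 0777); err != nil {
		panic(err)
	}
	if err := ioutil.WriteFile(filepath.Join(tmp, "meta.json"), []byte("{}"), 0666); err != nil {
		panic(err)
	}

	// The rename is atomic on POSIX filesystems, so the block appears fully
	// formed - this is the point after which the tests expect Sync to upload it.
	if err := os.Rename(tmp, bdir); err != nil {
		panic(err)
	}
}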
migrate_dbversion14.go
package migrator func (m *Migrator) updateSettingsToDBVersion15() error { legacySettings, err := m.settingsService.Settings() if err != nil { return err } legacySettings.EnableHostManagementFeatures = false
return m.settingsService.UpdateSettings(legacySettings) }
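// Runnable sketch with hypothetical stand-in types (the real settingsService
// and Settings structs are not shown above): this is the general shape such
// per-version migrations follow - read the current settings, default the newly
// introduced field, and persist.
package main

import "fmt"

type Settings struct {
	EnableHostManagementFeatures bool
}

type settingsService struct{ s Settings }

func (svc *settingsService) Settings() (*Settings, error)     { return &svc.s, nil }
func (svc *settingsService) UpdateSettings(s *Settings) error { svc.s = *s; return nil }

type Migrator struct{ settingsService *settingsService }

func (m *Migrator) updateSettingsToDBVersion15() error {
	legacySettings, err := m.settingsService.Settings()
	if err != nil {
		return err
	}
	legacySettings.EnableHostManagementFeatures = false
	return m.settingsService.UpdateSettings(legacySettings)
}

func main() {
	m := &Migrator{settingsService: &settingsService{}}
	fmt.Println(m.updateSettingsToDBVersion15()) // <nil>
}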
btree.js
"use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = function (d, b) { extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return extendStatics(d, b); }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); var __values = (this && this.__values) || function(o) { var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; if (m) return m.call(o); if (o && typeof o.length === "number") return { next: function () { if (o && i >= o.length) o = void 0; return { value: o && o[i++], done: !o }; } }; throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); }; var __read = (this && this.__read) || function (o, n) { var m = typeof Symbol === "function" && o[Symbol.iterator]; if (!m) return o; var i = m.call(o), r, ar = [], e; try { while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); } catch (error) { e = { error: error }; } finally { try { if (r && !r.done && (m = i["return"])) m.call(i); } finally { if (e) throw e.error; } } return ar; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.BTree = void 0; var btreenode_1 = require("./btreenode"); var UnreachableError = /** @class */ (function (_super) { __extends(UnreachableError, _super); function UnreachableError(msg) { var _this = _super.call(this, msg) || this; _this.name = "UnreachableError"; return _this; } return UnreachableError; }(Error)); var FilteredRootError = /** @class */ (function (_super) { __extends(FilteredRootError, _super); function FilteredRootError(msg) { var _this = _super.call(this, msg) || this; _this.name = "FilteredRootError"; return _this; } return FilteredRootError; }(Error)); // if Symbol is not available in window if (typeof window !== "undefined") { if (typeof Symbol === "undefined") { var win = window; win.Symbol = {}; win.Symbol.iterator = "==iterator=="; } } /** * BTree main class * @class * @public * @example * new BTree(10); * new BTree({ root: 10 }); * new BTree({ value: 10 }); */ var BTree = /** @class */ (function () { /** * Constructor for Binary Tree. * @param {BTreeRootAttrStruct|BTreeValueAttrStruct|T} attr Can be of type object, string, number. In case of object root/value property is expected to be value of root node. * @constructor */ function BTree(attr) { /** * Depth of the binary tree. * @type {number} * @property depth */ this.depth = 1; if (typeof attr == "object" && typeof attr.root !== "undefined") { this.root = new btreenode_1.BTreeNode({ value: attr.root }); } else if (typeof attr == "object" && typeof attr.value !== "undefined") { this.root = new btreenode_1.BTreeNode({ value: attr.value }); } else { this.root = new btreenode_1.BTreeNode({ value: attr }); } this.depth = this.root.getDepth(); } /** * Returns string value of given tree. * @method toString * @member * @public * @example * var tree = new BTree(10); * tree.insert(10); * tree.insert(20); * tree.insert(30); * tree.toString(); // "10102030" */ BTree.prototype.toString = function () { return this.root.toString(); }; /** * Returns JSON Form. * @method toJSON * @member * @public * @returns {BTreeNodeStruct} Returns json form of a given tree. 
 * @example
 * var tree = new BTree(10);
 * tree.insert(20);
 * tree.toJSON(); // {value:10,lNode:{value:20,lNode:null,rNode:null},rNode:null}
 */
BTree.prototype.toJSON = function () {
    return this.root.toJSON();
};
/**
 * Returns the tree as an array of nodes.
 * @method toArray
 * @member
 * @public
 * @returns {Array<BTreeNode>} Returns array form of given tree.
 * @example
 * var tree = new BTree(10);
 * tree.insert(20);
 * tree.toArray(); // => [{value:10,...},{value:20,...}]
 */
BTree.prototype.toArray = function () {
    var arr = [];
    this.each(function (node, index) { arr.push(node); });
    return arr;
};
/**
 * Returns array of values of the tree.
 * @method toFlatArray
 * @member
 * @public
 * @returns {Array<T>} Returns array form of given tree.
 * @example
 * var tree = new BTree(10);
 * tree.insert(20);
 * tree.toFlatArray(); // => [10,20]
 */
BTree.prototype.toFlatArray = function () {
    var arr = [];
    this.each(function (node, index) { arr.push(node.value); });
    return arr;
};
/**
 * Inserts the given value into the tree at the first free left child node.
 * @param {any} val any type of value to be added to tree node.
 * @returns {BTreeNode} Returns newly created BTreeNode.
 * @method insert
 * @member
 * @example
 * var tree = new BTree(10);
 * tree.insert(10);
 * tree.insert(20);
 * tree.insert(30);
 * tree.toString(); // "10102030"
 */
BTree.prototype.insert = function (val) {
    return this.insertLeftMost(val);
};
/**
 * Inserts the given value into the tree at the first free left child node.
 * @param {T} val any type of value to be added to tree node.
 * @method insertLeftMost
 * @member
 * @returns {BTreeNode<T>} Returns newly created BTreeNode.
 */
BTree.prototype.insertLeftMost = function (val) {
    var node = this.root;
    while (node.lNode != null) {
        node = node.lNode;
    }
    node.lNode = new btreenode_1.BTreeNode({ value: val });
    this.depth = this.root.getDepth();
    return node.lNode;
};
/**
 * Inserts the given value into the tree at the first free right child node.
 * @param {T} val any type of value to be added to tree node.
 * @method insertRightMost
 * @member
 * @public
 * @returns {BTreeNode<T>} Returns newly created BTreeNode.
 */
BTree.prototype.insertRightMost = function (val) {
    var node = this.root;
    while (node.rNode != null) {
        node = node.rNode;
    }
    node.rNode = new btreenode_1.BTreeNode({ value: val });
    this.depth = this.root.getDepth();
    return node.rNode;
};
/**
 * Deletes the given value from the tree.
 * Traversal = Root -> L -> R.
 * @param {T} val Value to be removed.
 * @returns {BTreeNode<T>} Returns removed BTreeNode.
 * @method delete
 * @member
 * @public
 */
BTree.prototype.delete = function (val) {
    /**
     * @private
     * @param {BTreeNode<T>} currNode Current node.
     * @returns {BTreeNode<T>} Returns removed BTreeNode.
     */
    var recDel = function (currNode) {
        if (currNode == null) {
            return currNode;
        }
        var cacheRetLeft = currNode.lNode;
        var cacheRetRight = currNode.rNode;
        if (cacheRetLeft == null && cacheRetRight == null) {
            return null;
        }
        if (currNode.lNode && currNode.lNode.value === val) {
            currNode.lNode = null;
            return cacheRetLeft;
        }
        if (currNode.rNode && currNode.rNode.value === val) {
            currNode.rNode = null;
            return cacheRetRight;
        }
        var delL = recDel(currNode.lNode);
        var delR = (!delL) ? recDel(currNode.rNode) : null;
        return delL || delR;
    };
    var delItem = recDel(this.root);
    this.depth = this.root.getDepth();
    return delItem;
};
/**
 * Inserts the given element at the given location. If the location is already taken, it does not insert any value.
 * @param {T} val value to insert.
 * @param {number} index index at which to insert the new element.
* @method insertAt * @member * @public * @throws UnreachableError * @example * tree.insertAt(20,2); */ BTree.prototype.insertAt = function (val, index) { var e_1, _a; var path = BTree.getPathFromIndex(index); var currNode = this.root; try { for (var _b = __values(path.entries()), _c = _b.next(); !_c.done; _c = _b.next()) { var _d = __read(_c.value, 2), index_1 = _d[0], item = _d[1]; if (item === 'L') { if (currNode.lNode == null && path.length !== index_1 + 1) { throw new UnreachableError('Given index cannot be reached'); } else if (currNode.lNode == null) { currNode.lNode = new btreenode_1.BTreeNode({ value: val }); } else { currNode = currNode.lNode; } } if (item === 'R') { if (currNode.rNode == null && path.length !== index_1 + 1) { throw new UnreachableError('Given index cannot be reached'); } else if (currNode.rNode == null) { currNode.rNode = new btreenode_1.BTreeNode({ value: val }); } else { currNode = currNode.rNode; } } } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { if (_c && !_c.done && (_a = _b.return)) _a.call(_b); } finally { if (e_1) throw e_1.error; } } this.depth = this.root.getDepth(); }; /** * Breadth first search. Executes given callback functions with parameters BTreeNode and path index for each node in BFS fashion. * @param {{(node: BTreeNode<T>, index: number) => any}} callback A callback function for execution of each node. * @method traverseBFS * @member * @public * @returns {void} no value. */ BTree.prototype.traverseBFS = function (callback) { var currCount = 0; var children = []; /** * * @param {BTreeNode<T>} currNode current node in recursion. * @private */ var recInser = function (currNode, currPath) { if (currNode != null) { var currPathL = JSON.parse(JSON.stringify(currPath)); currPathL.push('L'); var currPathR = JSON.parse(JSON.stringify(currPath)); currPathR.push('R'); children.push({ node: currNode.lNode, path: currPathL }); children.push({ node: currNode.rNode, path: currPathR }); callback(currNode, BTree.getIndexFromPath(currPath)); } currCount++; if (children.length) { var item = children.splice(0, 1)[0]; return recInser(item.node, item.path); } else { return; } }; recInser(this.root, ['U']); }; /** * Depth first search, Executes given callback functions with parameters BTreeNode and path index for each node in DFS fashion. * @param {{(node: BTreeNode<T>, index: number) => any}} callback A callback function for execution of each node. * @method traverseDFS * @member * @public * @returns {void} no value. */ BTree.prototype.traverseDFS = function (callback) { /** * * @param {BTreeNode<T>} currNode Currently processing node. * @param {Array<'U'|'L'|'R'>} path current path * @private */ var recFnc = function (currNode, path) { if (currNode !== null) { callback(currNode, BTree.getIndexFromPath(path)); if (currNode.lNode !== null) { var lPath = JSON.parse(JSON.stringify(path)); lPath.push('L'); recFnc(currNode.lNode, lPath); } if (currNode.rNode !== null) { var rPath = JSON.parse(JSON.stringify(path)); rPath.push('R'); recFnc(currNode.rNode, rPath); } } }; recFnc(this.root, ['U']); }; /** * Breadth first search. Executes given callback functions with parameters BTreeNode and path index for each node in BFS fashion. * @param {{(node: BTreeNode<T>, index: number) => any}} callback A callback function for execution of each node. * @method each * @member * @public * @returns {void} no value. */ BTree.prototype.each = function (callback) { return this.traverseBFS(callback); }; /** * Breadth first search. 
Executes given callback functions with parameters BTreeNode and path index for each node in BFS fashion.
 * @param {{(node: BTreeNode<T>, index: number) => any}} callback A callback function for execution of each node.
 * @method forEach
 * @member
 * @public
 * @returns {void} no value.
 */
BTree.prototype.forEach = function (callback) {
    return this.traverseBFS(callback);
};
/**
 * Returns an iterable of key, value pairs for every entry in the tree.
 * @method [Symbol.iterator]
 * @member
 * @public
 * @example
 * var tree = new BTree(10);
 * for (const node of tree) {
 *   console.log(node.value); // 10
 * }
 */
BTree.prototype[Symbol.iterator] = function () {
    var curr = -1;
    var arr = this.toArray();
    return {
        /**
         * @returns { {value: BTreeNode<T>, done: boolean} }
         * @private
         */
        next: function () {
            curr++;
            return {
                value: (arr[curr] === void 0) ? void 0 : arr[curr],
                done: !!(curr === arr.length)
            };
        }
    };
};
/**
 * Returns an iterable of key, value pairs for every entry in the tree.
 * @method entries
 * @member
 * @public
 * @returns {IterableIterator<[number, BTreeNode<T>]>} Iterable for iterations.
 * @example
 * var tree = new BTree(10);
 * for (const [index, node] of tree.entries()) {
 *   console.log(index, node.value); // 0, 10
 * }
 */
BTree.prototype.entries = function () {
    return this.toArray().entries();
};
/**
 * Maps current tree values to a new tree, modifying the values using the given callback function.
 * Uses BFS.
 * @param {{(value: T) => T}} callback callback function for value modifier.
 * @method map
 * @member
 * @public
 * @returns {BTree<T>} A new BTree
 * @example
 * var tree = BTree.fromArray([10, 20, 30, 40]);
 * var tree2 = tree.map(n => n * 2);
 * var arr2 = tree2.toArray(); // [{value:20,...},{value:40,...},{value:60,...},{value:80,...}]
 */
BTree.prototype.map = function (callback) {
    var newTree = new BTree(callback(this.root.value));
    this.each(function (node, index) {
        if (index !== 1) {
            var retVal = callback(node.value);
            newTree.insertAt(retVal, index);
        }
    });
    return newTree;
};
/**
 * Filters each item based on the given filter function.
 * @param {{(value: T) => boolean}} filterFnc callback function for filtering purpose.
 * @method filter
 * @member
 * @public
 * @throws FilteredRootError, Error when root node gets filtered out.
 * @returns {BTree<T>} New filtered instance of tree.
 * @example
 * var tree = BTree.fromArray([10, 20, 30, 40]);
 * var tree2 = tree.filter(n => !!(n % 4 === 0 || n === 10));
 * var arr2 = tree2.toArray(); // [{value:10,...},{value:20,...},{value:40,...}]
 */
BTree.prototype.filter = function (filterFnc) {
    if (!filterFnc(this.root.value)) {
        throw new FilteredRootError("Root node cannot be filtered. If you want to filter out the root node, you can use an empty BTree instance.");
    }
    var newTree = new BTree(this.root.value);
    this.each(function (node, index) {
        if (index !== 1) {
            var canBeInserted = filterFnc(node.value);
            if (canBeInserted) {
                newTree.insertAt(node.value, index);
            }
        }
    });
    return newTree;
};
/**
 * Reduces the node values using reduceFunction and returns the final value.
 * @param {(next: T2, value: T, index: number, tree: BTree<T>) => T2} reduceFunction callback function for reducing each node value to a final value.
 * @param {T2} initialValue Optional. Accumulator/initial value.
 * @method reduce<T2>
 * @member
 * @public
 * @returns {T2} Returns reduced value
 * @example
 * var tree = BTree.fromArray([10, 20, 30, 40]);
 * var sum = tree.reduce((acc, node) => acc + node); // => 100
 */
BTree.prototype.reduce = function (reduceFunction, initialValue) {
    var _this = this;
    if (initialValue === void 0) { initialValue = 0; }
    var next = initialValue;
    this.each(function (node, index) {
        next = reduceFunction(next, node.value, index, _this);
    });
    return next;
};
/**
 * Reverses the current Binary Tree, Left Node becomes Right node and vice versa.
 * Does not return a new instance, returns the current tree instance.
 * @method reverse
 * @member
 * @public
 * @returns {BTree<T>} Returns current tree instance.
 * @example
 * var tree = BTree.fromArray([10, 20, 30, 40, 50, 60, 70, 80]);
 * tree.reverse().toArray(); // => [10, 30, 20, 70, 60, 50, 40, 80]
 */
BTree.prototype.reverse = function () {
    var trav = function (currNode) {
        if (currNode === null) {
            return;
        }
        var temp = currNode.lNode;
        currNode.lNode = currNode.rNode;
        currNode.rNode = temp;
        trav(currNode.lNode);
        trav(currNode.rNode);
    };
    trav(this.root);
    return this;
};
/**
 * Returns the first index of a matched value; if it is not present, it returns -1.
 * @param {T} value Any value to find.
 * @method indexOf
 * @member
 * @public
 * @returns {number} Returns index of given item.
 * @example
 * var tree = BTree.fromArray([10, 20, 30, 40, 50, 60, 70, 80]);
 * tree.indexOf(30); // => 3
 * tree.indexOf(51); // => -1
 */
BTree.prototype.indexOf = function (value) {
    var retIndex = -1;
    this.each(function (node, index) {
        if (node.value === value && retIndex === -1) {
            retIndex = index;
        }
    });
    return retIndex;
};
/**
 * Checks if given item exists or not, returns boolean.
 * @param {T} value Any value to check if it exists or not.
 * @method includes
 * @member
 * @public
 * @returns {boolean} Returns true if it is present, otherwise false.
 * @example
 * var tree = BTree.fromArray([10, 20, 30, 40, 50, 60, 70, 80]);
 * tree.includes(30); // true
 * tree.includes(51); // false
 */
BTree.prototype.includes = function (value) {
    return this.indexOf(value) !== -1;
};
/**
 * Checks if given item exists or not, returns boolean.
 * @param {T} value Any value to check if it exists or not.
 * @method exists
 * @member
 * @public
 * @returns {boolean} Returns true if it is present, otherwise false.
 * @example
 * var tree = BTree.fromArray([10, 20, 30, 40, 50, 60, 70, 80]);
 * tree.exists(30); // true
 * tree.exists(51); // false
 */
BTree.prototype.exists = function (value) {
    return this.indexOf(value) !== -1;
};
/**
 * Checks if given item exists or not, returns boolean.
 * @param {T} value Any value to check if it exists or not.
 * @method has
 * @member
 * @public
 * @returns {boolean} Returns true if it is present, otherwise false.
 * @example
 * var tree = BTree.fromArray([10, 20, 30, 40, 50, 60, 70, 80]);
 * tree.has(30); // true
 * tree.has(51); // false
 */
BTree.prototype.has = function (value) {
    return this.indexOf(value) !== -1;
};
/**
 * Sorts the tree based on a compare function. Has an option to sort only at the first child level.
 * @param {Function} compareFnc Function used to determine the order of the elements. It is expected to return
 * a negative value if first argument is less than second argument, zero if they're equal and a positive
 * value otherwise. If omitted, the elements are sorted in ascending, ASCII character order.
 * ```ts
 * (a, b) => a - b
 * ```
 * @param {boolean} atOnlyFirstChildLevel Optional. Flag to specify if only the first child level of each node should be sorted. Default is `false`.
* @method sort * @member * @public * @returns {void} Returns undefined. * @example * var tree = BTree.fromArray([10, 200, 100, 50, 60, 90, 5, 3]); * tree.sort().toFlatArray(); // => [3,5,10,50,60,90,100,200] */ BTree.prototype.sort = function (compareFnc, atOnlyFirstChildLevel) { if (compareFnc === void 0) { compareFnc = function (a, b) { if (a === void 0) { a = 0; } if (b === void 0) { b = 0; } return (a < b) ? -1 : (a == b) ? 0 : 1; }; } if (atOnlyFirstChildLevel === void 0) { atOnlyFirstChildLevel = false; } if (atOnlyFirstChildLevel) { var DFS_1 = function (node) { if (node !== null) { var out = compareFnc(node.lNode, node.rNode); if (out > 0) { var temp = node.lNode; node.lNode = node.rNode; node.rNode = temp; } DFS_1(node.lNode); DFS_1(node.rNode); } }; DFS_1(this.root); } else { var arr_1 = []; var arrBFS_1 = []; var counter = 0; var children_1 = []; var BFS_1 = function (node) { if (node !== null && node.lNode !== null) { children_1.push(node.lNode); } if (node !== null && node.rNode !== null) { children_1.push(node.rNode); } if (node !== null) { arrBFS_1.push(node); arr_1.push(node.value); } if (children_1.length !== 0) { var first = children_1[0]; children_1.splice(0, 1); BFS_1(first); } }; BFS_1(this.root); while (arr_1.length !== 0) { var min = arr_1[0]; var minIndex = 0; for (var i = 1; i < arr_1.length; i++) { var out = compareFnc(min, arr_1[i]); if (out > 0) { min = arr_1[i]; minIndex = i; } } arrBFS_1[counter].value = min; arr_1.splice(minIndex, 1); counter++; } } }; /** * Prints entire tree on the console, useful for logging and checking status. * @method print * @member * @public * @returns {void} Returns undefined. * @example * var tree = BTree.fromArray([1, 2, 3]); * tree.print(); * 1 (1) * |- 2 (2) * |- 3 (3) */ BTree.prototype.print = function () { var isit = false; this.traverseDFS(function (node, index) { var len = BTree.getPathFromIndex(index).length; var isFirst = (isit) ? " |-".repeat(len - 1) : ""; console.log(isFirst, node.value, "(" + index + ")"); isit = true; }); }; /** * Returns the first matched tree node. Traverses using BFS. * @param {T} item any value to find inside the tree. * @method find * @member * @public * @returns {BTreeNode<T> | null} Returns the first matched tree node, if not found, returns null. * @example */ BTree.prototype.find = function (item) { var retNode = null; this.each(function (node, index) { if (node.value === item && retNode === null) { retNode = node; } }); return retNode; }; /** * Returns index value from given path. * @param {Array<'U'|'L'|'R'>} path Array for U L or R, which represents the quickest path to get to a node. * @returns {number} Returns index value. * @method getIndexFromPath * @public * @static * @member */ BTree.getIndexFromPath = function (path) {
var score = 1; while (path.length != 0) { if (path[0] === 'U') { path.splice(0, 1); } else if (path[0] === 'L') { score = score * 2; path.splice(0, 1); } else if (path[0] === 'R') { score = score * 2 + 1; path.splice(0, 1); } } return score; }; /** * Returns Path equivalent to the given index. * @param {number} index Index number from which path to be calculated. * @returns {Array<'U'|'L'|'R'>} Path equivalent to the given index. * @method getPathFromIndex * @public * @static */ BTree.getPathFromIndex = function (index) { var path = []; while (index != 1) { if (index % 2 === 0) { path.push('L'); } else { path.push('R'); index = index - 1; } index = index / 2; } path.push('U'); path = path.reverse(); return path; }; /** * Converts given values into a Binary Tree. * @param {T2[]} arr Any array of values. * @returns {BTree<T2>} Newly generated tree. * @method fromArray * @static * @public * @example * var tree = BTree.fromArray([10,20,30,40]); */ BTree.fromArray = function (arr) { var e_2, _a; var newArr = JSON.parse(JSON.stringify(arr)); var tree = new BTree(newArr[0]); try { for (var _b = __values(arr.entries()), _c = _b.next(); !_c.done; _c = _b.next()) { var _d = __read(_c.value, 2), index = _d[0], item = _d[1]; if (index !== 0) { tree.insertAt(item, index + 1); } } } catch (e_2_1) { e_2 = { error: e_2_1 }; } finally { try { if (_c && !_c.done && (_a = _b.return)) _a.call(_b); } finally { if (e_2) throw e_2.error; } } return tree; }; return BTree; }()); exports.BTree = BTree; if (typeof module != "undefined") { module.exports = { BTree: BTree, BTreeNode: btreenode_1.BTreeNode }; } if (typeof window != "undefined") { window.DSinJS = window.DSinJS || {}; window.DSinJS.BTree = BTree; }
path = JSON.parse(JSON.stringify(path));
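// Usage sketch for the path <-> index helpers above: indexes follow the classic
// heap numbering (root = 1, left child = 2i, right child = 2i + 1), so a path
// of moves from the root maps to a unique integer and back. Assumes this file
// is saved as btree.js next to the caller.
var BTree = require("./btree").BTree;

console.log(BTree.getIndexFromPath(['U', 'L', 'R'])); // 5  (1 -> 2 -> 5)
console.log(BTree.getPathFromIndex(5));               // ['U', 'L', 'R']

// insertAt consumes the same encoding: index 5 is the right child of the
// root's left child.
var tree = BTree.fromArray([1, 2, 3, 4]);
tree.insertAt(99, 5);
console.log(tree.toFlatArray()); // [1, 2, 3, 4, 99]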
0005_remove_hospital_sv_name.py
# Generated by Django 2.2.13 on 2020-06-30 06:51 from django.db import migrations class
(migrations.Migration): dependencies = [ ('pipeline', '0004_hospital'), ] operations = [ migrations.RemoveField( model_name='hospital', name='sv_name', ), ]
Migration
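# For context, a sketch of the model change this auto-generated migration
# corresponds to (the Hospital model below is hypothetical - only the removed
# field's name is known from the operation above):
#
#   class Hospital(models.Model):
#       name = models.CharField(max_length=255)   # assumed remaining field
#       # sv_name = ...                           # field removed by 0005
#
# Reversing this migration re-adds the field using the definition recorded in
# earlier migrations' state, but the column's data is not restored.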
config.go
/* Copyright 2019 The KubeOne Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package v1beta1 import ( "encoding/json" "github.com/imdario/mergo" "github.com/pkg/errors" kubeonev1beta1 "github.com/kubermatic/kubeone/pkg/apis/kubeone/v1beta1" "github.com/kubermatic/kubeone/pkg/templates/machinecontroller" corev1 "k8s.io/api/core/v1" ) type controlPlane struct { ClusterName string `json:"cluster_name"` CloudProvider *string `json:"cloud_provider"` PublicAddress []string `json:"public_address"` PrivateAddress []string `json:"private_address"` LeaderIP string `json:"leader_ip"` Hostnames []string `json:"hostnames"` Untaint bool `json:"untaint"` SSHUser string `json:"ssh_user"` SSHPort int `json:"ssh_port"` SSHPrivateKeyFile string `json:"ssh_private_key_file"` SSHAgentSocket string `json:"ssh_agent_socket"` Bastion string `json:"bastion"` BastionPort int `json:"bastion_port"` BastionUser string `json:"bastion_user"` NetworkID string `json:"network_id"` } // Config represents configuration in the terraform output format type Config struct { KubeOneAPI struct { Value struct { Endpoint string `json:"endpoint"` } `json:"value"` } `json:"kubeone_api"` KubeOneHosts struct { Value struct { ControlPlane controlPlane `json:"control_plane"` } `json:"value"` } `json:"kubeone_hosts"` KubeOneWorkers struct { Value map[string]kubeonev1beta1.DynamicWorkerConfig `json:"value"` } `json:"kubeone_workers"` Proxy struct { Value kubeonev1beta1.ProxyConfig `json:"value"` } `json:"proxy"` } type cloudProviderFlags struct { key string value interface{} } // NewConfigFromJSON creates a new config object from json func NewConfigFromJSON(j []byte) (c *Config, err error) { c = &Config{} return c, json.Unmarshal(j, c) } // Apply adds the terraform configuration options to the given // cluster config. 
func (c *Config) Apply(cluster *kubeonev1beta1.KubeOneCluster) error {
	if c.KubeOneAPI.Value.Endpoint != "" {
		cluster.APIEndpoint = kubeonev1beta1.APIEndpoint{
			Host: c.KubeOneAPI.Value.Endpoint,
		}
	}

	cp := c.KubeOneHosts.Value.ControlPlane

	if cp.CloudProvider != nil {
		if err := kubeonev1beta1.SetCloudProvider(&cluster.CloudProvider, *cp.CloudProvider); err != nil {
			return errors.Wrap(err, "failed to set cloud provider")
		}
	}

	var err error

	cluster.Name = cp.ClusterName

	// build up a list of master nodes
	hosts := make([]kubeonev1beta1.HostConfig, 0)
	for i, publicIP := range cp.PublicAddress {
		privateIP := publicIP
		if i < len(cp.PrivateAddress) {
			privateIP = cp.PrivateAddress[i]
		}

		hostname := ""
		if i < len(cp.Hostnames) {
			hostname = cp.Hostnames[i]
		}

		hosts = append(hosts, newHostConfig(i, publicIP, privateIP, hostname, cp))
	}

	if len(hosts) == 0 {
		// there were no public IPs available
		for i, privateIP := range cp.PrivateAddress {
			hostname := ""
			if i < len(cp.Hostnames) {
				hostname = cp.Hostnames[i]
			}

			hosts = append(hosts, newHostConfig(i, "", privateIP, hostname, cp))
		}
	}

	if len(hosts) > 0 {
		cluster.ControlPlane.Hosts = hosts
	}

	if err = mergo.Merge(&cluster.Proxy, &c.Proxy.Value); err != nil {
		return errors.Wrap(err, "failed to merge proxy settings")
	}

	if len(cp.NetworkID) > 0 && cluster.CloudProvider.Hetzner != nil {
		// NetworkID is used only for Hetzner
		cluster.CloudProvider.Hetzner.NetworkID = cp.NetworkID
	}

	// Walk through all configured workersets from terraform and apply their config
	// by either merging it into an existing workerSet or creating a new one
	for workersetName, workersetValue := range c.KubeOneWorkers.Value {
		var existingWorkerSet *kubeonev1beta1.DynamicWorkerConfig

		// Check whether we have a workerset with the same name defined
		// in the KubeOneCluster object
		for idx, workerset := range cluster.DynamicWorkers {
			if workerset.Name == workersetName {
				existingWorkerSet = &cluster.DynamicWorkers[idx]
				break
			}
		}

		// If we didn't find a workerset defined in the cluster object,
		// append a workerset from the terraform output to the cluster object
		if existingWorkerSet == nil {
			// no existing workerset found, use what we have from terraform
			workersetValue.Name = workersetName
			cluster.DynamicWorkers = append(cluster.DynamicWorkers, workersetValue)
			continue
		}

		// If we found a workerset defined in the cluster object,
		// merge values from the object and the terraform output
		switch {
		case cluster.CloudProvider.AWS != nil:
			err = c.updateAWSWorkerset(existingWorkerSet, workersetValue.Config.CloudProviderSpec)
		case cluster.CloudProvider.Azure != nil:
			err = c.updateAzureWorkerset(existingWorkerSet, workersetValue.Config.CloudProviderSpec)
		case cluster.CloudProvider.DigitalOcean != nil:
			err = c.updateDigitalOceanWorkerset(existingWorkerSet, workersetValue.Config.CloudProviderSpec)
		case cluster.CloudProvider.GCE != nil:
			err = c.updateGCEWorkerset(existingWorkerSet, workersetValue.Config.CloudProviderSpec)
		case cluster.CloudProvider.Hetzner != nil:
			err = c.updateHetznerWorkerset(existingWorkerSet, workersetValue.Config.CloudProviderSpec)
		case cluster.CloudProvider.Openstack != nil:
			err = c.updateOpenStackWorkerset(existingWorkerSet, workersetValue.Config.CloudProviderSpec)
		case cluster.CloudProvider.Packet != nil:
			err = c.updatePacketWorkerset(existingWorkerSet, workersetValue.Config.CloudProviderSpec)
		case cluster.CloudProvider.Vsphere != nil:
			err = c.updateVSphereWorkerset(existingWorkerSet, workersetValue.Config.CloudProviderSpec)
		default:
			return errors.Errorf("unknown provider")
		}
		if err != nil {
return errors.Wrapf(err, "failed to update provider-specific config for workerset %q from terraform config", workersetName) } } return nil } func newHostConfig(id int, publicIP, privateIP, hostname string, cp controlPlane) kubeonev1beta1.HostConfig { var isLeader bool if cp.LeaderIP != "" { isLeader = cp.LeaderIP == publicIP || cp.LeaderIP == privateIP } hostConfig := kubeonev1beta1.HostConfig{ ID: id, PublicAddress: publicIP, PrivateAddress: privateIP, Hostname: hostname, SSHUsername: cp.SSHUser, SSHPort: cp.SSHPort, SSHPrivateKeyFile: cp.SSHPrivateKeyFile, SSHAgentSocket: cp.SSHAgentSocket, Bastion: cp.Bastion, BastionPort: cp.BastionPort, BastionUser: cp.BastionUser, IsLeader: isLeader, } if cp.Untaint { hostConfig.Taints = []corev1.Taint{} } return hostConfig } func (c *Config) updateAWSWorkerset(existingWorkerSet *kubeonev1beta1.DynamicWorkerConfig, cfg json.RawMessage) error { var awsCloudConfig machinecontroller.AWSSpec if err := json.Unmarshal(cfg, &awsCloudConfig); err != nil { return errors.WithStack(err) } flags := []cloudProviderFlags{ {key: "ami", value: awsCloudConfig.AMI}, {key: "assignPublicIP", value: awsCloudConfig.AssignPublicIP}, {key: "availabilityZone", value: awsCloudConfig.AvailabilityZone}, {key: "diskIops", value: awsCloudConfig.DiskIops}, {key: "diskSize", value: awsCloudConfig.DiskSize}, {key: "diskType", value: awsCloudConfig.DiskType}, {key: "ebsVolumeEncrypted", value: awsCloudConfig.EBSVolumeEncrypted}, {key: "instanceProfile", value: awsCloudConfig.InstanceProfile}, {key: "instanceType", value: awsCloudConfig.InstanceType}, {key: "isSpotInstance", value: awsCloudConfig.IsSpotInstance}, {key: "region", value: awsCloudConfig.Region}, {key: "securityGroupIDs", value: awsCloudConfig.SecurityGroupIDs}, {key: "subnetId", value: awsCloudConfig.SubnetID}, {key: "tags", value: awsCloudConfig.Tags}, {key: "vpcId", value: awsCloudConfig.VPCID}, } for _, flag := range flags { if err := setWorkersetFlag(existingWorkerSet, flag.key, flag.value); err != nil { return errors.WithStack(err) } } return nil } func (c *Config) updateAzureWorkerset(existingWorkerSet *kubeonev1beta1.DynamicWorkerConfig, cfg json.RawMessage) error { var azureCloudConfig machinecontroller.AzureSpec if err := json.Unmarshal(cfg, &azureCloudConfig); err != nil { return errors.WithStack(err) } flags := []cloudProviderFlags{ {key: "assignPublicIP", value: azureCloudConfig.AssignPublicIP}, {key: "availabilitySet", value: azureCloudConfig.AvailabilitySet}, {key: "location", value: azureCloudConfig.Location}, {key: "resourceGroup", value: azureCloudConfig.ResourceGroup}, {key: "routeTableName", value: azureCloudConfig.RouteTableName}, {key: "securityGroupName", value: azureCloudConfig.SecurityGroupName}, {key: "zones", value: azureCloudConfig.Zones}, {key: "subnetName", value: azureCloudConfig.SubnetName}, {key: "tags", value: azureCloudConfig.Tags}, {key: "vmSize", value: azureCloudConfig.VMSize}, {key: "vnetName", value: azureCloudConfig.VNetName}, {key: "imageID", value: azureCloudConfig.ImageID}, {key: "osDiskSize", value: azureCloudConfig.OSDiskSize}, {key: "dataDiskSize", value: azureCloudConfig.DataDiskSize}, } for _, flag := range flags { if err := setWorkersetFlag(existingWorkerSet, flag.key, flag.value); err != nil { return errors.WithStack(err) } } return nil } func (c *Config) updateGCEWorkerset(existingWorkerSet *kubeonev1beta1.DynamicWorkerConfig, cfg json.RawMessage) error { var gceCloudConfig machinecontroller.GCESpec if err := json.Unmarshal(cfg, &gceCloudConfig); err != nil
flags := []cloudProviderFlags{ {key: "diskSize", value: gceCloudConfig.DiskSize}, {key: "diskType", value: gceCloudConfig.DiskType}, {key: "machineType", value: gceCloudConfig.MachineType}, {key: "network", value: gceCloudConfig.Network}, {key: "subnetwork", value: gceCloudConfig.Subnetwork}, {key: "zone", value: gceCloudConfig.Zone}, {key: "preemptible", value: gceCloudConfig.Preemptible}, {key: "assignPublicIPAddress", value: gceCloudConfig.AssignPublicIPAddress}, {key: "labels", value: gceCloudConfig.Labels}, {key: "tags", value: gceCloudConfig.Tags}, {key: "multizone", value: gceCloudConfig.MultiZone}, {key: "regional", value: gceCloudConfig.Regional}, {key: "customImage", value: gceCloudConfig.CustomImage}, } for _, flag := range flags { if err := setWorkersetFlag(existingWorkerSet, flag.key, flag.value); err != nil { return errors.WithStack(err) } } return nil } func (c *Config) updateDigitalOceanWorkerset(existingWorkerSet *kubeonev1beta1.DynamicWorkerConfig, cfg json.RawMessage) error { var doCloudConfig machinecontroller.DigitalOceanSpec if err := json.Unmarshal(cfg, &doCloudConfig); err != nil { return errors.WithStack(err) } flags := []cloudProviderFlags{ {key: "region", value: doCloudConfig.Region}, {key: "size", value: doCloudConfig.Size}, {key: "backups", value: doCloudConfig.Backups}, {key: "ipv6", value: doCloudConfig.IPv6}, {key: "private_networking", value: doCloudConfig.PrivateNetworking}, {key: "monitoring", value: doCloudConfig.Monitoring}, {key: "tags", value: doCloudConfig.Tags}, } for _, flag := range flags { if err := setWorkersetFlag(existingWorkerSet, flag.key, flag.value); err != nil { return errors.WithStack(err) } } return nil } func (c *Config) updateHetznerWorkerset(existingWorkerSet *kubeonev1beta1.DynamicWorkerConfig, cfg json.RawMessage) error { var hetznerConfig machinecontroller.HetznerSpec if err := json.Unmarshal(cfg, &hetznerConfig); err != nil { return err } flags := []cloudProviderFlags{ {key: "serverType", value: hetznerConfig.ServerType}, {key: "datacenter", value: hetznerConfig.Datacenter}, {key: "location", value: hetznerConfig.Location}, {key: "networks", value: hetznerConfig.Networks}, {key: "labels", value: hetznerConfig.Labels}, } for _, flag := range flags { if err := setWorkersetFlag(existingWorkerSet, flag.key, flag.value); err != nil { return errors.WithStack(err) } } return nil } func (c *Config) updateOpenStackWorkerset(existingWorkerSet *kubeonev1beta1.DynamicWorkerConfig, cfg json.RawMessage) error { var openstackConfig machinecontroller.OpenStackSpec if err := json.Unmarshal(cfg, &openstackConfig); err != nil { return err } flags := []cloudProviderFlags{ {key: "floatingIPPool", value: openstackConfig.FloatingIPPool}, {key: "image", value: openstackConfig.Image}, {key: "flavor", value: openstackConfig.Flavor}, {key: "securityGroups", value: openstackConfig.SecurityGroups}, {key: "availabilityZone", value: openstackConfig.AvailabilityZone}, {key: "network", value: openstackConfig.Network}, {key: "subnet", value: openstackConfig.Subnet}, {key: "rootDiskSizeGB", value: openstackConfig.RootDiskSizeGB}, {key: "nodeVolumeAttachLimit", value: openstackConfig.NodeVolumeAttachLimit}, {key: "tags", value: openstackConfig.Tags}, {key: "trustDevicePath", value: openstackConfig.TrustDevicePath}, } for _, flag := range flags { if err := setWorkersetFlag(existingWorkerSet, flag.key, flag.value); err != nil { return errors.WithStack(err) } } return nil } func (c *Config) updatePacketWorkerset(existingWorkerSet *kubeonev1beta1.DynamicWorkerConfig, 
cfg json.RawMessage) error { var packetConfig machinecontroller.PacketSpec if err := json.Unmarshal(cfg, &packetConfig); err != nil { return err } flags := []cloudProviderFlags{ {key: "projectID", value: packetConfig.ProjectID}, {key: "facilities", value: packetConfig.Facilities}, {key: "instanceType", value: packetConfig.InstanceType}, {key: "billingCycle", value: packetConfig.BillingCycle}, {key: "tags", value: packetConfig.Tags}, } for _, flag := range flags { if err := setWorkersetFlag(existingWorkerSet, flag.key, flag.value); err != nil { return errors.WithStack(err) } } return nil } func (c *Config) updateVSphereWorkerset(existingWorkerSet *kubeonev1beta1.DynamicWorkerConfig, cfg json.RawMessage) error { var vsphereConfig machinecontroller.VSphereSpec if err := json.Unmarshal(cfg, &vsphereConfig); err != nil { return err } flags := []cloudProviderFlags{ {key: "allowInsecure", value: vsphereConfig.AllowInsecure}, {key: "cluster", value: vsphereConfig.Cluster}, {key: "cpus", value: vsphereConfig.CPUs}, {key: "datacenter", value: vsphereConfig.Datacenter}, {key: "datastore", value: vsphereConfig.Datastore}, {key: "datastoreCluster", value: vsphereConfig.DatastoreCluster}, {key: "diskSizeGB", value: vsphereConfig.DiskSizeGB}, {key: "folder", value: vsphereConfig.Folder}, {key: "resourcePool", value: vsphereConfig.ResourcePool}, {key: "memoryMB", value: vsphereConfig.MemoryMB}, {key: "templateVMName", value: vsphereConfig.TemplateVMName}, {key: "vmNetName", value: vsphereConfig.VMNetName}, } for _, flag := range flags { if err := setWorkersetFlag(existingWorkerSet, flag.key, flag.value); err != nil { return errors.WithStack(err) } } return nil } func setWorkersetFlag(w *kubeonev1beta1.DynamicWorkerConfig, name string, value interface{}) error { // ignore empty values (i.e. not set in terraform output) switch s := value.(type) { case int: if s == 0 { return nil } case *int: if s == nil { return nil } case *uint: if s == nil { return nil } case string: if s == "" { return nil } case *string: if s == nil { return nil } case []string: if len(s) == 0 { return nil } case map[string]string: if s == nil { return nil } case bool: case *bool: if s == nil { return nil } default: return errors.New("unsupported type") } // update CloudProviderSpec ONLY IF given terraform output is absent in // original CloudProviderSpec jsonSpec := make(map[string]interface{}) if w.Config.CloudProviderSpec != nil { if err := json.Unmarshal(w.Config.CloudProviderSpec, &jsonSpec); err != nil { return errors.Wrap(err, "unable to parse the provided cloud provider") } } if _, exists := jsonSpec[name]; !exists { jsonSpec[name] = value } var err error w.Config.CloudProviderSpec, err = json.Marshal(jsonSpec) if err != nil { return errors.Wrap(err, "unable to update the cloud provider spec") } return nil }
{ return errors.WithStack(err) }
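// Standalone sketch (toy values, not the kubeone types above) of the merge rule
// setWorkersetFlag implements: a value coming from terraform output only fills
// a key that the explicit cluster config left unset - it never overrides one.
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Explicit cloudProviderSpec from the cluster manifest.
	spec := json.RawMessage(`{"instanceType":"t3.large"}`)

	jsonSpec := map[string]interface{}{}
	if err := json.Unmarshal(spec, &jsonSpec); err != nil {
		panic(err)
	}

	// Terraform output proposes a conflicting instance type and a new region.
	for k, v := range map[string]interface{}{"instanceType": "t2.micro", "region": "eu-west-3"} {
		if _, exists := jsonSpec[k]; !exists {
			jsonSpec[k] = v
		}
	}

	out, _ := json.Marshal(jsonSpec)
	fmt.Println(string(out)) // {"instanceType":"t3.large","region":"eu-west-3"}
}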
24.a675ef5f.chunk.js
(this["webpackJsonp@coreui/coreui-free-react-admin-template"]=this["webpackJsonp@coreui/coreui-free-react-admin-template"]||[]).push([[24],{1041:function(e,t,n){"use strict";n.r(t);var a=n(2),c=n(9),r=n(1),o=n.n(r),i=n(631),s=n.n(i),l=n(632),j=n.n(l),d=n(163),b=n(629),u=n(627),O=n(641),p=n.n(O),h=n(20),m=n(637),x=n(657),f=n(587),g=n(571),k=n(697),v=n(626),w=n(749),y=n(648),C=n(166),N=n(642),P=n(665),S=n(685),T=n(17),_=function(e){var t=e.modal,n=e.setModal,o=Object(r.useState)([]),i=Object(c.a)(o,2),s=i[0],l=i[1],j=Object(r.useState)({kolom:"",order:"asc"}),d=Object(c.a)(j,2),b=d[0],O=d[1],p=Object(r.useState)(""),h=Object(c.a)(p,2),m=h[0],x=h[1];Object(r.useEffect)((function(){t.modal&&Object(S.a)(l)}),[t]);var f=Object(r.memo)((function(e){var t=e.filter;return"jenjang"!==t.kolom&&x(""),Object(T.jsx)(T.Fragment,{children:"jenjang"===t.kolom&&Object(T.jsxs)(u.y,{children:[Object(T.jsx)(u.L,{children:"Jenjang Pendidikan"}),Object(T.jsxs)(u.Y,{required:!0,value:m,onChange:function(e){return x(e.target.value)},className:m?null:"is-invalid",children:[Object(T.jsx)("option",{value:"",children:"-- Pilih Jenjang --"}),s.map((function(e,t){return Object(T.jsx)("option",{value:e.jenjang,children:e.jenjang},t)}))]}),!m&&Object(T.jsx)("div",{className:"invalid-feedback",children:"Jenjang harus diisi!"})]})})}));return Object(T.jsxs)(u.N,{show:t.modal,onClose:function(){return n(Object(a.a)(Object(a.a)({},t),{},{type:"",modal:!1}))},children:[Object(T.jsx)(u.Q,{closeButton:!0,children:Object(T.jsx)(u.R,{children:"Filter Print"})}),Object(T.jsxs)(u.x,{children:[Object(T.jsxs)(u.O,{children:[Object(T.jsxs)(u.y,{children:[Object(T.jsx)(u.L,{children:"Urutan"}),Object(T.jsxs)(u.Y,{name:"order",value:b.order,onChange:function(e){return O(Object(a.a)(Object(a.a)({},b),{},Object(C.a)({},e.target.name,e.target.value)))},children:[Object(T.jsx)("option",{value:"asc",children:"A - Z (Ascending)"}),Object(T.jsx)("option",{value:"desc",children:"Z - A (Descending)"})]})]}),Object(T.jsxs)(u.y,{children:[Object(T.jsx)(u.L,{children:"Kolom"}),Object(T.jsxs)(u.Y,{name:"kolom",value:b.kolom,onChange:function(e){return function(e){O(Object(a.a)(Object(a.a)({},b),{},Object(C.a)({},e.target.name,e.target.value)))}(e)},children:[Object(T.jsx)("option",{value:"",children:"Semua"}),Object(T.jsx)("option",{value:"nama",children:"Nama"}),Object(T.jsx)("option",{value:"bidang",children:"Bidang"}),Object(T.jsx)("option",{value:"jabatan",children:"Tugas Pokok"}),Object(T.jsx)("option",{value:"jenjang",children:"Jenjang Pendidikan"})]})]}),Object(T.jsx)(f,{filter:b})]}),Object(T.jsxs)(u.P,{children:[Object(T.jsx)(u.f,{color:"primary",onClick:function(e){e.preventDefault();var c="",r={};m?(r={jenjang:m,order:b.order},b.kolom={jenjang:m},c="pttb?jenjang=".concat(b.kolom.jenjang,"&order=").concat(b.order)):(r={kolom:b.kolom,order:b.order},c="pttb?kolom=".concat(b.kolom,"&order=").concat(b.order)),"print"===t.type?Object(P.a)(c):Object(N.a)("pttb",r,"filter_pegawai"),n(Object(a.a)(Object(a.a)({},t),{},{type:"",modal:!1}))},disabled:"jenjang"===b.kolom&&!m,children:"Print"}),Object(T.jsx)(u.f,{color:"secondary",onClick:function(){return n(Object(a.a)(Object(a.a)({},t),{},{type:"",modal:!1}))},children:"Close"})]})]})]})},A=j()(s.a);t.default=function(){var 
e=Object(h.g)(),t=Object(r.useState)(""),n=Object(c.a)(t,2),i=n[0],s=n[1],l=Object(r.useState)(!1),j=Object(c.a)(l,2),O=j[0],C=j[1],N=Object(r.useContext)(d.a),P=N.pttbState,S=N.pttbDispatch,L=P.data,R=P.loading,B=Object(r.useState)({type:"",modal:!1}),D=Object(c.a)(B,2),F=D[0],M=D[1];Object(r.useEffect)((function(){Object(k.a)(S)}),[S]);var K=L.filter((function(e){return!(!e.nama||!e.jabatan||!e.nama.toLowerCase().includes(i.toLowerCase())&&!e.jabatan.toLowerCase().includes(i.toLowerCase()))})),J=[{name:"No",selector:"no",sortable:!0,width:"75px"},{name:"Nama",selector:"nama",sortable:!0,wrap:!0},{name:"Penetap SK",selector:"penetap_sk",sortable:!0,wrap:!0},{name:"Tugas Pokok",selector:"jabatan",sortable:!0,wrap:!0},{name:"Bidang",selector:"bidang",sortable:!0,wrap:!0},{name:"Action",sortable:!0,cell:function(e){return Object(T.jsx)("div",{"data-tag":"allowRowEvents",children:Object(T.jsxs)(u.g,{children:[Object(T.jsx)(u.f,{color:"info",className:"btn btn-sm",onClick:function(){return Y(e.id_pegawai)},children:"Kelengkapan"}),Object(T.jsx)(u.f,{color:"success",className:"btn btn-sm",onClick:function(){return H(e.id_pegawai)},children:Object(T.jsx)(m.a,{content:x.a,color:"white"})}),Object(T.jsx)(u.f,{color:"danger",className:"btn btn-sm",onClick:function(){return Z(e.id_pegawai)},children:Object(T.jsx)(m.a,{content:f.a,color:"white"})})]})})}}],X={headCells:{style:{fontSize:"1.15em"}}},E=o.a.useMemo((function(){return Object(T.jsxs)(T.Fragment,{children:[Object(T.jsx)(y.a,{onFilter:function(e){return s(e.target.value)},onClear:function(){i&&(C(!O),s(""))},filterText:i}),Object(T.jsxs)(u.f,{type:"button",color:"info",className:"ml-2",onClick:function(){return M(Object(a.a)(Object(a.a)({},F),{},{type:"print",modal:!0}))},children:["PDF ",Object(T.jsx)(m.a,{content:g.a})]}),Object(T.jsxs)(u.f,{type:"button",color:"success",className:"ml-2",onClick:function(){return M(Object(a.a)(Object(a.a)({},F),{},{type:"excel",modal:!0}))},children:["Excel ",Object(T.jsx)(m.a,{content:g.a})]})]})}),[i,O,F]),H=function(t){e.push("/epekerja/admin/pegawai/pttb-edit/".concat(t))},Y=function(t){e.push("/epekerja/admin/pegawai-detail/".concat(t))},Z=function(e){A.fire({icon:"warning",title:"Anda yakin ingin menghapus data ini ?",text:"Jika yakin, klik YA",showConfirmButton:!0,showCancelButton:!0,confirmButtonColor:"#3085d6",cancelButtonColor:"#d33",confirmButtonText:"YA"}).then((function(t){t.isConfirmed&&(!function(e,t){v.a.delete("admin/pegawai/pttb/".concat(e)).then((function(e){Object(k.a)(t)})).catch((function(e){}))}(e,S),A.fire({icon:"success",title:"Terhapus",text:"Data berhasil dihapus"}))}))},z=function(e){var t=e.data;return Object(T.jsx)(T.Fragment,{children:Object(T.jsxs)("div",{style:{padding:"10px 63px"},children:[Object(T.jsxs)(u.X,{className:"mb-1",children:[Object(T.jsx)(u.n,{md:"2",children:Object(T.jsx)("strong",{children:"NIP"})}),Object(T.jsx)(u.n,{children:t.nip})]}),Object(T.jsxs)(u.X,{className:"mb-1",children:[Object(T.jsx)(u.n,{md:"2",children:Object(T.jsx)("strong",{children:"Tgl. Penetapan SK"})}),Object(T.jsx)(u.n,{children:Object(w.a)(new Date(t.tgl_penetapan_sk),"dd/MM/y")})]}),Object(T.jsxs)(u.X,{className:"mb-1",children:[Object(T.jsx)(u.n,{md:"2",children:Object(T.jsx)("strong",{children:"No. SK"})}),Object(T.jsx)(u.n,{children:t.no_sk})]}),Object(T.jsxs)(u.X,{className:"mb-1",children:[Object(T.jsx)(u.n,{md:"2",children:Object(T.jsx)("strong",{children:"Tgl. 
Mulai Tugas"})}),Object(T.jsx)(u.n,{children:t.tgl_mulai_tugas})]}),Object(T.jsxs)(u.X,{className:"mb-1",children:[Object(T.jsx)(u.n,{md:"2",children:Object(T.jsx)("strong",{children:"Kontrak Ke"})}),Object(T.jsx)(u.n,{children:t.kontrak_ke})]}),Object(T.jsxs)(u.X,{className:"mb-1",children:[Object(T.jsx)(u.n,{md:"2",children:Object(T.jsx)("strong",{children:"Masa Kerja"})}),Object(T.jsx)(u.n,{children:t.masa_kerja})]})]})})};return Object(T.jsxs)(T.Fragment,{children:[Object(T.jsxs)(u.h,{children:[Object(T.jsx)(u.l,{children:Object(T.jsx)("h3",{children:"Data Pegawai Tidak Tetap Bulanan (PTTB)"})}),Object(T.jsxs)(u.i,{children:[Object(T.jsx)(u.f,{color:"primary",className:"btn btn-md",onClick:function(){e.push("/epekerja/admin/pegawai/pttb-tambah")},children:"Tambah Data"}),L.length>0?Object(T.jsx)(p.a,{columns:J,data:K,noHeader:!0,responsive:!0,customStyles:X,pagination:!0,paginationResetDefaultPage:O,subHeader:!0,subHeaderComponent:E,expandableRows:!0,expandOnRowClicked:!0,highlightOnHover:!0,expandableRowsComponent:Object(T.jsx)(z,{})}):R?Object(T.jsx)(T.Fragment,{children:Object(T.jsx)("div",{children:Object(T.jsx)(u.X,{children:Object(T.jsx)(u.n,{className:"text-center",children:Object(T.jsx)("img",{className:"mt-4 ml-3",width:30,src:b.d,alt:"load-animation"})})})})}):Object(T.jsx)(p.a,{columns:J,data:K,noHeader:!0,responsive:!0,customStyles:X})]})]}),Object(T.jsx)(_,{modal:F,setModal:M})]})}},640:function(e,t,n){"use strict";function
(e,t){return t||(t=e.slice(0)),Object.freeze(Object.defineProperties(e,{raw:{value:Object.freeze(t)}}))}n.d(t,"a",(function(){return a}))},644:function(e,t){e.exports=function(e){return e.webpackPolyfill||(e.deprecate=function(){},e.paths=[],e.children||(e.children=[]),Object.defineProperty(e,"loaded",{enumerable:!0,get:function(){return e.l}}),Object.defineProperty(e,"id",{enumerable:!0,get:function(){return e.i}}),e.webpackPolyfill=1),e}},648:function(e,t,n){"use strict";n(1);var a,c,r=n(640),o=n(643),i=o.default.button(a||(a=Object(r.a)(["\n border-top-left-radius: 0;\n border-bottom-left-radius: 0;\n border-top-right-radius: 5px;\n border-bottom-right-radius: 5px;\n height: 37px;\n text-align: center;\n display: flex;\n align-items: center;\n justify-content: center;\n background-color: #3e5973;\n border: none;\n color: white;\n padding: 0 10px;\n transition: 0.3s;\n\n &:hover {\n background-color: #283c4f;\n }\n"]))),s=o.default.input(c||(c=Object(r.a)(["\n height: 37px;\n width: 200px;\n border-radius: 3px;\n border-top-left-radius: 5px;\n border-bottom-left-radius: 5px;\n border-top-right-radius: 0;\n border-bottom-right-radius: 0;\n border: 1px solid #e5e5e5;\n padding: 0 32px 0 16px;\n\n &:hover {\n cursor: pointer;\n }\n"]))),l=n(17);t.a=function(e){var t=e.filterText,n=e.onFilter,a=e.onClear;return Object(l.jsxs)(l.Fragment,{children:[Object(l.jsx)(s,{id:"search",type:"text",placeholder:"Cari pegawai","aria-label":"Search Input",value:t,onChange:n}),Object(l.jsx)(i,{type:"button",onClick:a,children:"Reset"})]})}},657:function(e,t,n){"use strict";n.d(t,"a",(function(){return a}));var a=["512 512","<path fill='var(--ci-primary-color, currentColor)' d='M345.994,42.019,179.531,208.481A646.3,646.3,0,0,0,25.325,456.521a24.845,24.845,0,0,0,6,25.708l.087.087a24.84,24.84,0,0,0,17.611,7.342,25.172,25.172,0,0,0,8.1-1.344,646.283,646.283,0,0,0,248.04-154.207L471.62,167.646A88.831,88.831,0,0,0,345.994,42.019ZM282.531,311.48A614.445,614.445,0,0,1,60.419,453.221,614.435,614.435,0,0,1,202.158,231.108l99.162-99.161,80.372,80.372ZM448.993,145.019l-44.674,44.673L323.947,109.32l44.674-44.674a56.832,56.832,0,1,1,80.372,80.373Z' class='ci-primary'/>"]},665:function(e,t,n){"use strict";t.a=function(e){window.open("".concat(localStorage.baseURL,"print-daftar-pegawai/").concat(e),"_blank")}},685:function(e,t,n){"use strict";n.d(t,"a",(function(){return c}));var a=n(626),c=function(e){a.a.get("admin/pegawai/pendidikan/jenjang").then((function(t){e(t.data.data),console.log(t.data)})).catch((function(e){console.log(e.response.data)}))}},697:function(e,t,n){"use strict";n.d(t,"a",(function(){return r}));var a=n(40),c=n(626),r=function(e){e({type:a.LOADING}),c.a.get("admin/pegawai/pttb").then((function(t){e({type:a.SUCCESS,payload:t.data.data})})).catch((function(t){e({type:a.ERROR,payload:t.response.data.message})}))}}}]); //# sourceMappingURL=24.a675ef5f.chunk.js.map
a
rect.go
package intgeom

// A Rect2 represents a span from one point in 2D space to another.
// If Min is greater than Max on any axis, methods will return undefined
// results.
type Rect2 struct {
	Min, Max Point2
}

// MaxDimensions reports that a Rect2 has only two dimensions of definition.
func (r Rect2) MaxDimensions() int {
	return 2
}

// A Rect3 represents a span from one point in 3D space to another.
// If Min is greater than Max on any axis, methods will return undefined
// results.
type Rect3 struct {
	Min, Max Point3
}

// MaxDimensions reports that a Rect3 has only three dimensions of definition.
func (r Rect3) MaxDimensions() int {
	return 3
}
// x < x2 and y < y2, and will swap the inputs if that is not true.
// If that enforcement is not desired, construct the struct manually.
func NewRect2(x, y, x2, y2 int) Rect2 {
	if x > x2 {
		x, x2 = x2, x
	}
	if y > y2 {
		y, y2 = y2, y
	}
	return Rect2{
		Min: Point2{x, y},
		Max: Point2{x2, y2},
	}
}

// NewRect2WH returns an (X,Y):(X+W,Y+H) rectangle. This enforces that
// w and h are positive, and will decrease x and y respectively if that is not true.
func NewRect2WH(x, y, w, h int) Rect2 {
	if w < 0 {
		x += w
		w *= -1
	}
	if h < 0 {
		y += h
		h *= -1
	}
	return Rect2{
		Min: Point2{x, y},
		Max: Point2{x + w, y + h},
	}
}

// NewBoundingRect2 will produce the minimal rectangle that contains all of
// the input points.
func NewBoundingRect2(pts ...Point2) Rect2 {
	return Rect2{
		Min: pts[0].LesserOf(pts...),
		Max: pts[0].GreaterOf(pts...),
	}
}

// NewRect3 returns an (X,Y,Z):(X2,Y2,Z2) rectangle. This enforces that
// x < x2, y < y2, and z < z2, and will swap the inputs if that is not true.
func NewRect3(x, y, z, x2, y2, z2 int) Rect3 {
	if x > x2 {
		x, x2 = x2, x
	}
	if y > y2 {
		y, y2 = y2, y
	}
	if z > z2 {
		z, z2 = z2, z
	}
	return Rect3{
		Min: Point3{x, y, z},
		Max: Point3{x2, y2, z2},
	}
}

// NewRect3WH returns an (X,Y,Z):(X+W,Y+H,Z+D) rectangle. This enforces that
// w, h, and d are positive, and will decrease x, y, and z respectively if that
// is not true.
func NewRect3WH(x, y, z, w, h, d int) Rect3 {
	if w < 0 {
		x += w
		w *= -1
	}
	if h < 0 {
		y += h
		h *= -1
	}
	if d < 0 {
		z += d
		d *= -1
	}
	return Rect3{
		Min: Point3{x, y, z},
		Max: Point3{x + w, y + h, z + d},
	}
}

// NewBoundingRect3 will produce the minimal rectangle that contains all of
// the input points.
func NewBoundingRect3(pts ...Point3) Rect3 {
	return Rect3{
		Min: pts[0].LesserOf(pts...),
		Max: pts[0].GreaterOf(pts...),
	}
}

// Area returns W * H.
func (r Rect2) Area() int {
	return r.W() * r.H()
}

// Span returns the span on this rectangle's ith axis.
func (r Rect2) Span(i int) int {
	return r.Max[i] - r.Min[i]
}

// W returns the width of this rectangle.
func (r Rect2) W() int {
	return r.Span(0)
}

// H returns the height of this rectangle.
func (r Rect2) H() int {
	return r.Span(1)
}

// Space returns W * H * D
func (r Rect3) Space() int {
	return r.W() * r.H() * r.D()
}

// Span returns the span on this rectangle's ith axis.
func (r Rect3) Span(i int) int {
	return r.Max[i] - r.Min[i]
}

// W returns the width of this rectangle.
func (r Rect3) W() int {
	return r.Span(0)
}

// H returns the height of this rectangle.
func (r Rect3) H() int {
	return r.Span(1)
}

// D returns the depth of this rectangle.
func (r Rect3) D() int {
	return r.Span(2)
}

// Midpoint returns the midpoint of this rectangle's span over a given dimension.
func (r Rect2) Midpoint(i int) int {
	return (r.Min[i] + r.Max[i]) / 2
}

// Midpoint returns the midpoint of this rectangle's span over a given dimension.
func (r Rect3) Midpoint(i int) int {
	return (r.Min[i] + r.Max[i]) / 2
}

// Center returns the center of this rectangle.
func (r Rect2) Center() Point2 {
	return Point2{
		r.Midpoint(0),
		r.Midpoint(1),
	}
}

// Center returns the center of this rectangle.
func (r Rect3) Center() Point3 {
	return Point3{
		r.Midpoint(0),
		r.Midpoint(1),
		r.Midpoint(2),
	}
}

// Perimeter computes the sum of the edge lengths of a rectangle.
func (r Rect2) Perimeter() int {
	// The number of edges in an n-dimensional rectangle is n * 2^(n-1)
	// (http://en.wikipedia.org/wiki/Hypercube_graph). Thus the number
	// of edges of length (ai - bi), where the rectangle is determined
	// by p = (a1, a2, ..., an) and q = (b1, b2, ..., bn), is 2^(n-1).
	//
	// The perimeter of the rectangle, then, is given by the formula
	// 2^(n-1) * [(b1 - a1) + (b2 - a2) + ... + (bn - an)].
	return 2 * (r.W() + r.H())
}

// Margin computes the sum of the edge lengths of a rectangle.
func (r Rect3) Margin() int {
	return 4 * (r.W() + r.H() + r.D())
}

// Contains tests whether p is located inside or on the boundary of r.
func (r Rect2) Contains(p Point2) bool {
	return (p.X() >= r.Min.X() &&
		p.X() <= r.Max.X() &&
		p.Y() >= r.Min.Y() &&
		p.Y() <= r.Max.Y())
}

// Contains tests whether p is located inside or on the boundary of r.
func (r Rect3) Contains(p Point3) bool {
	return (p.X() >= r.Min.X() &&
		p.X() <= r.Max.X() &&
		p.Y() >= r.Min.Y() &&
		p.Y() <= r.Max.Y() &&
		p.Z() >= r.Min.Z() &&
		p.Z() <= r.Max.Z())
}

// ContainsRect tests whether r2 is located inside r1.
func (r Rect2) ContainsRect(r2 Rect2) bool {
	return (r2.Min.X() >= r.Min.X() &&
		r2.Max.X() <= r.Max.X() &&
		r2.Min.Y() >= r.Min.Y() &&
		r2.Max.Y() <= r.Max.Y())
}

// ContainsRect tests whether r2 is located inside r1.
func (r Rect3) ContainsRect(r2 Rect3) bool {
	return (r2.Min.X() >= r.Min.X() &&
		r2.Max.X() <= r.Max.X() &&
		r2.Min.Y() >= r.Min.Y() &&
		r2.Max.Y() <= r.Max.Y() &&
		r2.Min.Z() >= r.Min.Z() &&
		r2.Max.Z() <= r.Max.Z())
}

// GreaterOf returns a rectangle formed of the lowest values on each
// dimension for Min, and the highest for Max.
func (r Rect2) GreaterOf(r2 Rect2) Rect2 {
	r.Min = r.Min.LesserOf(r2.Min)
	r.Max = r.Max.GreaterOf(r2.Max)
	return r
}

// GreaterOf returns a rectangle formed of the lowest values on each
// dimension for Min, and the highest for Max.
func (r Rect3) GreaterOf(r2 Rect3) Rect3 {
	r.Min = r.Min.LesserOf(r2.Min)
	r.Max = r.Max.GreaterOf(r2.Max)
	return r
}

// Intersects returns whether the two rectangles intersect.
func (r Rect3) Intersects(r2 Rect3) bool {
	// There are four cases of overlap:
	//
	// 1.  a1------------b1
	//          a2------------b2
	//          p--------q
	//
	// 2.       a1------------b1
	//     a2------------b2
	//          p--------q
	//
	// 3.  a1-----------------b1
	//          a2-------b2
	//          p--------q
	//
	// 4.       a1-------b1
	//     a2-----------------b2
	//          p--------q
	//
	// Thus there are only two cases of non-overlap:
	//
	// 1. a1------b1
	//                    a2------b2
	//
	// 2.             a1------b1
	//     a2------b2
	//
	// Enforced by constructor: a1 <= b1 and a2 <= b2. So we can just
	// check the endpoints.
	return !((r2.Max.X() <= r.Min.X() || r.Max.X() <= r2.Min.X()) ||
		(r2.Max.Y() <= r.Min.Y() || r.Max.Y() <= r2.Min.Y()) ||
		(r2.Max.Z() <= r.Min.Z() || r.Max.Z() <= r2.Min.Z()))
}

// Intersects returns whether the two rectangles intersect.
func (r Rect2) Intersects(r2 Rect2) bool {
	// There are four cases of overlap:
	//
	// 1.  a1------------b1
	//          a2------------b2
	//          p--------q
	//
	// 2.       a1------------b1
	//     a2------------b2
	//          p--------q
	//
	// 3.  a1-----------------b1
	//          a2-------b2
	//          p--------q
	//
	// 4.       a1-------b1
	//     a2-----------------b2
	//          p--------q
	//
	// Thus there are only two cases of non-overlap:
	//
	// 1. a1------b1
	//                    a2------b2
	//
	// 2.             a1------b1
	//     a2------b2
	//
	// Enforced by constructor: a1 <= b1 and a2 <= b2. So we can just
	// check the endpoints.
	return !((r2.Max.X() <= r.Min.X() || r.Max.X() <= r2.Min.X()) ||
		(r2.Max.Y() <= r.Min.Y() || r.Max.Y() <= r2.Min.Y()))
}
// NewRect2 returns an (X,Y):(X2,Y2) rectangle. This enforces that
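The hypercube edge-count cited in the Perimeter comment is worth spelling out once. This short derivation is standard combinatorics added here for clarity, not taken from the package itself:

```latex
% An axis-aligned n-dimensional box has n * 2^(n-1) edges: choose one of the
% n axes as the edge direction, then fix each of the other n-1 coordinates
% at one of its two extremes. Each axis i therefore contributes 2^(n-1)
% parallel edges of length (b_i - a_i), so the total edge length is
\mathrm{margin} \;=\; 2^{\,n-1} \sum_{i=1}^{n} (b_i - a_i)
% n = 2: 2^1 (W + H)     = 2(W + H)       (Rect2.Perimeter)
% n = 3: 2^2 (W + H + D) = 4(W + H + D)   (Rect3.Margin)
```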
test.rs
mod integration { use std::path::Path; use core_compat::manager::map_manager::MapManager; use core_compat::manager::data_manager::DataManager; use core_compat::manager::sprite_manager::SpriteManager; use core_compat::entity::rmd_type::RmdType;
use core_compat::entity::sprite_type::SpriteType; use core_compat::entity::entry::Entry; #[test] fn map_load() { // create the managers let map_path = Path::new("../data/DATAs/Map/"); let data_path = Path::new("../data/DATAs/"); let sprite_path = Path::new("../data/RLEs/"); let mut map_manager = MapManager::new(&map_path); let mut data_manager = DataManager::new(&data_path); let mut sprite_manager = SpriteManager::new(&sprite_path); // load the map map_manager.load_map(1).unwrap(); let map = map_manager.get_map(1).unwrap(); assert_eq!(map.number(), 1); assert_eq!((map.size_x() * map.size_y()) as usize, map.tile_count()); // load the tile data let tile = map.get_tile(0).unwrap(); let object_num = tile.obj_rmd_entry.file(); // points to a `/DATAs/Obj/*.rmd` file let object_idx = tile.obj_rmd_entry.index(); // and its index into this file data_manager.get_data(RmdType::Object, object_num as usize).unwrap(); // load the tile rle } #[test] fn load_main_screen() { // create the managers let sprite_path = Path::new("../data/RLEs/"); let mut sprite_manager = SpriteManager::new(&sprite_path); // load the correct sprite let entry = Entry::new(0, 4); let interface_t = SpriteType::Interface; let sprite = sprite_manager.get_sprite(entry, interface_t).unwrap(); assert!(sprite.rle_entry == entry); } }
ListTeamMembersCommand.ts
import { getSerdePlugin } from "@aws-sdk/middleware-serde"; import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; import { Command as $Command } from "@aws-sdk/smithy-client"; import { FinalizeHandlerArguments, Handler, HandlerExecutionContext, HttpHandlerOptions as __HttpHandlerOptions, MetadataBearer as __MetadataBearer, MiddlewareStack, SerdeContext as __SerdeContext, } from "@aws-sdk/types"; import { CodeStarClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../CodeStarClient"; import { ListTeamMembersRequest, ListTeamMembersResult } from "../models/models_0"; import { deserializeAws_json1_1ListTeamMembersCommand, serializeAws_json1_1ListTeamMembersCommand, } from "../protocols/Aws_json1_1"; export interface ListTeamMembersCommandInput extends ListTeamMembersRequest {} export interface ListTeamMembersCommandOutput extends ListTeamMembersResult, __MetadataBearer {} /** * <p>Lists all team members associated with a project.</p> * @example * Use a bare-bones client and the command you need to make an API call. * ```javascript * import { CodeStarClient, ListTeamMembersCommand } from "@aws-sdk/client-codestar"; // ES Modules import * // const { CodeStarClient, ListTeamMembersCommand } = require("@aws-sdk/client-codestar"); // CommonJS import * const client = new CodeStarClient(config); * const command = new ListTeamMembersCommand(input); * const response = await client.send(command); * ``` * * @see {@link ListTeamMembersCommandInput} for command's `input` shape. * @see {@link ListTeamMembersCommandOutput} for command's `response` shape. * @see {@link CodeStarClientResolvedConfig | config} for CodeStarClient's `config` shape. * */ export class ListTeamMembersCommand extends $Command< ListTeamMembersCommandInput, ListTeamMembersCommandOutput, CodeStarClientResolvedConfig > { // Start section: command_properties // End section: command_properties constructor(readonly input: ListTeamMembersCommandInput) { // Start section: command_constructor super(); // End section: command_constructor } /** * @internal */ resolveMiddleware( clientStack: MiddlewareStack<ServiceInputTypes, ServiceOutputTypes>, configuration: CodeStarClientResolvedConfig, options?: __HttpHandlerOptions ): Handler<ListTeamMembersCommandInput, ListTeamMembersCommandOutput> { this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); const stack = clientStack.concat(this.middlewareStack); const { logger } = configuration; const clientName = "CodeStarClient"; const commandName = "ListTeamMembersCommand"; const handlerExecutionContext: HandlerExecutionContext = { logger, clientName, commandName, inputFilterSensitiveLog: ListTeamMembersRequest.filterSensitiveLog, outputFilterSensitiveLog: ListTeamMembersResult.filterSensitiveLog, }; const { requestHandler } = configuration; return stack.resolve(
); } private serialize(input: ListTeamMembersCommandInput, context: __SerdeContext): Promise<__HttpRequest> { return serializeAws_json1_1ListTeamMembersCommand(input, context); } private deserialize(output: __HttpResponse, context: __SerdeContext): Promise<ListTeamMembersCommandOutput> { return deserializeAws_json1_1ListTeamMembersCommand(output, context); } // Start section: command_body_extra // End section: command_body_extra }
(request: FinalizeHandlerArguments<any>) => requestHandler.handle(request.request as __HttpRequest, options || {}), handlerExecutionContext
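The `resolveMiddleware` body above is the generic AWS SDK v3 pattern: a serde plugin contributes serialize/deserialize steps to the middleware stack, and `stack.resolve` composes the stack around the terminal request handler. A minimal sketch of that composition idea in Go; every name here is a hypothetical illustration, not SDK API:

```go
package main

import "fmt"

// Handler is the terminal step: it "sends" a request and yields a response.
type Handler func(req string) string

// Middleware wraps a Handler with extra behavior, e.g. a serde step.
type Middleware func(next Handler) Handler

// Stack composes middleware around a terminal handler, first element outermost.
type Stack []Middleware

func (s Stack) Resolve(terminal Handler) Handler {
	h := terminal
	for i := len(s) - 1; i >= 0; i-- { // wrap from the inside out
		h = s[i](h)
	}
	return h
}

func main() {
	// Analogue of getSerdePlugin: serialize on the way in, deserialize on the way out.
	serde := Middleware(func(next Handler) Handler {
		return func(req string) string {
			wire := "serialized(" + req + ")"
			return "deserialized(" + next(wire) + ")"
		}
	})
	send := Handler(func(req string) string { return "response-to:" + req })
	handler := Stack{serde}.Resolve(send)
	fmt.Println(handler("ListTeamMembersRequest"))
}
```

Resolving from the innermost layer outward is what lets the serde step see the un-serialized input on the way in and the raw response on the way out.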
0002_entry_follow_media_profile.py
# Generated by Django 3.2.9 on 2021-11-25 03:34 from django.conf import settings from django.db import migrations, models import django.db.models.deletion
class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('copytter', '0001_initial'), ] operations = [ migrations.CreateModel( name='Profile', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('gender', models.CharField(blank=True, max_length=20)), ('birth_date', models.DateField(blank=True, null=True)), ('location', models.CharField(blank=True, max_length=30)), ('age', models.IntegerField(blank=True, default=0)), ('icon_pass', models.ImageField(blank=True, upload_to='images/')), ('profile_message', models.TextField(blank=True, max_length=300)), ('status', models.CharField(choices=[('machine', 'BOT'), ('public', '一般'), ('block', '凍結'), ('close', '非公開'), ('official', '公式')], default='public', max_length=10)), ('profile_user_id', models.CharField(default='riqURB89q4', max_length=32, unique=True)), ('sensitive_entry', models.BooleanField(default=False)), ('follow_count', models.IntegerField(default=0)), ('follower_count', models.IntegerField(default=0)), ('profile_first_registed', models.BooleanField(default=False)), ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), migrations.CreateModel( name='Media', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('media_type', models.CharField(max_length=8)), ('media_url', models.URLField()), ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), migrations.CreateModel( name='Follow', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('follow_user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='follow_user', to=settings.AUTH_USER_MODEL)), ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user', to=settings.AUTH_USER_MODEL)), ], ), migrations.CreateModel( name='Entry', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('body', models.TextField(max_length=300)), ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ('status', models.CharField(choices=[('draft', '下書き'), ('public', '公開中'), ('close', '非公開'), ('machine', 'BOT')], default='draft', max_length=8)), ('relation_id', models.CharField(max_length=8)), ('relation_cont', models.IntegerField(default=0)), ('like_count', models.IntegerField(default=0)), ('media_close', models.BooleanField(default=False)), ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), ]
trackerd.go
/** * @file trackerd.go * @brief A simple implementation of a type-D tracker. * @author Hanlin Shi * @version 0.1.0 */ package main import ( "github.com/gorilla/mux" "net/http" "net/url" ) /** * Global variables. */ var idd_max = 0 /** * @brief Handling function for the listener of the type-D tracker. * * This function will parse the request, then record the info transferred. * * @param w HTTP response. * @param r HTTP request. * @return Void. */
id := m["id"][0] referer := m["referer"][0] RecordRefer("tkd", id, referer) } /** * @brief Handling function for type - A tracker. * * This tracker will serve a script to the first - party page in which it will * set an id in cookie of that first - party page. * * @param w HTTP response. * @param r HTTP request. * @return Void. */ func TrackerDScriptHandler(w http.ResponseWriter, r *http.Request) { script := mux.Vars(r)["script"] // http.ServeFile(w, r, "../trackerd/"+script+".js") ServeFileWrapper(w, r, "../trackerd/"+script+".js") }
func ListenerDHandler(w http.ResponseWriter, r *http.Request) { u := r.URL m, _ := url.ParseQuery(u.RawQuery)
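As the listener shows, the tracker's beacon carries `id` and `referer` as query parameters, which `ListenerDHandler` pulls out of the raw query and passes to `RecordRefer`. A sketch of the client side of that contract; the `/listend` path below is a hypothetical placeholder, since the actual route is registered with the mux router elsewhere:

```go
package main

import (
	"fmt"
	"net/url"
)

// buildBeacon assembles the query string that ListenerDHandler parses with
// url.ParseQuery: it reads m["id"][0] and m["referer"][0] and hands them to
// RecordRefer. The "/listend" path is illustrative only.
func buildBeacon(id, referer string) string {
	q := url.Values{}
	q.Set("id", id)
	q.Set("referer", referer)
	return "/listend?" + q.Encode()
}

func main() {
	fmt.Println(buildBeacon("user-42", "https://example.com/article"))
}
```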
go_test.go
// Copyright (c) 2014-2015, b3log.org // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package util import ( "runtime" "strconv" "strings" "testing" ) func TestGetCrossPlatforms(t *testing.T) { crossPlatforms := Go.GetCrossPlatforms() if len(crossPlatforms) < 1 { t.Error("should have one platform at least") } } func TestGetAPIPath(t *testing.T) { apiPath := Go.GetAPIPath() v := runtime.Version()[2:] v = v[:3] verNum, err := strconv.ParseFloat(v, 64) if nil != err { t.Error(err) return } if verNum >= 1.4 { if !strings.HasSuffix(apiPath, "src") { t.Error("api path should end with \"src\"") return } } else { if !strings.HasSuffix(apiPath, "pkg") { t.Error("api path should end with \"pkg\"") } } } func TestIsAPI(t *testing.T) { apiPath := Go.GetAPIPath() if !Go.IsAPI(apiPath) { t.Error("api path root should belong to api path") return } root := "/root" if Go.IsAPI(root) { t.Error("root should not belong to api path") return } } func TestGetGoFormats(t *testing.T) { formats := Go.GetGoFormats() if len(formats) < 1 {
} func TestGetExecutableInGOBIN(t *testing.T) { bin := Go.GetExecutableInGOBIN("test") if OS.IsWindows() { if !strings.HasSuffix(bin, ".exe") { t.Error("Executable binary should end with .exe") return } } }
t.Error("should have one go format tool [gofmt] at least") }
ip_test.go
package ip import ( "bytes" "encoding/binary" psErr "github.com/42milez/ProtocolStack/src/error" psLog "github.com/42milez/ProtocolStack/src/log" "github.com/42milez/ProtocolStack/src/mw" "github.com/42milez/ProtocolStack/src/net/arp" "github.com/42milez/ProtocolStack/src/net/eth" "github.com/42milez/ProtocolStack/src/repo" "github.com/42milez/ProtocolStack/src/worker" "github.com/golang/mock/gomock" "sync" "testing" ) func TestPacketID_Next(t *testing.T) { want := uint16(0) got := id.Next() if got != want { t.Errorf("PacketID.Next() = %d; want %d", got, want) } } func TestReceive_1(t *testing.T) { _, teardown := setupIpTest(t) defer teardown() dev := createTapDevice() iface := createIface() _ = repo.IfaceRepo.Register(iface, dev) // ICMP packet := createIpPacket() want := psErr.OK got := Receive(packet, dev) if got != want { t.Errorf("Receive() = %s; want %s", got, want) } // TCP packet[9] = uint8(mw.PnTCP) packet[10] = 0x00 packet[11] = 0x00 csum := mw.Checksum(packet[:HdrLenMin], 0) packet[10] = uint8((csum & 0xff00) >> 8) packet[11] = uint8(csum & 0x00ff) want = psErr.OK got = Receive(packet, dev) if got != want { t.Errorf("Receive() = %s; want %s", got, want) } // UDP packet[9] = uint8(mw.PnUDP) packet[10] = 0x00 packet[11] = 0x00 csum = mw.Checksum(packet[:HdrLenMin], 0) packet[10] = uint8((csum & 0xff00) >> 8) packet[11] = uint8(csum & 0x00ff) want = psErr.Error got = Receive(packet, dev) if got != want { t.Errorf("Receive() = %s; want %s", got, want) } } func TestReceive_2(t *testing.T) { _, teardown := setupIpTest(t) defer teardown() // invalid packet length (packet length is less than 20bytes) dev := createTapDevice() want := psErr.InvalidPacketLength got := Receive([]byte{}, dev) if got != want { t.Errorf("Receive() = %s; want %s", got, want) } // invalid protocol version packet := createIpPacket() packet[0] = packet[0] & 0x0f // invalid version want = psErr.InvalidProtocolVersion got = Receive(packet, dev) if got != want { t.Errorf("Receive() = %s; want %s", got, want) } // invalid packet length (IHL is greater than packet length) packet = createIpPacket() packet[0] = packet[0] | 0x0f // invalid IHL want = psErr.InvalidPacketLength got = Receive(packet, dev) if got != want { t.Errorf("Receive() = %s; want %s", got, want) } // invalid packet length (Total Length doesn't match actual packet length) packet = createIpPacket() packet[2] = packet[2] | 0xff // invalid Total Length packet[3] = packet[3] | 0xff want = psErr.InvalidPacketLength got = Receive(packet, dev) if got != want { t.Errorf("Receive() = %s; want %s", got, want) } // ttl expired packet = createIpPacket() packet[8] = 0 want = psErr.TtlExpired got = Receive(packet, dev) if got != want { t.Errorf("Receive() = %s; want %s", got, want) } // checksum mismatch packet = createIpPacket() packet[12] = 0x12 packet[13] = 0x34 want = psErr.ChecksumMismatch got = Receive(packet, dev) if got != want { t.Errorf("Receive() = %s; want %s", got, want) } // iface not found packet = createIpPacket() want = psErr.InterfaceNotFound got = Receive(packet, dev) if got != want { t.Errorf("Receive() = %s; want %s", got, want) } // unsupported protocol iface := createIface() _ = repo.IfaceRepo.Register(iface, dev) packet = createIpPacket() packet[9] = 0x00 packet[10] = 0x00 packet[11] = 0x00 csum := mw.Checksum(packet[:HdrLenMin], 0) packet[10] = uint8((csum & 0xff00) >> 8) packet[11] = uint8(csum & 0x00ff) want = psErr.UnsupportedProtocol got = Receive(packet, dev) if got != want { t.Errorf("Receive() = %s; want %s", got, want) } } func 
TestSend(t *testing.T) { ctrl, teardown := setupIpTest(t) defer teardown() devMock := mw.NewMockIDevice(ctrl) devMock.EXPECT().IsUp().Return(true) devMock.EXPECT().Name().Return("net0") devMock.EXPECT().Flag().Return(mw.BroadcastFlag | mw.NeedArpFlag) devMock.EXPECT().MTU().Return(uint16(mw.EthPayloadLenMax)).AnyTimes() devMock.EXPECT().Priv().Return(mw.Privilege{FD: 3, Name: "tap0"}) devMock.EXPECT().Transmit(any, any, any).Return(psErr.OK) iface := createIface() _ = repo.IfaceRepo.Register(iface, devMock) arpMock := arp.NewMockIResolver(ctrl) arpMock.EXPECT().Resolve(any, any).Return(mw.EthAddr{11, 12, 13, 14, 15, 16}, arp.Complete) arp.Resolver = arpMock payload := []byte{1, 2, 3, 4, 5, 6, 7, 8, 9, 10} src := mw.IP{192, 168, 0, 1} dst := mw.IP{192, 168, 0, 2} want := psErr.OK got := Send(mw.PnICMP, payload, src, dst) if got != want { t.Errorf("Send() = %s; want %s", got, want) } } func TestStart(t *testing.T) { _, teardown := setupIpTest(t) defer teardown() var wg sync.WaitGroup _ = Start(&wg) rcvMonMsg := <-rcvMonCh sndMonMsg := <-sndMonCh if rcvMonMsg.Current != worker.Running || sndMonMsg.Current != worker.Running { t.Errorf("Start() failed") } } func TestStop(t *testing.T) { _, teardown := setupIpTest(t) defer teardown() var wg sync.WaitGroup _ = Start(&wg) <-rcvMonCh <-sndMonCh Stop() rcvMonMsg := <-rcvMonCh sndMonMsg := <-sndMonCh if rcvMonMsg.Current != worker.Stopped || sndMonMsg.Current != worker.Stopped { t.Errorf("Stop() failed") } } var any = gomock.Any() func createIface() *mw.Iface { return &mw.Iface{ Family: mw.V4AddrFamily, Unicast: mw.IP{192, 168, 0, 1}, Netmask: mw.IP{255, 255, 255, 0}, Broadcast: mw.IP{192, 168, 0, 255}, } } func createIpPacket() []byte { payload := []byte{1, 2, 3, 4, 5, 6, 7, 8, 9, 10} hdr := &mw.IpHdr{} hdr.VHL = uint8(ipv4<<4) | uint8(HdrLenMin/4) hdr.TotalLen = uint16(HdrLenMin + len(payload)) hdr.ID = 0 hdr.TTL = 0xff hdr.Protocol = mw.PnICMP hdr.Src = mw.V4Addr{192, 168, 0, 1} hdr.Dst = mw.V4Addr{192, 168, 1, 1} buf := new(bytes.Buffer) if err := binary.Write(buf, binary.BigEndian, hdr); err != nil { return nil } if err := binary.Write(buf, binary.BigEndian, &payload); err != nil { return nil } packet := buf.Bytes() csum := mw.Checksum(packet[:HdrLenMin], 0) packet[10] = uint8((csum & 0xff00) >> 8) packet[11] = uint8(csum & 0x00ff) return packet } func createTapDevice() *eth.TapDevice { return &eth.TapDevice{ Device: mw.Device{ Type_: mw.EthernetDevice, Name_: "net0", MTU_: mw.EthPayloadLenMax, Flag_: mw.BroadcastFlag | mw.NeedArpFlag, Addr_: mw.EthAddr{11, 12, 13, 14, 15, 16}, Priv_: mw.Privilege{FD: 3, Name: "tap0"}, }, } } func
(t *testing.T) (ctrl *gomock.Controller, teardown func()) { ctrl = gomock.NewController(t) psLog.DisableOutput() reset := func() { psLog.EnableOutput() repo.IfaceRepo.Init() } teardown = func() { ctrl.Finish() reset() } return }
setupIpTest
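The packet fixtures above all follow the same recipe: zero bytes 10-11 (the IPv4 header-checksum field), run `mw.Checksum` over the first `HdrLenMin` (20) bytes, and write the result back big-endian. Assuming `mw.Checksum` implements the standard RFC 1071 Internet checksum with its second argument as an initial accumulator (an assumption; the real implementation lives in the `mw` package), the computation looks like this:

```go
package main

import "fmt"

// checksum computes the RFC 1071 Internet checksum: sum the data as
// big-endian 16-bit words, fold any carries back into the low 16 bits,
// then take the ones' complement. An odd trailing byte is padded with zero.
func checksum(data []byte) uint16 {
	var sum uint32
	for i := 0; i+1 < len(data); i += 2 {
		sum += uint32(data[i])<<8 | uint32(data[i+1])
	}
	if len(data)%2 == 1 {
		sum += uint32(data[len(data)-1]) << 8
	}
	for sum>>16 != 0 {
		sum = (sum & 0xffff) + (sum >> 16) // fold carry
	}
	return ^uint16(sum)
}

func main() {
	hdr := make([]byte, 20) // HdrLenMin: a minimal IPv4 header
	hdr[0] = 0x45           // version 4, IHL 5 (5 * 4 = 20 bytes)
	hdr[8] = 0xff           // TTL
	// Bytes 10-11 (the checksum field) are zero while summing, as in the tests.
	c := checksum(hdr)
	hdr[10] = byte(c >> 8) // write the result back big-endian
	hdr[11] = byte(c)
	fmt.Printf("checksum: %#04x\n", c)
}
```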
UpdateAppCommand.ts
import { AmplifyClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AmplifyClient"; import { UpdateAppRequest, UpdateAppResult } from "../models/index"; import { deserializeAws_restJson1_1UpdateAppCommand, serializeAws_restJson1_1UpdateAppCommand } from "../protocols/Aws_restJson1_1"; import { getSerdePlugin } from "@aws-sdk/middleware-serde"; import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; import { Command as $Command } from "@aws-sdk/smithy-client"; import { FinalizeHandlerArguments, Handler, HandlerExecutionContext, MiddlewareStack, HttpHandlerOptions as __HttpHandlerOptions, MetadataBearer as __MetadataBearer, SerdeContext as __SerdeContext } from "@aws-sdk/types"; export type UpdateAppCommandInput = UpdateAppRequest; export type UpdateAppCommandOutput = UpdateAppResult & __MetadataBearer; export class
extends $Command< UpdateAppCommandInput, UpdateAppCommandOutput, AmplifyClientResolvedConfig > { // Start section: command_properties // End section: command_properties constructor(readonly input: UpdateAppCommandInput) { // Start section: command_constructor super(); // End section: command_constructor } resolveMiddleware( clientStack: MiddlewareStack<ServiceInputTypes, ServiceOutputTypes>, configuration: AmplifyClientResolvedConfig, options?: __HttpHandlerOptions ): Handler<UpdateAppCommandInput, UpdateAppCommandOutput> { this.middlewareStack.use( getSerdePlugin(configuration, this.serialize, this.deserialize) ); const stack = clientStack.concat(this.middlewareStack); const handlerExecutionContext: HandlerExecutionContext = { logger: {} as any }; const { requestHandler } = configuration; return stack.resolve( (request: FinalizeHandlerArguments<any>) => requestHandler.handle(request.request as __HttpRequest, options || {}), handlerExecutionContext ); } private serialize( input: UpdateAppCommandInput, context: __SerdeContext ): Promise<__HttpRequest> { return serializeAws_restJson1_1UpdateAppCommand(input, context); } private deserialize( output: __HttpResponse, context: __SerdeContext ): Promise<UpdateAppCommandOutput> { return deserializeAws_restJson1_1UpdateAppCommand(output, context); } // Start section: command_body_extra // End section: command_body_extra }
UpdateAppCommand
main.go
package main import ( "context" "fmt" "github.com/zhs007/jccclient" ) func main()
{ client := jccclient.NewClient(nil, "127.0.0.1:7051", "wzDkh9h2fhfUVuS9jZ8uVbhV3vC5AWX3") reply, err := client.GetGeoIP(context.Background(), "47.90.46.159", "ipvoid") if err != nil { fmt.Printf("GetGeoIP %v", err) return } if reply != nil { fmt.Printf("%v", reply) } return }
IntlActions.js
import alt from "alt-instance"; import localeCodes from "assets/locales"; class
{ switchLocale(locale) { // var locale = "cn" // console.debug("[IntlStore]Translate: ", locale); // if (/cn|zh/.test(locale)) { // return { locale }; // } return dispatch => { import("assets/locales/locale-" + locale + ".json") .then(result => { dispatch({ locale, localeData: result.default }); }) .catch(err => { console.log("fetch locale error:", err); // Translate: fall back to "zh" on error // dispatch({locale: ""}); dispatch({ locale: "zh" }); }); }; } getLocale(locale) { return locale; } } export default alt.createActions(IntlActions);
IntlActions
cli_test_helpers.rs
use clap::{App, ArgMatches}; use std::fmt; #[derive(Debug)] pub enum
<'a> { Value(&'a str), Values(Vec<String>), Boolean(bool), } use self::Expectation::{Boolean, Value, Values}; #[derive(Debug)] enum CliTestError { ValueMismatch(String, String, String), MultiValueMismatch(String, Vec<String>, Vec<String>), MissingFlag(String), MissingSubcmd(), ClapError(String), } impl fmt::Display for CliTestError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let content = match self { CliTestError::ValueMismatch(flag, expected, actual) => { format!( r#" '{}' validation failed: Expected: '{}' Actual: '{}' "#, flag, expected, actual ) } CliTestError::MultiValueMismatch(flag, expected, actual) => { format!( r#" '{}' validation failed: Expected: '{:#?}' Actual: '{:#?}' "#, flag, expected, actual, ) } CliTestError::MissingFlag(flag) => format!("Expected {} flag was missing.", flag), CliTestError::MissingSubcmd() => { "Expected a subcommand, but none was specified".to_string() } CliTestError::ClapError(err) => { format!( r#" Command Parse error: **************************************** {} **************************************** "#, err.to_string() ) } }; write!(f, "{}", content) } } pub fn assert_command(app: App<'_, '_>, cmd: &str, assertions: Vec<(&str, Expectation<'_>)>) { let cmd_vec: Vec<&str> = cmd.split_whitespace().collect(); let errors = match app.get_matches_from_safe(cmd_vec) { Ok(matches) => { match matches.subcommand() { (_, Some(matches)) => assert_matches(matches, assertions), (_, None) => vec![CliTestError::MissingSubcmd()], } } Err(err) => vec![CliTestError::ClapError(err.to_string())], }; if !errors.is_empty() { let error_string = errors.into_iter() .map(|error| format!("{}", error)) .collect::<Vec<_>>() .join("\n"); panic!( r#" Failed assertions for command: '{}' {} "#, cmd, error_string ); } } fn assert_matches(matches: &ArgMatches<'_>, assertions: Vec<(&str, Expectation<'_>)>) -> Vec<CliTestError> { let mut errs = Vec::new(); for (flag, expected_value) in assertions { match expected_value { Value(expected) => { match matches.value_of(flag) { Some(actual) => { if actual != expected { errs.push(CliTestError::ValueMismatch(flag.to_string(), expected.to_string(), actual.to_string())); } } None => { errs.push(CliTestError::MissingFlag(flag.to_string())); } } } Values(expected) => { match matches.values_of(flag) { Some(actual_values) => { let actual = actual_values.map(str::to_string).collect(); if actual != expected { errs.push(CliTestError::MultiValueMismatch(flag.to_string(), expected, actual)); } } None => { errs.push(CliTestError::MissingFlag(flag.to_string())); } } } Boolean(expected) => { if matches.is_present(flag) != expected { errs.push(CliTestError::ValueMismatch(flag.to_string(), expected.to_string(), (!expected).to_string())); } } } } errs } #[macro_export] macro_rules! assertion { ($flag:expr, true) => { ($flag, $crate::cli_test_helpers::Expectation::Boolean(true)) }; ($flag:expr, false) => { ($flag, $crate::cli_test_helpers::Expectation::Boolean(false)) }; ($flag:expr, [ $( $value:expr ),+ ]) => { ($flag, $crate::cli_test_helpers::Expectation::Values(vec![ $( $value.to_string() ),* ])) }; ($flag:expr, $value:expr) => { ($flag, $crate::cli_test_helpers::Expectation::Value($value)) }; } #[macro_export] macro_rules! assert_cmd { ($app:expr, $cmd:expr) => { $crate::cli_test_helpers::assert_command($app, $cmd, Vec::new()); }; ($app:expr, $cmd:expr, $($key:expr => $value:tt),* ) => ({ let mut assertions = Vec::new(); $( assertions.push(assertion!($key, $value)); )+ $crate::cli_test_helpers::assert_command($app, $cmd, assertions); }); }
Expectation
encoding_target.rs
// This file was generated by gir (d933f9a+) from gir-files (???) // DO NOT EDIT use EncodingProfile; use Error; use ffi; use glib::object::IsA; use glib::translate::*; use glib_ffi; use gobject_ffi; use std::mem; use std::ptr; glib_wrapper! { pub struct EncodingTarget(Object<ffi::GstEncodingTarget>); match fn { get_type => || ffi::gst_encoding_target_get_type(), } } impl EncodingTarget { pub fn new(name: &str, category: &str, description: &str, profiles: &[EncodingProfile]) -> EncodingTarget { assert_initialized_main_thread!(); unsafe { from_glib_full(ffi::gst_encoding_target_new(name.to_glib_none().0, category.to_glib_none().0, description.to_glib_none().0, profiles.to_glib_none().0)) } } pub fn load<'a, P: Into<Option<&'a str>>>(name: &str, category: P) -> Result<EncodingTarget, Error> { assert_initialized_main_thread!(); let category = category.into(); let category = category.to_glib_none(); unsafe { let mut error = ptr::null_mut(); let ret = ffi::gst_encoding_target_load(name.to_glib_none().0, category.0, &mut error); if error.is_null() { Ok(from_glib_full(ret)) } else { Err(from_glib_full(error)) } } } pub fn load_from_file(filepath: &str) -> Result<EncodingTarget, Error> { assert_initialized_main_thread!(); unsafe { let mut error = ptr::null_mut(); let ret = ffi::gst_encoding_target_load_from_file(filepath.to_glib_none().0, &mut error); if error.is_null() { Ok(from_glib_full(ret)) } else { Err(from_glib_full(error)) } } } } unsafe impl Send for EncodingTarget {} unsafe impl Sync for EncodingTarget {} pub trait EncodingTargetExt { fn add_profile<P: IsA<EncodingProfile>>(&self, profile: &P) -> bool; fn get_category(&self) -> Option<String>; fn get_description(&self) -> Option<String>; fn get_name(&self) -> Option<String>; fn get_profile(&self, name: &str) -> Option<EncodingProfile>; fn get_profiles(&self) -> Vec<EncodingProfile>; fn save(&self) -> Result<(), Error>; fn save_to_file(&self, filepath: &str) -> Result<(), Error>; } impl<O: IsA<EncodingTarget>> EncodingTargetExt for O { fn add_profile<P: IsA<EncodingProfile>>(&self, profile: &P) -> bool { unsafe { from_glib(ffi::gst_encoding_target_add_profile(self.to_glib_none().0, profile.to_glib_full())) } } fn get_category(&self) -> Option<String> { unsafe { from_glib_none(ffi::gst_encoding_target_get_category(self.to_glib_none().0)) } } fn get_description(&self) -> Option<String> { unsafe { from_glib_none(ffi::gst_encoding_target_get_description(self.to_glib_none().0))
unsafe { from_glib_none(ffi::gst_encoding_target_get_name(self.to_glib_none().0)) } } fn get_profile(&self, name: &str) -> Option<EncodingProfile> { unsafe { from_glib_full(ffi::gst_encoding_target_get_profile(self.to_glib_none().0, name.to_glib_none().0)) } } fn get_profiles(&self) -> Vec<EncodingProfile> { unsafe { FromGlibPtrContainer::from_glib_none(ffi::gst_encoding_target_get_profiles(self.to_glib_none().0)) } } fn save(&self) -> Result<(), Error> { unsafe { let mut error = ptr::null_mut(); let _ = ffi::gst_encoding_target_save(self.to_glib_none().0, &mut error); if error.is_null() { Ok(()) } else { Err(from_glib_full(error)) } } } fn save_to_file(&self, filepath: &str) -> Result<(), Error> { unsafe { let mut error = ptr::null_mut(); let _ = ffi::gst_encoding_target_save_to_file(self.to_glib_none().0, filepath.to_glib_none().0, &mut error); if error.is_null() { Ok(()) } else { Err(from_glib_full(error)) } } } }
} } fn get_name(&self) -> Option<String> {
evaluation.py
# noqa: D100 import logging from typing import Dict, Optional import hail as hl logging.basicConfig( format="%(asctime)s (%(name)s %(lineno)s): %(message)s", datefmt="%m/%d/%Y %I:%M:%S %p", ) logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) def compute_ranked_bin( ht: hl.Table, score_expr: hl.expr.NumericExpression, bin_expr: Dict[str, hl.expr.BooleanExpression] = {"bin": True}, compute_snv_indel_separately: bool = True, n_bins: int = 100, desc: bool = True, ) -> hl.Table: r""" Return a table with a bin for each row based on the ranking of `score_expr`. The bin is computed by dividing the `score_expr` into `n_bins` bins containing approximately equal numbers of elements. This is done by ranking the rows by `score_expr` (and a random number in cases where multiple variants have the same score) and then assigning the variant to a bin based on its ranking. If `compute_snv_indel_separately` is True all items in `bin_expr` will be stratified by snv / indels for the ranking and bin calculation. Because SNV and indel rows are mutually exclusive, they are re-combined into a single annotation. For example if we have the following four variants and scores and `n_bins` of 2: ======== ======= ====== ================= ================= Variant Type Score bin - `compute_snv_indel_separately`: -------- ------- ------ ------------------------------------- \ \ \ False True ======== ======= ====== ================= ================= Var1 SNV 0.1 1 1 Var2 SNV 0.2 1 2 Var3 Indel 0.3 2 1 Var4 Indel 0.4 2 2 ======== ======= ====== ================= ================= .. note:: The `bin_expr` defines which data the bin(s) should be computed on. E.g., to get biallelic specific binning and singleton specific binning, the following could be used: .. code-block:: python bin_expr={ 'biallelic_bin': ~ht.was_split, 'singleton_bin': ht.singleton } :param ht: Input Table :param score_expr: Expression containing the score :param bin_expr: Specific row grouping(s) to perform ranking and binning on (see note) :param compute_snv_indel_separately: Should all `bin_expr` items be stratified by SNVs / indels :param n_bins: Number of bins to bin the data into :param desc: Whether to bin the score in descending order :return: Table with the requested bin annotations """ if compute_snv_indel_separately: # For each bin, add a SNV / indel stratification bin_expr = { f"{bin_id}_{snv}": (bin_expr & snv_expr) for bin_id, bin_expr in bin_expr.items() for snv, snv_expr in [ ("snv", hl.is_snp(ht.alleles[0], ht.alleles[1])), ("indel", ~hl.is_snp(ht.alleles[0], ht.alleles[1])), ] } bin_ht = ht.select( **{f"_filter_{bin_id}": bin_expr for bin_id, bin_expr in bin_expr.items()}, _score=score_expr, snv=hl.is_snp(ht.alleles[0], ht.alleles[1]), _rand=hl.rand_unif(0, 1), ) logger.info( "Sorting the HT by score_expr followed by a random float between 0 and 1. " "Then adding a row index per grouping defined by bin_expr..." ) bin_ht = bin_ht.order_by("_score", "_rand") bin_ht = bin_ht.annotate( **{ f"{bin_id}_rank": hl.or_missing( bin_ht[f"_filter_{bin_id}"], hl.scan.count_where(bin_ht[f"_filter_{bin_id}"]), ) for bin_id in bin_expr } ) bin_ht = bin_ht.key_by("locus", "alleles") # Annotate globals with variant counts per group defined by bin_expr. 
This is used to determine bin assignment bin_ht = bin_ht.annotate_globals( bin_group_variant_counts=bin_ht.aggregate( hl.Struct( **{ bin_id: hl.agg.filter( bin_ht[f"_filter_{bin_id}"], hl.agg.count(), ) for bin_id in bin_expr } ) ) ) logger.info("Binning ranked rows into %d bins...", n_bins) bin_ht = bin_ht.select( "snv", **{ bin_id: hl.int( hl.floor( ( n_bins * ( bin_ht[f"{bin_id}_rank"] / hl.float64(bin_ht.bin_group_variant_counts[bin_id]) ) ) + 1 ) ) for bin_id in bin_expr }, ) if desc: bin_ht = bin_ht.annotate( **{bin_id: n_bins - bin_ht[bin_id] + 1 for bin_id in bin_expr} ) # Because SNV and indel rows are mutually exclusive, re-combine them into a single bin. # Update the global bin_group_variant_counts struct to reflect the change in bin names in the table if compute_snv_indel_separately: bin_expr_no_snv = { bin_id.rsplit("_", 1)[0] for bin_id in bin_ht.bin_group_variant_counts } bin_ht = bin_ht.annotate_globals( bin_group_variant_counts=hl.struct( **{ bin_id: hl.struct( **{ snv: bin_ht.bin_group_variant_counts[f"{bin_id}_{snv}"] for snv in ["snv", "indel"] } ) for bin_id in bin_expr_no_snv } ) ) bin_ht = bin_ht.transmute( **{ bin_id: hl.if_else( bin_ht.snv, bin_ht[f"{bin_id}_snv"], bin_ht[f"{bin_id}_indel"], ) for bin_id in bin_expr_no_snv } ) return bin_ht def
( bin_ht: hl.Table, checkpoint_path: Optional[str] = None, ) -> hl.GroupedTable: """ Group a Table that has been annotated with bins (`compute_ranked_bin` or `create_binned_ht`). The table will be grouped by bin_id (bin, biallelic, etc.), contig, snv, bi_allelic, singleton, release_adj, and bin. .. note:: If performing an aggregation following this grouping (such as `score_bin_agg`) then the aggregation function will need to use `ht._parent` to get the origin Table from the GroupedTable for the aggregation :param bin_ht: Input Table with a `bin_id` annotation :param checkpoint_path: If provided, an intermediate checkpoint table is created with all required annotations before shuffling. :return: Table grouped by bin(s) """ # Explode the rank table by bin_id bin_ht = bin_ht.annotate( bin_groups=hl.array( [ hl.Struct(bin_id=bin_name, bin=bin_ht[bin_name]) for bin_name in bin_ht.bin_group_variant_counts ] ) ) bin_ht = bin_ht.explode(bin_ht.bin_groups) bin_ht = bin_ht.transmute( bin_id=bin_ht.bin_groups.bin_id, bin=bin_ht.bin_groups.bin ) bin_ht = bin_ht.filter(hl.is_defined(bin_ht.bin)) if checkpoint_path is not None: bin_ht.checkpoint(checkpoint_path, overwrite=True) else: bin_ht = bin_ht.persist() # Group by bin_id, bin and additional stratification desired and compute QC metrics per bin return bin_ht.group_by( bin_id=bin_ht.bin_id, contig=bin_ht.locus.contig, snv=hl.is_snp(bin_ht.alleles[0], bin_ht.alleles[1]), bi_allelic=~bin_ht.was_split, singleton=bin_ht.singleton, release_adj=bin_ht.ac > 0, bin=bin_ht.bin, )._set_buffer_size(20000) def compute_binned_truth_sample_concordance( ht: hl.Table, binned_score_ht: hl.Table, n_bins: int = 100, add_bins: Dict[str, hl.expr.BooleanExpression] = {}, ) -> hl.Table: """ Determine the concordance (TP, FP, FN) between a truth sample within the callset and the sample's truth data grouped by bins computed using `compute_ranked_bin`. .. note:: The input `ht` should contain three row fields: - score: value to use for binning - GT: a CallExpression containing the genotype of the evaluation data for the sample - truth_GT: a CallExpression containing the genotype of the truth sample The input `binned_score_ht` should contain: - score: value used to bin the full callset - bin: the full callset bin `add_bins` can be used to add additional global and truth sample binning to the final binned truth sample concordance HT. The keys in `add_bins` must be present in `binned_score_ht` and the values in `add_bins` should be expressions on `ht` that define a subset of variants to bin in the truth sample. An example is if we want to look at the global and truth sample binning on only bi-allelic variants. `add_bins` could be set to {'biallelic_bin': ht.biallelic}. The table is grouped by global/truth sample bin and variant type and contains TP, FP and FN.
:param ht: Input HT :param binned_score_ht: Table with the bin annotation for each variant :param n_bins: Number of bins to bin the data into :param add_bins: Dictionary of additional global bin columns (key) and the expr to use for binning the truth sample (value) :return: Binned truth sample concordance HT """ # Annotate score and global bin indexed_binned_score_ht = binned_score_ht[ht.key] ht = ht.annotate( **{f"global_{bin_id}": indexed_binned_score_ht[bin_id] for bin_id in add_bins}, **{f"_{bin_id}": bin_expr for bin_id, bin_expr in add_bins.items()}, score=indexed_binned_score_ht.score, global_bin=indexed_binned_score_ht.bin, ) # Annotate the truth sample bin bin_ht = compute_ranked_bin( ht, score_expr=ht.score, bin_expr={ "truth_sample_bin": hl.expr.bool(True), **{f"truth_sample_{bin_id}": ht[f"_{bin_id}"] for bin_id in add_bins}, }, n_bins=n_bins, ) ht = ht.join(bin_ht, how="left") bin_list = [ hl.tuple(["global_bin", ht.global_bin]), hl.tuple(["truth_sample_bin", ht.truth_sample_bin]), ] bin_list.extend( [hl.tuple([f"global_{bin_id}", ht[f"global_{bin_id}"]]) for bin_id in add_bins] ) bin_list.extend( [ hl.tuple([f"truth_sample_{bin_id}", ht[f"truth_sample_{bin_id}"]]) for bin_id in add_bins ] ) # Explode the global and truth sample bins ht = ht.annotate(bin=bin_list) ht = ht.explode(ht.bin) ht = ht.annotate(bin_id=ht.bin[0], bin=hl.int(ht.bin[1])) # Compute TP, FP and FN by bin_id, variant type and bin return ( ht.group_by("bin_id", "snv", "bin") .aggregate( # TP => allele is found in both data sets tp=hl.agg.count_where(ht.GT.is_non_ref() & ht.truth_GT.is_non_ref()), # FP => allele is found only in test data set fp=hl.agg.count_where( ht.GT.is_non_ref() & hl.or_else(ht.truth_GT.is_hom_ref(), True) ), # FN => allele is found in truth data only fn=hl.agg.count_where( hl.or_else(ht.GT.is_hom_ref(), True) & ht.truth_GT.is_non_ref() ), min_score=hl.agg.min(ht.score), max_score=hl.agg.max(ht.score), n_alleles=hl.agg.count(), ) .repartition(5) ) def create_truth_sample_ht( mt: hl.MatrixTable, truth_mt: hl.MatrixTable, high_confidence_intervals_ht: hl.Table ) -> hl.Table: """ Compute a table comparing a truth sample in callset vs the truth. :param mt: MT of truth sample from callset to be compared to truth :param truth_mt: MT of truth sample :param high_confidence_intervals_ht: High confidence interval HT :return: Table containing both the callset truth sample and the truth data """ def split_filter_and_flatten_ht( truth_mt: hl.MatrixTable, high_confidence_intervals_ht: hl.Table ) -> hl.Table: """ Split a truth sample MT, filter it to the given high confidence intervals, and then "flatten" it as a HT by annotating GT in a row field. 
:param truth_mt: Truth sample MT :param high_confidence_intervals_ht: High confidence intervals :return: Truth sample table with GT as a row annotation """ assert truth_mt.count_cols() == 1 if "was_split" not in truth_mt.row: truth_mt = hl.split_multi_hts(truth_mt) truth_mt = truth_mt.filter_rows( hl.is_defined(high_confidence_intervals_ht[truth_mt.locus]) ) rename_entries = {"GT": "_GT"} if "adj" in truth_mt.entry: rename_entries.update({"adj": "_adj"}) truth_mt = truth_mt.rename(rename_entries) return truth_mt.annotate_rows( **{x: hl.agg.take(truth_mt[f"_{x}"], 1)[0] for x in rename_entries} ).rows() # Load truth sample MT, # restrict it to high confidence intervals # and flatten it to a HT by annotating GT in a row annotation truth_ht = split_filter_and_flatten_ht(truth_mt, high_confidence_intervals_ht) truth_ht = truth_ht.rename({f: f"truth_{f}" for f in truth_ht.row_value}) # Similarly load, filter and flatten callset truth sample MT ht = split_filter_and_flatten_ht(mt, high_confidence_intervals_ht) # Outer join of truth and callset truth and annotate the score and global bin ht = truth_ht.join(ht, how="outer") ht = ht.annotate(snv=hl.is_snp(ht.alleles[0], ht.alleles[1])) return ht def add_rank( ht: hl.Table, score_expr: hl.expr.NumericExpression, subrank_expr: Optional[Dict[str, hl.expr.BooleanExpression]] = None, ) -> hl.Table: """ Add rank based on the `score_expr`. Rank is added for snvs and indels separately. If one or more `subrank_expr` are provided, then subrank is added based on all sites for which the boolean expression is true. In addition, variant counts (snv, indel separately) are added as a global (`rank_variant_counts`). :param ht: input Hail Table containing variants (with QC annotations) to be ranked :param score_expr: the Table annotation by which ranking should be scored :param subrank_expr: Any subranking to be added in the form name_of_subrank: subrank_filtering_expr :return: Table with rankings added """ key = ht.key if subrank_expr is None: subrank_expr = {} temp_expr = {"_score": score_expr} temp_expr.update({f"_{name}": expr for name, expr in subrank_expr.items()}) rank_ht = ht.select(**temp_expr, is_snv=hl.is_snp(ht.alleles[0], ht.alleles[1])) rank_ht = rank_ht.key_by("_score").persist() scan_expr = { "rank": hl.if_else( rank_ht.is_snv, hl.scan.count_where(rank_ht.is_snv), hl.scan.count_where(~rank_ht.is_snv), ) } scan_expr.update( { name: hl.or_missing( rank_ht[f"_{name}"], hl.if_else( rank_ht.is_snv, hl.scan.count_where(rank_ht.is_snv & rank_ht[f"_{name}"]), hl.scan.count_where(~rank_ht.is_snv & rank_ht[f"_{name}"]), ), ) for name in subrank_expr } ) rank_ht = rank_ht.annotate(**scan_expr) rank_ht = rank_ht.key_by(*key).persist() rank_ht = rank_ht.select(*scan_expr.keys()) ht = ht.annotate(**rank_ht[key]) return ht
compute_grouped_binned_ht
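Stripped of the Hail machinery, the bin assignment in `compute_ranked_bin` is `bin = floor(n_bins * rank / n) + 1`, flipped to `n_bins - bin + 1` when `desc=True`, with ranks counted per SNV/indel stratum when `compute_snv_indel_separately=True`. A small Go sketch of just that arithmetic, reproducing the Var1-Var4 table from the docstring (ascending scores, `n_bins = 2`):

```go
package main

import "fmt"

// bin reproduces the core arithmetic of compute_ranked_bin: rows are ranked
// 0..n-1 by score, then bin = floor(nBins * rank / n) + 1. (The Hail version
// additionally flips this to nBins - bin + 1 when desc=True.)
func bin(rank, n, nBins int) int {
	return nBins*rank/n + 1 // integer division is floor for non-negative values
}

func main() {
	// Var1..Var4 from the docstring: two SNVs then two indels, n_bins = 2.
	combined := []int{0, 1, 2, 3} // ranks over all four variants together
	perType := []int{0, 1, 0, 1}  // ranks within the SNV / indel strata
	for i := range combined {
		fmt.Printf("Var%d: combined bin %d, stratified bin %d\n",
			i+1, bin(combined[i], 4, 2), bin(perType[i], 2, 2))
	}
}
```

This prints combined bins 1, 1, 2, 2 and stratified bins 1, 2, 1, 2, matching the two columns of the docstring table.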
prover.rs
//! Interpreter with enhanced functionality to prove statements. mod context_extension; mod prover_result; pub mod hint; use crate::eval::reduce_to_crypto; use crate::sigma_protocol::crypto_utils::secure_random_bytes; use crate::sigma_protocol::dht_protocol; use crate::sigma_protocol::fiat_shamir::fiat_shamir_hash_fn; use crate::sigma_protocol::fiat_shamir::fiat_shamir_tree_to_bytes; use crate::sigma_protocol::gf2_192::gf2_192poly_from_byte_array; use crate::sigma_protocol::proof_tree::ProofTree; use crate::sigma_protocol::unchecked_tree::UncheckedDhTuple; use crate::sigma_protocol::unproven_tree::CandUnproven; use crate::sigma_protocol::unproven_tree::CorUnproven; use crate::sigma_protocol::unproven_tree::NodePosition; use crate::sigma_protocol::unproven_tree::UnprovenDhTuple; use crate::sigma_protocol::Challenge; use crate::sigma_protocol::UnprovenLeaf; use crate::sigma_protocol::SOUNDNESS_BYTES; use ergotree_ir::sigma_protocol::sigma_boolean::SigmaBoolean; use ergotree_ir::sigma_protocol::sigma_boolean::SigmaConjectureItems; use gf2_192::gf2_192poly::Gf2_192Poly; use gf2_192::gf2_192poly::Gf2_192PolyError; use gf2_192::Gf2_192Error; use std::convert::TryInto; use std::rc::Rc; pub use context_extension::*; use ergotree_ir::ergo_tree::ErgoTree; use ergotree_ir::ergo_tree::ErgoTreeError; use ergotree_ir::sigma_protocol::sigma_boolean::SigmaConjecture; use ergotree_ir::sigma_protocol::sigma_boolean::SigmaProofOfKnowledgeTree; pub use prover_result::*; use self::hint::HintsBag; use super::dlog_protocol; use super::fiat_shamir::FiatShamirTreeSerializationError; use super::private_input::PrivateInput; use super::proof_tree; use super::proof_tree::ProofTreeLeaf; use super::sig_serializer::serialize_sig; use super::unchecked_tree::UncheckedConjecture; use super::unchecked_tree::UncheckedSchnorr; use super::unchecked_tree::UncheckedTree; use super::unproven_tree::CthresholdUnproven; use super::unproven_tree::UnprovenConjecture; use super::unproven_tree::UnprovenSchnorr; use super::unproven_tree::UnprovenTree; use super::FirstProverMessage::FirstDhtProverMessage; use super::FirstProverMessage::FirstDlogProverMessage; use crate::eval::context::Context; use crate::eval::env::Env; use crate::eval::EvalError; use thiserror::Error; /// Prover errors #[derive(Error, PartialEq, Eq, Debug, Clone)] pub enum ProverError { /// Failed to parse ErgoTree #[error("Ergo tree error: {0}")] ErgoTreeError(ErgoTreeError), /// Failed to evaluate ErgoTree #[error("Evaluation error: {0}")] EvalError(EvalError), /// `gf2_192` error #[error("gf2_192 error: {0}")] Gf2_192Error(Gf2_192Error), /// Script reduced to false #[error("Script reduced to false")] ReducedToFalse, /// Failed on step2(prover does not have enough witnesses to perform the proof) #[error("Failed on step2(prover does not have enough witnesses to perform the proof)")] TreeRootIsNotReal, /// Simulated leaf does not have challenge #[error("Simulated leaf does not have challenge")] SimulatedLeafWithoutChallenge, /// Lacking challenge on step 9 for "real" unproven tree #[error("Lacking challenge on step 9 for \"real\" unproven tree")] RealUnprovenTreeWithoutChallenge, /// Cannot find a secret for "real" unproven leaf #[error("Cannot find a secret for \"real\" unproven leaf")] SecretNotFound, /// Unexpected value encountered #[error("Unexpected: {0}")] Unexpected(String), /// Error while tree serialization for Fiat-Shamir hash #[error("Fiat-Shamir tree serialization error: {0}")] FiatShamirTreeSerializationError(FiatShamirTreeSerializationError), /// Not yet 
implemented #[error("not yet implemented: {0}")] NotYetImplemented(String), } impl From<ErgoTreeError> for ProverError { fn from(e: ErgoTreeError) -> Self { ProverError::ErgoTreeError(e) } } impl From<FiatShamirTreeSerializationError> for ProverError { fn from(e: FiatShamirTreeSerializationError) -> Self { ProverError::FiatShamirTreeSerializationError(e) } } impl From<Gf2_192Error> for ProverError { fn from(e: Gf2_192Error) -> Self { ProverError::Gf2_192Error(e) } } impl From<Gf2_192PolyError> for ProverError { fn from(e: Gf2_192PolyError) -> Self { ProverError::Gf2_192Error(Gf2_192Error::Gf2_192PolyError(e)) } } /// Prover pub trait Prover { /// Secrets of the prover fn secrets(&self) -> &[PrivateInput]; /// Add an extra secret to the prover fn append_secret(&mut self, input: PrivateInput); /// The comments in this section are taken from the algorithm for the /// Sigma-protocol prover as described in the ErgoScript white-paper /// <https://ergoplatform.org/docs/ErgoScript.pdf>, Appendix A /// /// Generate proofs for the given message for ErgoTree reduced to Sigma boolean expression fn prove( &self, tree: &ErgoTree, env: &Env, ctx: Rc<Context>, message: &[u8], hints_bag: &HintsBag, ) -> Result<ProverResult, ProverError> { let expr = tree.proposition()?; let ctx_ext = ctx.extension.clone(); let reduction_result = reduce_to_crypto(expr.as_ref(), env, ctx).map_err(ProverError::EvalError)?; self.generate_proof(reduction_result.sigma_prop, message, hints_bag, ctx_ext) } /// Generate proofs for the given message for the given Sigma boolean expression fn generate_proof( &self, sigmabool: SigmaBoolean, message: &[u8], hints_bag: &HintsBag, ctx_ext: ContextExtension, ) -> Result<ProverResult, ProverError> { let unchecked_tree_opt = match sigmabool { SigmaBoolean::TrivialProp(true) => Ok(None), SigmaBoolean::TrivialProp(false) => Err(ProverError::ReducedToFalse), sb => { let tree = convert_to_unproven(sb)?; let unchecked_tree = prove_to_unchecked(self, tree, message, hints_bag)?; Ok(Some(unchecked_tree)) } }?; let proof = match unchecked_tree_opt { Some(tree) => serialize_sig(tree), None => ProofBytes::Empty, }; Ok(ProverResult { proof, extension: ctx_ext, }) } } /// The comments in this section are taken from the algorithm for the /// Sigma-protocol prover as described in the white paper /// <https://ergoplatform.org/docs/ErgoScript.pdf> (Appendix A) // if we are concerned about timing attacks against the prover, we should make sure that this code // takes the same amount of time regardless of which nodes are real and which nodes are simulated // In particular, we should avoid the use of exists and forall, because they short-circuit the evaluation // once the right value is (or is not) found. We should also make all loops look similar, the same // amount of copying is done regardless of what's real or simulated, // real vs. simulated computations take the same time, etc. fn prove_to_unchecked<P: Prover + ?Sized>( prover: &P, unproven_tree: UnprovenTree, message: &[u8], hints_bag: &HintsBag, ) -> Result<UncheckedTree, ProverError> { // Prover Step 1: Mark as real everything the prover can prove let step1 = mark_real(prover, unproven_tree, hints_bag)?; // dbg!(&step1); // Prover Step 2: If the root of the tree is marked "simulated" then the prover does not have enough witnesses // to perform the proof. Abort. 
if !step1.is_real() { return Err(ProverError::TreeRootIsNotReal); } // Prover Step 3: Change some "real" nodes to "simulated" to make sure each node // has the right number of simulated children. let step3 = polish_simulated(prover, step1)?; // dbg!(&step3); // Prover Steps 4, 5, and 6 together: find challenges for simulated nodes; simulate simulated leaves; // compute commitments for real leaves let step6 = simulate_and_commit(step3, hints_bag)?; // dbg!(&step6); // Prover Step 7: convert the relevant information in the tree (namely, tree structure, node types, // the statements being proven and commitments at the leaves) // to a string let mut s = fiat_shamir_tree_to_bytes(&step6.clone().into())?; // Prover Step 8: compute the challenge for the root of the tree as the Fiat-Shamir hash of s // and the message being signed. s.append(&mut message.to_vec()); let root_challenge: Challenge = fiat_shamir_hash_fn(s.as_slice()).into(); let step8 = step6.with_challenge(root_challenge); // dbg!(&step8); // Prover Step 9: complete the proof by computing challenges at real nodes and additionally responses at real leaves let step9 = proving(prover, step8.into(), hints_bag)?; // dbg!(&step9); // Prover Step 10: output the right information into the proof convert_to_unchecked(step9) } /** Prover Step 1: This step will mark as "real" every node for which the prover can produce a real proof. This step may mark as "real" more nodes than necessary if the prover has more than the minimal necessary number of witnesses (for example, more than one child of an OR). This will be corrected in the next step. In a bottom-up traversal of the tree, do the following for each node: */ fn mark_real<P: Prover + ?Sized>( prover: &P, unproven_tree: UnprovenTree, hints_bag: &HintsBag, ) -> Result<UnprovenTree, ProverError> { proof_tree::rewrite(unproven_tree.into(), &|tree| { Ok(match tree { ProofTree::UnprovenTree(unp) => match unp { UnprovenTree::UnprovenLeaf(unp_leaf) => { // If the node is a leaf, mark it "real" if either the witness for it is // available or a hint shows the secret is known to an external participant in multi-signing; // else mark it "simulated" let secret_known = hints_bag.real_images().contains(&unp_leaf.proposition()) || prover .secrets() .iter() .any(|s| s.public_image() == unp_leaf.proposition()); Some(unp_leaf.clone().with_simulated(!secret_known).into()) } UnprovenTree::UnprovenConjecture(unp_conj) => match unp_conj { UnprovenConjecture::CandUnproven(cand) => { // If the node is AND, mark it "real" if all of its children are marked real; else mark it "simulated" let simulated = cast_to_unp(cand.children.clone())? .iter() .any(|c| c.simulated()); Some( CandUnproven { simulated, ..cand.clone() } .into(), ) } UnprovenConjecture::CorUnproven(cor) => { // If the node is OR, mark it "real" if at least one child is marked real; else mark it "simulated" let simulated = cast_to_unp(cor.children.clone())? .iter() .all(|c| c.simulated()); Some( CorUnproven { simulated, ..cor.clone() } .into(), ) } UnprovenConjecture::CthresholdUnproven(ct) => { // If the node is THRESHOLD(k), mark it "real" if at least k of its children are marked real; else mark it "simulated" let simulated = cast_to_unp(ct.children.clone())? .iter() .filter(|c| c.is_real()) .count() < ct.k as usize; Some( CthresholdUnproven { simulated, ..ct.clone() } .into(), ) } }, }, ProofTree::UncheckedTree(_) => None, }) })?
.try_into() .map_err(|e: &str| ProverError::Unexpected(e.to_string())) } /// Set positions for children of an unproven inner node (conjecture, so AND/OR/THRESHOLD) fn set_positions(uc: UnprovenConjecture) -> Result<UnprovenConjecture, ProverError> { let upd_children = uc .children() .try_mapped(|c| match c { ProofTree::UncheckedTree(unch) => Err(ProverError::Unexpected(format!( "set_positions: unexpected UncheckedTree: {:?}", unch ))), ProofTree::UnprovenTree(unp) => Ok(unp), })? .enumerated() .mapped(|(idx, utree)| utree.with_position(uc.position().child(idx)).into()); Ok(match uc { UnprovenConjecture::CandUnproven(cand) => cand.with_children(upd_children).into(), UnprovenConjecture::CorUnproven(cor) => cor.with_children(upd_children).into(), UnprovenConjecture::CthresholdUnproven(ct) => ct.with_children(upd_children).into(), }) } /// If the node is OR marked "real", mark all but one of its children "simulated" /// (the node is guaranteed by step 1 to have at least one "real" child). /// Which particular child is left "real" is not important for security; /// the choice can be guided by efficiency or convenience considerations. fn make_cor_children_simulated(cor: CorUnproven) -> Result<CorUnproven, ProverError> { let casted_children = cast_to_unp(cor.children)?; let first_real_child = casted_children .iter() .find(|it| it.is_real()) .ok_or_else(|| { ProverError::Unexpected(format!( "make_cor_children_simulated: no real child is found among: {:?}", casted_children )) })?; let children = casted_children .clone() .mapped(|c| { if &c == first_real_child || c.simulated() { c } else { c.with_simulated(true) } }) .mapped(|c| c.into()); Ok(CorUnproven { children, ..cor }) } fn cast_to_unp( children: SigmaConjectureItems<ProofTree>, ) -> Result<SigmaConjectureItems<UnprovenTree>, ProverError> { children.try_mapped(|c| { if let ProofTree::UnprovenTree(ut) = c { Ok(ut) } else { Err(ProverError::Unexpected(format!( "cast_to_unp: expected UnprovenTree, got: {:?}", c ))) } }) } /// Prover Step 3: This step will change some "real" nodes to "simulated" to make sure each node has /// the right number of simulated children. /// In a top-down traversal of the tree, do the following for each node: fn polish_simulated<P: Prover + ?Sized>( _prover: &P, unproven_tree: UnprovenTree, ) -> Result<UnprovenTree, ProverError> { proof_tree::rewrite(unproven_tree.into(), &|tree| match tree { ProofTree::UnprovenTree(ut) => match ut { UnprovenTree::UnprovenLeaf(_) => Ok(None), UnprovenTree::UnprovenConjecture(conj) => match conj { UnprovenConjecture::CandUnproven(cand) => { // If the node is marked "simulated", mark all of its children "simulated" let a: CandUnproven = if cand.simulated { cand.clone().with_children( cast_to_unp(cand.children.clone())? .mapped(|c| c.with_simulated(true).into()), ) } else { cand.clone() }; Ok(Some(set_positions(a.into())?.into())) } UnprovenConjecture::CorUnproven(cor) => { // If the node is marked "simulated", mark all of its children "simulated" let o: CorUnproven = if cor.simulated { CorUnproven { children: cast_to_unp(cor.children.clone())? .mapped(|c| c.with_simulated(true).into()), ..cor.clone() } } else { // If the node is OR marked "real", mark all but one of its children "simulated" make_cor_children_simulated(cor.clone())?
}; Ok(Some(set_positions(o.into())?.into())) } UnprovenConjecture::CthresholdUnproven(ct) => { // If the node is marked "simulated", mark all of its children "simulated" let t: CthresholdUnproven = if ct.simulated { CthresholdUnproven { children: cast_to_unp(ct.children.clone())? .mapped(|c| c.with_simulated(true).into()), ..ct.clone() } } else { // If the node is THRESHOLD(k) marked "real", mark all but k of its children "simulated" // (the node is guaranteed, by the previous step, to have at least k "real" children). // Which particular ones are left "real" is not important for security; // the choice can be guided by efficiency or convenience considerations. // // We'll mark the first k real ones real let mut count_of_real = 0; let mut children_indices_to_be_marked_simulated = Vec::new(); let unproven_children = cast_to_unp(ct.children.clone())?; for (idx, kid) in unproven_children.clone().enumerated() { if kid.is_real() { count_of_real += 1; if count_of_real > ct.k { children_indices_to_be_marked_simulated.push(idx); }; }; } CthresholdUnproven { children: unproven_children.enumerated().mapped(|(idx, c)| { if children_indices_to_be_marked_simulated.contains(&idx) { c.with_simulated(true) } else { c } .into() }), ..ct.clone() } }; Ok(Some(set_positions(t.into())?.into())) } }, }, ProofTree::UncheckedTree(_) => Ok(None), })? .try_into() .map_err(|e: &str| ProverError::Unexpected(e.to_string())) } fn step4_real_conj( uc: UnprovenConjecture, hints_bag: &HintsBag, ) -> Result<Option<ProofTree>, ProverError> { assert!(uc.is_real()); match uc { // A real AND node has no simulated children UnprovenConjecture::CandUnproven(_) => Ok(None), // real OR / THRESHOLD case UnprovenConjecture::CorUnproven(_) | UnprovenConjecture::CthresholdUnproven(_) => { let new_children = cast_to_unp(uc.children())? .mapped(|c| { if c.is_real() { c } else { // take challenge from previously done proof stored in the hints bag, // or generate random challenge for simulated child let new_challenge: Challenge = hints_bag .proofs() .into_iter() .find(|p| p.position() == c.position()) .map(|p| p.challenge().clone()) .unwrap_or_else(Challenge::secure_random); c.with_challenge(new_challenge) } }) .mapped(|c| c.into()); Ok(Some( uc.with_children(new_children).into() // CorUnproven { // children: new_children, // ..cor.clone() // } // .into(), )) } } } fn step4_simulated_and_conj(cand: CandUnproven) -> Result<Option<ProofTree>, ProverError> { assert!(cand.simulated); // If the node is AND, then all of its children get e_0 as the challenge if let Some(challenge) = cand.challenge_opt.clone() { let new_children = cand .children .clone() .mapped(|it| it.with_challenge(challenge.clone())); Ok(Some( CandUnproven { children: new_children, ..cand } .into(), )) } else { Err(ProverError::Unexpected( "simulate_and_commit: missing CandUnproven(simulated).challenge".to_string(), )) } } fn step4_simulated_or_conj(cor: CorUnproven) -> Result<Option<ProofTree>, ProverError> { // If the node is OR, then each of its children except one gets a fresh uniformly random // challenge in {0,1}^t. The remaining child gets a challenge computed as an XOR of the challenges of all // the other children and e_0.
    assert!(cor.simulated);
    if let Some(challenge) = cor.challenge_opt.clone() {
        let unproven_children = cast_to_unp(cor.children.clone())?;
        let mut tail: Vec<UnprovenTree> = unproven_children
            .clone()
            .into_iter()
            .skip(1)
            .map(|it| it.with_challenge(Challenge::secure_random()))
            .collect();
        let mut xored_challenge = challenge;
        for it in &tail {
            xored_challenge = xored_challenge.xor(
                it.challenge()
                    .ok_or_else(|| ProverError::Unexpected(format!("no challenge in {:?}", it)))?,
            );
        }
        let head = unproven_children
            .first()
            .clone()
            .with_challenge(xored_challenge);
        let mut new_children = vec![head];
        new_children.append(&mut tail);
        #[allow(clippy::unwrap_used)] // since quantity is preserved unwrap is safe here
        Ok(Some(
            CorUnproven {
                children: new_children
                    .into_iter()
                    .map(|c| c.into())
                    .collect::<Vec<ProofTree>>()
                    .try_into()
                    .unwrap(),
                ..cor
            }
            .into(),
        ))
    } else {
        Err(ProverError::Unexpected(
            "simulate_and_commit: missing CorUnproven(simulated).challenge".to_string(),
        ))
    }
}

fn step4_simulated_threshold_conj(
    ct: CthresholdUnproven,
) -> Result<Option<ProofTree>, ProverError>
fn step5_schnorr( us: UnprovenSchnorr, hints_bag: &HintsBag, ) -> Result<Option<ProofTree>, ProverError> { // Steps 5 & 6: first try pulling out commitment from the hints bag. If it exists proceed with it, // otherwise, compute the commitment (if the node is real) or simulate it (if the node is simulated) // Step 6 (real leaf -- compute the commitment a or take it from the hints bag) let res: ProofTree = match hints_bag .commitments() .into_iter() .find(|c| c.position() == &us.position) { Some(cmt_hint) => { let pt: ProofTree = UnprovenSchnorr { commitment_opt: Some( cmt_hint .commitment() .clone() .try_into() .map_err(|e: &str| ProverError::Unexpected(e.to_string()))?, ), ..us.clone() } .into(); pt } None => { if us.simulated { // Step 5 (simulated leaf -- complete the simulation) if let Some(challenge) = us.challenge_opt.clone() { let (fm, sm) = dlog_protocol::interactive_prover::simulate(&us.proposition, &challenge); Ok(ProofTree::UncheckedTree( UncheckedSchnorr { proposition: us.proposition.clone(), commitment_opt: Some(fm), challenge, second_message: sm, } .into(), )) } else { Err(ProverError::SimulatedLeafWithoutChallenge) } } else { // Step 6 (real leaf -- compute the commitment a) let (r, commitment) = dlog_protocol::interactive_prover::first_message(); Ok(ProofTree::UnprovenTree( UnprovenSchnorr { commitment_opt: Some(commitment), randomness_opt: Some(r), ..us.clone() } .into(), )) }? } }; Ok(Some(res)) } fn step5_diffie_hellman_tuple( dhu: UnprovenDhTuple, hints_bag: &HintsBag, ) -> Result<Option<ProofTree>, ProverError> { //Steps 5 & 6: pull out commitment from the hints bag, otherwise, compute the commitment(if the node is real), // or simulate it (if the node is simulated) // Step 6 (real leaf -- compute the commitment a or take it from the hints bag) let res: Result<ProofTree, _> = hints_bag .commitments() .iter() .find(|c| c.position() == &dhu.position) .map(|cmt_hint| { Ok(dhu .clone() .with_commitment(match cmt_hint.commitment() { FirstDlogProverMessage(_) => { return Err(ProverError::Unexpected( "Step 5 & 6 for UnprovenDhTuple: FirstDlogProverMessage is not expected here".to_string(), )) } FirstDhtProverMessage(dhtm) => dhtm.clone(), }) .into()) }) .unwrap_or_else(|| { if dhu.simulated { // Step 5 (simulated leaf -- complete the simulation) if let Some(dhu_challenge) = dhu.challenge_opt.clone() { let (fm, sm) = dht_protocol::interactive_prover::simulate( &dhu.proposition, &dhu_challenge, ); Ok(UncheckedDhTuple { proposition: dhu.proposition.clone(), commitment_opt: Some(fm), challenge: dhu_challenge, second_message: sm, } .into()) } else { Err(ProverError::SimulatedLeafWithoutChallenge) } } else { // Step 6 -- compute the commitment let (r, fm) = dht_protocol::interactive_prover::first_message(&dhu.proposition); Ok(UnprovenDhTuple { commitment_opt: Some(fm), randomness_opt: Some(r), ..dhu.clone() } .into()) } }); Ok(Some(res?)) } /** Prover Step 4: In a top-down traversal of the tree, compute the challenges e for simulated children of every node Prover Step 5: For every leaf marked "simulated", use the simulator of the Sigma-protocol for that leaf to compute the commitment $a$ and the response z, given the challenge e that is already stored in the leaf. Prover Step 6: For every leaf marked "real", use the first prover step of the Sigma-protocol for that leaf to compute the commitment a. 
*/
fn simulate_and_commit(
    unproven_tree: UnprovenTree,
    hints_bag: &HintsBag,
) -> Result<UnprovenTree, ProverError> {
    proof_tree::rewrite(unproven_tree.into(), &|tree| {
        match tree {
            // Step 4 part 1: If the node is marked "real", then each of its simulated children gets a fresh uniformly
            // random challenge in {0,1}^t.
            ProofTree::UnprovenTree(UnprovenTree::UnprovenConjecture(uc)) => {
                if uc.is_real() {
                    step4_real_conj(uc.clone(), hints_bag)
                } else {
                    match uc {
                        // Step 4 part 2: If the node is marked "simulated", let e_0 be the challenge computed for it.
                        // All of its children are simulated, and thus we compute challenges for all
                        // of them, as follows:
                        UnprovenConjecture::CandUnproven(cand) => {
                            step4_simulated_and_conj(cand.clone())
                        }
                        UnprovenConjecture::CorUnproven(cor) => {
                            step4_simulated_or_conj(cor.clone())
                        }
                        UnprovenConjecture::CthresholdUnproven(ct) => {
                            step4_simulated_threshold_conj(ct.clone())
                        }
                    }
                }
            }
            ProofTree::UnprovenTree(UnprovenTree::UnprovenLeaf(UnprovenLeaf::UnprovenSchnorr(
                us,
            ))) => step5_schnorr(us.clone(), hints_bag),
            ProofTree::UnprovenTree(UnprovenTree::UnprovenLeaf(UnprovenLeaf::UnprovenDhTuple(
                dhu,
            ))) => step5_diffie_hellman_tuple(dhu.clone(), hints_bag),
            ProofTree::UncheckedTree(_) => Ok(None),
        }
    })?
    .try_into()
    .map_err(|e: &str| ProverError::Unexpected(e.to_string()))
}

fn step9_real_and(cand: CandUnproven) -> Result<Option<ProofTree>, ProverError> {
    assert!(cand.is_real());
    // If the node is AND, let each of its children have the challenge e_0
    if let Some(challenge) = cand.challenge_opt.clone() {
        let updated = cand
            .clone()
            .children
            .mapped(|child| child.with_challenge(challenge.clone()));
        Ok(Some(cand.with_children(updated).into()))
    } else {
        Err(ProverError::Unexpected(
            "proving: CandUnproven.challenge_opt is empty".to_string(),
        ))
    }
}

fn step9_real_or(cor: CorUnproven) -> Result<Option<ProofTree>, ProverError> {
    assert!(cor.is_real());
    // If the node is OR, it has only one child marked "real".
    // Let this child have the challenge equal to the XOR of the challenges of all
    // the other children and e_0
    if let Some(root_challenge) = &cor.challenge_opt {
        let challenge: Challenge = cor
            .children
            .clone()
            .iter()
            .flat_map(|c| c.challenge())
            .fold(root_challenge.clone(), |acc, c| acc.xor(c));
        let children = cor.children.clone().mapped(|c| match c {
            ProofTree::UnprovenTree(ref ut) if ut.is_real() => c.with_challenge(challenge.clone()),
            _ => c,
        });
        Ok(Some(
            CorUnproven {
                children,
                ..cor.clone()
            }
            .into(),
        ))
    } else {
        Err(ProverError::Unexpected(
            "proving: CorUnproven.challenge_opt is empty".to_string(),
        ))
    }
}

fn step9_real_threshold(ct: CthresholdUnproven) -> Result<Option<ProofTree>, ProverError> {
    assert!(ct.is_real());
    // If the node is THRESHOLD(k), number its children from 1 to n. Let i_1,..., i_{n-k}
    // be the indices of the children marked "simulated" and e_1, ..., e_{n-k} be
    // their corresponding challenges.
    // Let i_0 = 0. Viewing 0, 1, 2, ..., n and e_0, ..., e_{n-k} as elements of GF(2^t),
    // find (via polynomial interpolation) the lowest-degree polynomial
    // Q(x)=sum_{i=0}^{n-k} a_i x^i over GF(2^t) that is equal to e_j at i_j
    // for each j from 0 to n-k
    // (this polynomial will have n-k+1 coefficients, and the lowest coefficient
    // will be e_0). For child number i of the node, if the child is marked "real",
    // compute its challenge as Q(i) (if the child is marked
    // "simulated", its challenge is already Q(i), by construction of Q).
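    // Added worked micro-example (values hypothetical, added for clarity): with
    // n = 3 children and k = 2, step 3 left exactly one child simulated, say
    // child 2 with challenge e_1. Q is then the unique degree-1 polynomial over
    // GF(2^t) with Q(0) = e_0 and Q(2) = e_1, so the real children's challenges
    // Q(1) and Q(3) are fully determined by e_0 and e_1 -- which is what lets
    // the verifier later recompute and check them.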
    if let Some(challenge) = ct.challenge_opt.clone() {
        let mut points = Vec::new();
        let mut values = Vec::new();
        for (idx, child) in ct.children.clone().enumerated() {
            let one_based_idx = idx + 1;
            let challenge_opt = match child {
                ProofTree::UncheckedTree(ut) => match ut {
                    UncheckedTree::UncheckedLeaf(ul) => Some(ul.challenge()),
                    UncheckedTree::UncheckedConjecture(_) => None,
                },
                ProofTree::UnprovenTree(unpt) => unpt.challenge(),
            };
            if let Some(challenge) = challenge_opt {
                // One byte per interpolation point; the cast to `u8` is safe since
                // `ct.children` is of type `SigmaConjectureItems<_>` which is a
                // `BoundedVec<_, 2, 255>`.
                points.push(one_based_idx as u8);
                values.push(challenge.into());
            };
        }
        let value_at_zero = challenge.into();
        let q = Gf2_192Poly::interpolate(&points, &values, value_at_zero)?;
        let new_children = ct.children.clone().enumerated().mapped(|(idx, child)| {
            // Note the cast to `u8` is safe since `ct.children` is of type
            // `SigmaConjectureItems<_>` which is a `BoundedVec<_, 2, 255>`.
            let one_based_idx = (idx + 1) as u8;
            match &child {
                ProofTree::UnprovenTree(ut) if ut.is_real() => {
                    child.with_challenge(q.evaluate(one_based_idx).into())
                }
                _ => child,
            }
        });
        Ok(Some(
            ct.with_polynomial(q).with_children(new_children).into(),
        ))
    } else {
        Err(ProverError::Unexpected(
            "proving: CthresholdUnproven.challenge_opt is empty".to_string(),
        ))
    }
}

fn step9_real_schnorr<P: Prover + ?Sized>(
    us: UnprovenSchnorr,
    prover: &P,
) -> Result<Option<ProofTree>, ProverError> {
    assert!(us.is_real());
    // If the node is a leaf marked "real", compute its response according to the second prover step
    // of the Sigma-protocol given the commitment, challenge, and witness, or pull response from the hints bag
    if let Some(challenge) = us.challenge_opt.clone() {
        if let Some(priv_key) = prover
            .secrets()
            .iter()
            .flat_map(|s| match s {
                PrivateInput::DlogProverInput(dl) => vec![dl],
                _ => vec![],
            })
            .find(|prover_input| prover_input.public_image() == us.proposition)
        {
            let z = dlog_protocol::interactive_prover::second_message(
                priv_key,
                us.randomness_opt.ok_or_else(|| {
                    ProverError::Unexpected(format!("empty randomness in {:?}", us))
                })?,
                &challenge,
            );
            Ok(Some(
                UncheckedSchnorr {
                    proposition: us.proposition.clone(),
                    commitment_opt: None,
                    challenge,
                    second_message: z,
                }
                .into(),
            ))
        } else {
            Err(ProverError::SecretNotFound)
        }
    } else {
        Err(ProverError::RealUnprovenTreeWithoutChallenge)
    }
}

fn step9_real_dh_tuple<P: Prover + ?Sized>(
    dhu: UnprovenDhTuple,
    prover: &P,
    hints_bag: &HintsBag,
) -> Result<Option<ProofTree>, ProverError> {
    assert!(dhu.is_real());
    // If the node is a leaf marked "real", compute its response according to the second prover step
    // of the Sigma-protocol given the commitment, challenge, and witness, or pull response from
    // the hints bag
    if let Some(dhu_challenge) = dhu.challenge_opt.clone() {
        let priv_key_opt = prover
            .secrets()
            .iter()
            .find(|s| s.public_image() == dhu.proposition.clone().into());
        let z = match priv_key_opt {
            Some(PrivateInput::DhTupleProverInput(priv_key)) => match hints_bag
                .own_commitments()
                .iter()
                .find(|c| c.position == dhu.position)
            {
                Some(commitment_from_hints_bag) => {
                    dht_protocol::interactive_prover::second_message(
                        priv_key,
                        &commitment_from_hints_bag.secret_randomness,
                        &dhu_challenge,
                    )
                }
                None => dht_protocol::interactive_prover::second_message(
                    priv_key,
                    &dhu.randomness_opt.ok_or_else(|| {
                        ProverError::Unexpected(format!("empty randomness in {:?}", dhu))
                    })?,
                    &dhu_challenge,
                ),
            },
            Some(pi) => {
                return Err(ProverError::Unexpected(format!(
                    "Expected DH prover input in prover secrets, got {:?}",
                    pi
                )))
            }
            None => {
                return Err(ProverError::NotYetImplemented(
                    "when secret not found".to_string(),
                ))
            }
}; Ok(Some( UncheckedDhTuple { proposition: dhu.proposition.clone(), commitment_opt: None, challenge: dhu_challenge, second_message: z, } .into(), )) } else { Err(ProverError::RealUnprovenTreeWithoutChallenge) } } /** Prover Step 9: Perform a top-down traversal of only the portion of the tree marked "real" in order to compute the challenge e for every node marked "real" below the root and, additionally, the response z for every leaf marked "real" */ fn proving<P: Prover + ?Sized>( prover: &P, proof_tree: ProofTree, hints_bag: &HintsBag, ) -> Result<ProofTree, ProverError> { proof_tree::rewrite(proof_tree, &|tree| { match &tree { ProofTree::UncheckedTree(unch) => match unch { UncheckedTree::UncheckedLeaf(_) => Ok(None), UncheckedTree::UncheckedConjecture(_) => Err(ProverError::Unexpected(format!( "proving: unexpected {:?}", tree ))), }, ProofTree::UnprovenTree(unproven_tree) => match unproven_tree { UnprovenTree::UnprovenConjecture(conj) => { if conj.is_real() { match conj { UnprovenConjecture::CandUnproven(cand) => step9_real_and(cand.clone()), UnprovenConjecture::CorUnproven(cor) => step9_real_or(cor.clone()), UnprovenConjecture::CthresholdUnproven(ct) => { step9_real_threshold(ct.clone()) } } } else { Ok(None) } } UnprovenTree::UnprovenLeaf(unp_leaf) => { if unp_leaf.is_real() { match unp_leaf { UnprovenLeaf::UnprovenSchnorr(us) => { step9_real_schnorr(us.clone(), prover) } UnprovenLeaf::UnprovenDhTuple(dhu) => { step9_real_dh_tuple(dhu.clone(), prover, hints_bag) } } } else { // if the simulated node is proven by someone else, take it from hints bag let res: ProofTree = hints_bag .simulated_proofs() .into_iter() .find(|proof| proof.image == unp_leaf.proposition()) .map(|proof| proof.unchecked_tree.into()) .unwrap_or_else(|| unp_leaf.clone().into()); Ok(Some(res)) } } }, } }) } fn convert_to_unproven(sb: SigmaBoolean) -> Result<UnprovenTree, ProverError> { Ok(match sb { SigmaBoolean::ProofOfKnowledge(pok) => match pok { SigmaProofOfKnowledgeTree::ProveDhTuple(pdht) => UnprovenDhTuple { proposition: pdht, commitment_opt: None, randomness_opt: None, challenge_opt: None, simulated: false, position: NodePosition::crypto_tree_prefix(), } .into(), SigmaProofOfKnowledgeTree::ProveDlog(prove_dlog) => UnprovenSchnorr { proposition: prove_dlog, commitment_opt: None, randomness_opt: None, challenge_opt: None, simulated: false, position: NodePosition::crypto_tree_prefix(), } .into(), }, SigmaBoolean::SigmaConjecture(conj) => match conj { SigmaConjecture::Cand(cand) => CandUnproven { proposition: cand.clone(), challenge_opt: None, simulated: false, children: cand .items .try_mapped(|it| convert_to_unproven(it).map(Into::into))?, position: NodePosition::crypto_tree_prefix(), } .into(), SigmaConjecture::Cor(cor) => CorUnproven { proposition: cor.clone(), challenge_opt: None, simulated: false, children: cor .items .try_mapped(|it| convert_to_unproven(it).map(Into::into))?, position: NodePosition::crypto_tree_prefix(), } .into(), SigmaConjecture::Cthreshold(ct) => CthresholdUnproven { proposition: ct.clone(), k: ct.k, children: ct .children .try_mapped(|it| convert_to_unproven(it).map(Into::into))?, polinomial_opt: None, challenge_opt: None, simulated: false, position: NodePosition::crypto_tree_prefix(), } .into(), }, SigmaBoolean::TrivialProp(_) => { return Err(ProverError::Unexpected( "TrivialProp is not expected here".to_string(), )) } }) } fn convert_to_unchecked(tree: ProofTree) -> Result<UncheckedTree, ProverError> { match &tree { ProofTree::UncheckedTree(unch_tree) => match unch_tree { 
UncheckedTree::UncheckedLeaf(_) => Ok(unch_tree.clone()), UncheckedTree::UncheckedConjecture(_) => Err(ProverError::Unexpected(format!( "convert_to_unchecked: unexpected {:?}", tree ))), }, ProofTree::UnprovenTree(unp_tree) => match unp_tree { UnprovenTree::UnprovenLeaf(_) => Err(ProverError::Unexpected(format!( "convert_to_unchecked: unexpected {:?}", tree ))), UnprovenTree::UnprovenConjecture(conj) => match conj { UnprovenConjecture::CandUnproven(cand) => Ok(UncheckedConjecture::CandUnchecked { challenge: cand.challenge_opt.clone().ok_or_else(|| { ProverError::Unexpected(format!("no challenge in {:?}", cand)) })?, children: cand.children.clone().try_mapped(convert_to_unchecked)?, } .into()), UnprovenConjecture::CorUnproven(cor) => Ok(UncheckedConjecture::CorUnchecked { challenge: cor.challenge_opt.clone().ok_or_else(|| { ProverError::Unexpected(format!("no challenge in {:?}", cor)) })?, children: cor.children.clone().try_mapped(convert_to_unchecked)?, } .into()), UnprovenConjecture::CthresholdUnproven(ct) => { Ok(UncheckedConjecture::CthresholdUnchecked { challenge: ct.challenge_opt.clone().ok_or_else(|| { ProverError::Unexpected(format!("no challenge in {:?}", ct)) })?, children: ct.children.clone().try_mapped(convert_to_unchecked)?, k: ct.k, polynomial: ct.polinomial_opt.clone().ok_or_else(|| { ProverError::Unexpected(format!("no polynomial in {:?}", ct)) })?, } .into()) } }, }, } } /// Test prover implementation pub struct TestProver { /// secrets to be used in proofs generation pub secrets: Vec<PrivateInput>, } impl Prover for TestProver { fn secrets(&self) -> &[PrivateInput] { self.secrets.as_ref() } fn append_secret(&mut self, input: PrivateInput) { self.secrets.push(input) } } #[allow(clippy::unwrap_used)] #[cfg(test)] mod tests { use super::*; use crate::sigma_protocol::private_input::DhTupleProverInput; use crate::sigma_protocol::private_input::DlogProverInput; use ergotree_ir::mir::constant::Constant; use ergotree_ir::mir::constant::Literal; use ergotree_ir::mir::expr::Expr; use ergotree_ir::mir::sigma_and::SigmaAnd; use ergotree_ir::mir::sigma_or::SigmaOr; use ergotree_ir::types::stype::SType; use sigma_test_util::force_any_val; use std::convert::TryFrom; use std::rc::Rc; #[test] fn test_prove_true_prop() { let bool_true_tree = ErgoTree::try_from(Expr::Const(Constant { tpe: SType::SBoolean, v: Literal::Boolean(true), })) .unwrap(); let message = vec![0u8; 100]; let prover = TestProver { secrets: vec![] }; let res = prover.prove( &bool_true_tree, &Env::empty(), Rc::new(force_any_val::<Context>()), message.as_slice(), &HintsBag::empty(), ); assert!(res.is_ok()); assert_eq!(res.unwrap().proof, ProofBytes::Empty); } #[test] fn test_prove_false_prop() { let bool_false_tree = ErgoTree::try_from(Expr::Const(Constant { tpe: SType::SBoolean, v: Literal::Boolean(false), })) .unwrap(); let message = vec![0u8; 100]; let prover = TestProver { secrets: vec![] }; let res = prover.prove( &bool_false_tree, &Env::empty(), Rc::new(force_any_val::<Context>()), message.as_slice(), &HintsBag::empty(), ); assert!(res.is_err()); assert_eq!(res.err().unwrap(), ProverError::ReducedToFalse); } #[test] fn test_prove_pk_prop() { let secret = DlogProverInput::random(); let pk = secret.public_image(); let tree = ErgoTree::try_from(Expr::Const(pk.into())).unwrap(); let message = vec![0u8; 100]; let prover = TestProver { secrets: vec![PrivateInput::DlogProverInput(secret)], }; let res = prover.prove( &tree, &Env::empty(), Rc::new(force_any_val::<Context>()), message.as_slice(), &HintsBag::empty(), ); 
assert!(res.is_ok()); assert_ne!(res.unwrap().proof, ProofBytes::Empty); } #[test] fn test_prove_pk_and_pk() { let secret1 = DlogProverInput::random(); let secret2 = DlogProverInput::random(); let pk1 = secret1.public_image(); let pk2 = secret2.public_image(); let expr: Expr = SigmaAnd::new(vec![Expr::Const(pk1.into()), Expr::Const(pk2.into())]) .unwrap() .into(); let tree: ErgoTree = expr.try_into().unwrap(); let message = vec![0u8; 100]; let prover = TestProver { secrets: vec![secret1.into(), secret2.into()], }; let res = prover.prove( &tree, &Env::empty(), Rc::new(force_any_val::<Context>()), message.as_slice(), &HintsBag::empty(), ); assert_ne!(res.unwrap().proof, ProofBytes::Empty); } #[test] fn test_prove_pk_and_or() { let secret1 = DlogProverInput::random(); let secret2 = DlogProverInput::random(); let secret3 = DlogProverInput::random(); let pk1 = secret1.public_image(); let pk2 = secret2.public_image(); let pk3 = secret3.public_image(); let expr: Expr = SigmaAnd::new(vec![ Expr::Const(pk1.into()), SigmaOr::new(vec![Expr::Const(pk2.into()), Expr::Const(pk3.into())]) .unwrap() .into(), ]) .unwrap() .into(); let tree: ErgoTree = expr.try_into().unwrap(); let message = vec![0u8; 100]; let prover = TestProver { secrets: vec![secret1.into(), secret2.into()], }; let res = prover.prove( &tree, &Env::empty(), Rc::new(force_any_val::<Context>()), message.as_slice(), &HintsBag::empty(), ); assert_ne!(res.unwrap().proof, ProofBytes::Empty); } #[test] fn test_prove_pk_or_pk() { let secret1 = DlogProverInput::random(); let secret2 = DlogProverInput::random(); let pk1 = secret1.public_image(); let pk2 = secret2.public_image(); let expr: Expr = SigmaOr::new(vec![Expr::Const(pk1.into()), Expr::Const(pk2.into())]) .unwrap() .into(); let tree: ErgoTree = expr.try_into().unwrap(); let message = vec![0u8; 100]; let prover = TestProver { secrets: vec![secret1.into(), secret2.into()], }; let res = prover.prove( &tree, &Env::empty(), Rc::new(force_any_val::<Context>()), message.as_slice(), &HintsBag::empty(), ); assert_ne!(res.unwrap().proof, ProofBytes::Empty); } #[test] fn test_prove_pk_or_and() { let secret1 = DlogProverInput::random(); let secret2 = DlogProverInput::random(); let secret3 = DlogProverInput::random(); let pk1 = secret1.public_image(); let pk2 = secret2.public_image(); let pk3 = secret3.public_image(); let expr: Expr = SigmaOr::new(vec![ Expr::Const(pk1.into()), SigmaAnd::new(vec![Expr::Const(pk2.into()), Expr::Const(pk3.into())]) .unwrap() .into(), ]) .unwrap() .into(); let tree: ErgoTree = expr.try_into().unwrap(); let message = vec![0u8; 100]; let prover = TestProver { secrets: vec![secret2.into(), secret3.into()], }; let res = prover.prove( &tree, &Env::empty(), Rc::new(force_any_val::<Context>()), message.as_slice(), &HintsBag::empty(), ); assert_ne!(res.unwrap().proof, ProofBytes::Empty); } #[test] fn test_prove_dht_prop() { let secret = DhTupleProverInput::random(); let pi = secret.public_image(); let tree = ErgoTree::try_from(Expr::Const(pi.clone().into())).unwrap(); let message = vec![0u8; 100]; let prover = TestProver { secrets: vec![PrivateInput::DhTupleProverInput(secret)], }; let res = prover.prove( &tree, &Env::empty(), Rc::new(force_any_val::<Context>()), message.as_slice(), &HintsBag::empty(), ); assert!(res.is_ok()); assert_ne!(res.unwrap().proof, ProofBytes::Empty); } }
{ // The faster algorithm is as follows. Pick n-k fresh uniformly random values // q_1, ..., q_{n-k} from {0,1}^t and let q_0=e_0. // Viewing 1, 2, ..., n and q_0, ..., q_{n-k} as elements of GF(2^t), // evaluate the polynomial Q(x) = sum {q_i x^i} over GF(2^t) at points 1, 2, ..., n // to get challenges for child 1, 2, ..., n, respectively. assert!(ct.simulated); if let Some(challenge) = ct.challenge_opt.clone() { let unproven_children = cast_to_unp(ct.children.clone())?; let n = ct.children.len(); let q = gf2_192poly_from_byte_array( challenge, secure_random_bytes(SOUNDNESS_BYTES * (n - ct.k as usize)), )?; let new_children = unproven_children .enumerated() .mapped(|(idx, c)| { // Note the cast to `u8` is safe since `unproven_children` is of type // `SigmaConjectureItems<_>` which is a `BoundedVec<_, 2, 255>`. let one_based_idx = (idx + 1) as u8; let new_challenge = q.evaluate(one_based_idx).into(); c.with_challenge(new_challenge) }) .mapped(|c| c.into()); Ok(Some( ct.with_polynomial(q).with_children(new_children).into(), )) } else { Err(ProverError::Unexpected( "simulate_and_commit: missing CthresholdUnproven(simulated).challenge".to_string(), )) } }
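// --- Added illustrative aside (not part of the prover source above) ---
// A minimal, dependency-free sketch of the challenge-splitting invariant that
// step4_simulated_or_conj and step9_real_or rely on: all but one child
// challenge is drawn at random and the remaining one is the XOR of the root
// challenge with the others, so XOR-ing every child challenge reconstructs e_0.
// Real challenges are 192-bit (SOUNDNESS_BYTES) values; 4-byte arrays with
// fixed "random" bytes are used here only to keep the sketch deterministic.
fn xor4(a: [u8; 4], b: [u8; 4]) -> [u8; 4] {
    [a[0] ^ b[0], a[1] ^ b[1], a[2] ^ b[2], a[3] ^ b[3]]
}

fn split_challenge(e0: [u8; 4], random_tail: &[[u8; 4]]) -> Vec<[u8; 4]> {
    // head = e0 XOR e_1 XOR ... XOR e_{n-1}, exactly as in step4_simulated_or_conj
    let head = random_tail.iter().fold(e0, |acc, e| xor4(acc, *e));
    let mut children = vec![head];
    children.extend_from_slice(random_tail);
    children
}

fn main() {
    let e0 = [0xde, 0xad, 0xbe, 0xef];
    let tail = [[0x01, 0x02, 0x03, 0x04], [0xaa, 0xbb, 0xcc, 0xdd]];
    let children = split_challenge(e0, &tail);
    // XOR-ing all child challenges together must yield the root challenge again.
    let recombined = children.iter().fold([0u8; 4], |acc, e| xor4(acc, *e));
    assert_eq!(recombined, e0);
}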
create.component.ts
import { Component, OnInit } from '@angular/core'; import {FormControl, FormGroup, Validators} from '@angular/forms'; import {DatasetService} from '../shared/services/dataset.service'; import {AuthService} from '../shared/services/auth.service'; @Component({ selector: 'app-create', templateUrl: './create.component.html', styleUrls: ['./create.component.css'] }) export class
 implements OnInit {

  createDataForm: FormGroup;

  constructor(private dataService: DatasetService, private auth: AuthService) { }

  ngOnInit(): void {
    this.createDataForm = new FormGroup({
      // maxLength (not max) -- setName holds a string; Validators.max is for
      // numeric comparisons and never fires on string values
      setName: new FormControl(null, [Validators.required, Validators.maxLength(50)]),
      public: new FormControl(true, [Validators.required])
    });
    for (let i = 1; i <= 30; i++) {
      this.createDataForm.addControl('q' + i.toString(),
        new FormControl(null, [Validators.required, Validators.minLength(3), Validators.maxLength(200)]));
      this.createDataForm.addControl('a' + i.toString(),
        new FormControl(null, [Validators.required, Validators.minLength(3), Validators.maxLength(50)]));
    }
  }

  saveDataForm(): void {
    const data = this.createDataForm.value;
    if (this.createDataForm.value.public === true) {
      data.public = '1';
    } else {
      data.public = '0';
    }
    data.user_id = this.auth.user.value.id;
    this.dataService.saveDataset(data).subscribe(res => {
      console.log(res);
    });
  }

}
CreateComponent
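// --- Added illustrative aside (not part of the component above) ---
// Sketch of reading the dynamically registered q1..q30 / a1..a30 controls back
// out as pairs. `QaPair` and `collectPairs` are hypothetical helpers written
// for illustration; only the form's `value` map is relied on.
interface QaPair {
  question: string;
  answer: string;
}

function collectPairs(form: { value: Record<string, any> }, count: number): QaPair[] {
  const pairs: QaPair[] = [];
  for (let i = 1; i <= count; i++) {
    pairs.push({
      question: form.value['q' + i],
      answer: form.value['a' + i],
    });
  }
  return pairs;
}
// e.g. collectPairs(this.createDataForm, 30) inside the component.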
TranslationConstants.ts
export const SUPPORT_LANGUAGES = { FI: 'fi', SV: 'sv',
EN: 'en', };
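// Added note (usage is assumed, not shown in this file): the constant typically
// doubles as a whitelist, e.g. Object.values(SUPPORT_LANGUAGES).includes(lang)
// before handing `lang` to the i18n setup.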
schema.rs
// Generated code; do not modify #[derive(Clone, PartialEq, Debug, Deserialize, Serialize)] pub struct HttpHeadersItem { pub name: String, #[serde(default)] pub value: Option<String>, } impl HttpHeadersItem { pub fn new(name: String) -> Self { Self { name, value: None } } } pub type HttpHeaders = Vec<HttpHeadersItem>; #[derive(Clone, PartialEq, Debug, Default, Deserialize, Serialize)] #[serde(rename = "ignition-config")] pub struct IgnitionConfig { #[serde(skip_serializing_if = "Option::is_none")] pub merge: Option<Vec<Resource>>, #[serde(skip_serializing_if = "Option::is_none")] pub replace: Option<Resource>, } #[derive(Clone, PartialEq, Debug, Default, Deserialize, Serialize)] #[serde(rename = "proxy")] pub struct Proxy { #[serde(default)] #[serde(rename = "httpProxy")] pub http_proxy: Option<String>, #[serde(default)] #[serde(rename = "httpsProxy")] pub https_proxy: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "noProxy")] pub no_proxy: Option<Vec<Option<String>>>, } #[derive(Clone, PartialEq, Debug, Default, Deserialize, Serialize)] pub struct SecurityTls { #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "certificateAuthorities")] pub certificate_authorities: Option<Vec<Resource>>, } #[derive(Clone, PartialEq, Debug, Default, Deserialize, Serialize)] #[serde(rename = "security")] pub struct Security { #[serde(skip_serializing_if = "Option::is_none")] pub tls: Option<SecurityTls>, } #[derive(Clone, PartialEq, Debug, Default, Deserialize, Serialize)] #[serde(rename = "timeouts")] pub struct Timeouts { #[serde(default)] #[serde(rename = "httpResponseHeaders")] pub http_response_headers: Option<i64>, #[serde(default)] #[serde(rename = "httpTotal")] pub http_total: Option<i64>, } #[derive(Clone, PartialEq, Debug, Default, Deserialize, Serialize)] #[serde(rename = "ignition")] pub struct Ignition { #[serde(skip_serializing_if = "Option::is_none")] pub config: Option<IgnitionConfig>, #[serde(skip_serializing_if = "Option::is_none")] pub proxy: Option<Proxy>, #[serde(skip_serializing_if = "Option::is_none")] pub security: Option<Security>, #[serde(skip_serializing_if = "Option::is_none")] pub timeouts: Option<Timeouts>, #[serde(skip_serializing_if = "Option::is_none")] pub version: Option<String>, } pub type KernelArgument = String; #[derive(Clone, PartialEq, Debug, Default, Deserialize, Serialize)] #[serde(rename = "kernelArguments")] pub struct KernelArguments { #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "shouldExist")] pub should_exist: Option<Vec<KernelArgument>>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "shouldNotExist")] pub should_not_exist: Option<Vec<KernelArgument>>, } #[derive(Clone, PartialEq, Debug, Deserialize, Serialize)] #[serde(rename = "group")] pub struct Group { #[serde(default)] pub gid: Option<i64>, pub name: String, #[serde(default)] #[serde(rename = "passwordHash")] pub password_hash: Option<String>, #[serde(default)] #[serde(rename = "shouldExist")] pub should_exist: Option<bool>, #[serde(default)] pub system: Option<bool>, } impl Group { pub fn new(name: String) -> Self { Self { gid: None, name, password_hash: None, should_exist: None, system: None, } } } #[derive(Clone, PartialEq, Debug, Deserialize, Serialize)] #[serde(rename = "user")] pub struct User { #[serde(default)] pub gecos: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] pub groups: Option<Vec<String>>, #[serde(default)] #[serde(rename = "homeDir")] pub home_dir: Option<String>, pub 
name: String, #[serde(default)] #[serde(rename = "noCreateHome")] pub no_create_home: Option<bool>, #[serde(default)] #[serde(rename = "noLogInit")] pub no_log_init: Option<bool>, #[serde(default)] #[serde(rename = "noUserGroup")] pub no_user_group: Option<bool>, #[serde(default)] #[serde(rename = "passwordHash")] pub password_hash: Option<String>, #[serde(default)] #[serde(rename = "primaryGroup")] pub primary_group: Option<String>, #[serde(default)] pub shell: Option<String>, #[serde(default)] #[serde(rename = "shouldExist")] pub should_exist: Option<bool>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "sshAuthorizedKeys")] pub ssh_authorized_keys: Option<Vec<String>>, #[serde(default)] pub system: Option<bool>, #[serde(default)] pub uid: Option<i64>, } impl User { pub fn new(name: String) -> Self { Self { gecos: None, groups: None, home_dir: None, name, no_create_home: None, no_log_init: None, no_user_group: None, password_hash: None, primary_group: None, shell: None, should_exist: None, ssh_authorized_keys: None, system: None, uid: None, } } } #[derive(Clone, PartialEq, Debug, Default, Deserialize, Serialize)] #[serde(rename = "passwd")] pub struct Passwd { #[serde(skip_serializing_if = "Option::is_none")] pub groups: Option<Vec<Group>>, #[serde(skip_serializing_if = "Option::is_none")] pub users: Option<Vec<User>>, } #[derive(Clone, PartialEq, Debug, Default, Deserialize, Serialize)] #[serde(rename = "resource")] pub struct Resource { #[serde(default)] pub compression: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "httpHeaders")] pub http_headers: Option<HttpHeaders>, #[serde(default)] pub source: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] pub verification: Option<Verification>, } #[derive(Clone, PartialEq, Debug, Default, Deserialize, Serialize)] #[serde(rename = "clevis")] pub struct Clevis { #[serde(skip_serializing_if = "Option::is_none")] pub custom: Option<ClevisCustom>, #[serde(skip_serializing_if = "Option::is_none")] pub tang: Option<Vec<Tang>>, #[serde(default)] pub threshold: Option<i64>, #[serde(default)] #[serde(rename = "tpm2")] pub tpm2: Option<bool>, } #[derive(Clone, PartialEq, Debug, Default, Deserialize, Serialize)] #[serde(rename = "clevisCustom")] pub struct ClevisCustom { #[serde(default)] pub config: Option<String>, #[serde(default)] #[serde(rename = "needsNetwork")] pub needs_network: Option<bool>, #[serde(default)] pub pin: Option<String>, } #[derive(Clone, PartialEq, Debug, Deserialize, Serialize)] #[serde(rename = "directory")] pub struct Directory { #[serde(skip_serializing_if = "Option::is_none")] pub group: Option<NodeGroup>, #[serde(default)] pub mode: Option<i64>, #[serde(default)] pub overwrite: Option<bool>, pub path: String, #[serde(skip_serializing_if = "Option::is_none")] pub user: Option<NodeUser>, } impl Directory { pub fn new(path: String) -> Self { Self { group: None, mode: None, overwrite: None, path, user: None, } } } #[derive(Clone, PartialEq, Debug, Deserialize, Serialize)] #[serde(rename = "disk")] pub struct Disk { pub device: String, #[serde(skip_serializing_if = "Option::is_none")] pub partitions: Option<Vec<Partition>>, #[serde(default)] #[serde(rename = "wipeTable")] pub wipe_table: Option<bool>, } impl Disk { pub fn new(device: String) -> Self { Self { device, partitions: None, wipe_table: None, } } } #[derive(Clone, PartialEq, Debug, Deserialize, Serialize)] #[serde(rename = "file")] pub struct File { #[serde(skip_serializing_if = "Option::is_none")] 
pub append: Option<Vec<Resource>>, #[serde(skip_serializing_if = "Option::is_none")] pub contents: Option<Resource>, #[serde(skip_serializing_if = "Option::is_none")] pub group: Option<NodeGroup>, #[serde(default)] pub mode: Option<i64>, #[serde(default)] pub overwrite: Option<bool>, pub path: String, #[serde(skip_serializing_if = "Option::is_none")] pub user: Option<NodeUser>, } impl File { pub fn new(path: String) -> Self { Self { append: None, contents: None, group: None, mode: None, overwrite: None, path, user: None, } } } #[derive(Clone, PartialEq, Debug, Deserialize, Serialize)] #[serde(rename = "filesystem")] pub struct Filesystem { pub device: String, #[serde(default)] pub format: Option<String>, #[serde(default)] pub label: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "mountOptions")] pub mount_options: Option<Vec<String>>, #[serde(skip_serializing_if = "Option::is_none")] pub options: Option<Vec<String>>, #[serde(default)] pub path: Option<String>, #[serde(default)] pub uuid: Option<String>, #[serde(default)] #[serde(rename = "wipeFilesystem")] pub wipe_filesystem: Option<bool>, } impl Filesystem { pub fn new(device: String) -> Self { Self { device, format: None, label: None, mount_options: None, options: None, path: None, uuid: None, wipe_filesystem: None, } } } #[derive(Clone, PartialEq, Debug, Deserialize, Serialize)] #[serde(rename = "link")] pub struct Link { #[serde(skip_serializing_if = "Option::is_none")] pub group: Option<NodeGroup>, #[serde(default)] pub hard: Option<bool>, #[serde(default)] pub overwrite: Option<bool>, pub path: String, #[serde(default)] pub target: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] pub user: Option<NodeUser>, } impl Link { pub fn new(path: String) -> Self { Self { group: None, hard: None, overwrite: None, path, target: None, user: None, } } } #[derive(Clone, PartialEq, Debug, Deserialize, Serialize)] #[serde(rename = "luks")] pub struct Luks { #[serde(skip_serializing_if = "Option::is_none")] pub clevis: Option<Clevis>, #[serde(default)] pub device: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "keyFile")] pub key_file: Option<Resource>, #[serde(default)] pub label: Option<String>, pub name: String, #[serde(skip_serializing_if = "Option::is_none")] pub options: Option<Vec<String>>, #[serde(default)] pub uuid: Option<String>, #[serde(default)] #[serde(rename = "wipeVolume")] pub wipe_volume: Option<bool>, } impl Luks { pub fn new(name: String) -> Self { Self { clevis: None, device: None, key_file: None, label: None, name, options: None, uuid: None, wipe_volume: None, } } } #[derive(Clone, PartialEq, Debug, Default, Deserialize, Serialize)] pub struct NodeGroup { #[serde(default)] pub id: Option<i64>, #[serde(default)] pub name: Option<String>, } #[derive(Clone, PartialEq, Debug, Default, Deserialize, Serialize)] pub struct NodeUser { #[serde(default)] pub id: Option<i64>, #[serde(default)] pub name: Option<String>, } #[derive(Clone, PartialEq, Debug, Deserialize, Serialize)] #[serde(rename = "node")] pub struct Node { #[serde(skip_serializing_if = "Option::is_none")] pub group: Option<NodeGroup>, #[serde(default)] pub overwrite: Option<bool>, pub path: String, #[serde(skip_serializing_if = "Option::is_none")] pub user: Option<NodeUser>, } impl Node { pub fn new(path: String) -> Self { Self { group: None, overwrite: None, path, user: None, } } } #[derive(Clone, PartialEq, Debug, Default, Deserialize, Serialize)] #[serde(rename = "partition")] pub 
struct Partition { #[serde(default)] pub guid: Option<String>, #[serde(default)] pub label: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] pub number: Option<i64>, #[serde(default)] pub resize: Option<bool>, #[serde(default)] #[serde(rename = "shouldExist")] pub should_exist: Option<bool>, #[serde(default)] #[serde(rename = "sizeMiB")] pub size_mib: Option<i64>, #[serde(default)] #[serde(rename = "startMiB")] pub start_mib: Option<i64>, #[serde(default)] #[serde(rename = "typeGuid")] pub type_guid: Option<String>, #[serde(default)] #[serde(rename = "wipePartitionEntry")] pub wipe_partition_entry: Option<bool>, } #[derive(Clone, PartialEq, Debug, Deserialize, Serialize)] #[serde(rename = "raid")] pub struct Raid { #[serde(skip_serializing_if = "Option::is_none")] pub devices: Option<Vec<String>>, #[serde(default)] pub level: Option<String>, pub name: String, #[serde(skip_serializing_if = "Option::is_none")] pub options: Option<Vec<String>>, #[serde(default)] pub spares: Option<i64>, } impl Raid { pub fn new(name: String) -> Self { Self { devices: None, level: None, name, options: None, spares: None, } } } #[derive(Clone, PartialEq, Debug, Default, Deserialize, Serialize)] #[serde(rename = "tang")] pub struct Tang { #[serde(default)] pub thumbprint: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] pub url: Option<String>, } #[derive(Clone, PartialEq, Debug, Default, Deserialize, Serialize)] #[serde(rename = "storage")] pub struct Storage { #[serde(skip_serializing_if = "Option::is_none")] pub directories: Option<Vec<Directory>>, #[serde(skip_serializing_if = "Option::is_none")] pub disks: Option<Vec<Disk>>, #[serde(skip_serializing_if = "Option::is_none")] pub files: Option<Vec<File>>, #[serde(skip_serializing_if = "Option::is_none")] pub filesystems: Option<Vec<Filesystem>>, #[serde(skip_serializing_if = "Option::is_none")] pub links: Option<Vec<Link>>, #[serde(skip_serializing_if = "Option::is_none")] pub luks: Option<Vec<Luks>>, #[serde(skip_serializing_if = "Option::is_none")] pub raid: Option<Vec<Raid>>, } #[derive(Clone, PartialEq, Debug, Deserialize, Serialize)] #[serde(rename = "dropin")] pub struct Dropin { #[serde(default)] pub contents: Option<String>, pub name: String, } impl Dropin { pub fn new(name: String) -> Self { Self { contents: None, name, } } } #[derive(Clone, PartialEq, Debug, Deserialize, Serialize)] #[serde(rename = "unit")] pub struct
{ #[serde(default)] pub contents: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] pub dropins: Option<Vec<Dropin>>, #[serde(default)] pub enabled: Option<bool>, #[serde(default)] pub mask: Option<bool>, pub name: String, } impl Unit { pub fn new(name: String) -> Self { Self { contents: None, dropins: None, enabled: None, mask: None, name, } } } #[derive(Clone, PartialEq, Debug, Default, Deserialize, Serialize)] #[serde(rename = "systemd")] pub struct Systemd { #[serde(skip_serializing_if = "Option::is_none")] pub units: Option<Vec<Unit>>, } #[derive(Clone, PartialEq, Debug, Default, Deserialize, Serialize)] #[serde(rename = "verification")] pub struct Verification { #[serde(default)] pub hash: Option<String>, } #[derive(Clone, PartialEq, Debug, Deserialize, Serialize)] pub struct Config { pub ignition: Ignition, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "kernelArguments")] pub kernel_arguments: Option<KernelArguments>, #[serde(skip_serializing_if = "Option::is_none")] pub passwd: Option<Passwd>, #[serde(skip_serializing_if = "Option::is_none")] pub storage: Option<Storage>, #[serde(skip_serializing_if = "Option::is_none")] pub systemd: Option<Systemd>, } impl Config { pub fn new(ignition: Ignition) -> Self { Self { ignition, kernel_arguments: None, passwd: None, storage: None, systemd: None, } } }
Unit
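// --- Added illustrative aside (not part of the generated file above) ---
// Hedged usage sketch: building a minimal Ignition `Config` with the generated
// builders and serializing it. Assumes serde's derives are in scope exactly as
// the generated module assumes, and that `serde_json` is available -- both are
// assumptions made for illustration, not something this file declares.
fn example_config_json() -> serde_json::Result<String> {
    let ignition = Ignition {
        version: Some("3.3.0".to_string()),
        ..Ignition::default()
    };
    let mut hostname = File::new("/etc/hostname".to_string());
    hostname.mode = Some(0o644);
    let config = Config {
        storage: Some(Storage {
            files: Some(vec![hostname]),
            ..Storage::default()
        }),
        ..Config::new(ignition)
    };
    // The skip_serializing_if/default attributes keep unset fields out of the JSON.
    serde_json::to_string_pretty(&config)
}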
structured_errors.rs
use rustc::session::Session; use syntax_pos::Span; use errors::{Applicability, DiagnosticId, DiagnosticBuilder}; use rustc::ty::{Ty, TypeFoldable}; pub trait StructuredDiagnostic<'tcx> { fn session(&self) -> &Session; fn code(&self) -> DiagnosticId; fn common(&self) -> DiagnosticBuilder<'tcx>; fn diagnostic(&self) -> DiagnosticBuilder<'tcx> { let err = self.common(); if self.session().teach(&self.code()) { self.extended(err) } else { self.regular(err) } } fn regular(&self, err: DiagnosticBuilder<'tcx>) -> DiagnosticBuilder<'tcx> { err } fn extended(&self, err: DiagnosticBuilder<'tcx>) -> DiagnosticBuilder<'tcx> { err } } pub struct VariadicError<'tcx> { sess: &'tcx Session, span: Span, t: Ty<'tcx>, cast_ty: &'tcx str, } impl<'tcx> VariadicError<'tcx> { pub fn new(sess: &'tcx Session, span: Span, t: Ty<'tcx>, cast_ty: &'tcx str) -> VariadicError<'tcx> { VariadicError { sess, span, t, cast_ty } } } impl<'tcx> StructuredDiagnostic<'tcx> for VariadicError<'tcx> { fn session(&self) -> &Session { self.sess } fn code(&self) -> DiagnosticId { __diagnostic_used!(E0617); DiagnosticId::Error("E0617".to_owned()) } fn common(&self) -> DiagnosticBuilder<'tcx> { let mut err = if self.t.references_error() { self.sess.diagnostic().struct_dummy() } else { self.sess.struct_span_fatal_with_code( self.span, &format!("can't pass `{}` to variadic function", self.t), self.code(), ) }; if let Ok(snippet) = self.sess.source_map().span_to_snippet(self.span) { err.span_suggestion( self.span, &format!("cast the value to `{}`", self.cast_ty), format!("{} as {}", snippet, self.cast_ty), Applicability::MachineApplicable, ); } else
err } fn extended(&self, mut err: DiagnosticBuilder<'tcx>) -> DiagnosticBuilder<'tcx> { err.note(&format!("certain types, like `{}`, must be cast before passing them to a \ variadic function, because of arcane ABI rules dictated by the C \ standard", self.t)); err } } pub struct SizedUnsizedCastError<'tcx> { sess: &'tcx Session, span: Span, expr_ty: Ty<'tcx>, cast_ty: String, } impl<'tcx> SizedUnsizedCastError<'tcx> { pub fn new(sess: &'tcx Session, span: Span, expr_ty: Ty<'tcx>, cast_ty: String) -> SizedUnsizedCastError<'tcx> { SizedUnsizedCastError { sess, span, expr_ty, cast_ty } } } impl<'tcx> StructuredDiagnostic<'tcx> for SizedUnsizedCastError<'tcx> { fn session(&self) -> &Session { self.sess } fn code(&self) -> DiagnosticId { __diagnostic_used!(E0607); DiagnosticId::Error("E0607".to_owned()) } fn common(&self) -> DiagnosticBuilder<'tcx> { if self.expr_ty.references_error() { self.sess.diagnostic().struct_dummy() } else { self.sess.struct_span_fatal_with_code( self.span, &format!("cannot cast thin pointer `{}` to fat pointer `{}`", self.expr_ty, self.cast_ty), self.code(), ) } } fn extended(&self, mut err: DiagnosticBuilder<'tcx>) -> DiagnosticBuilder<'tcx> { err.help( "Thin pointers are \"simple\" pointers: they are purely a reference to a memory address. Fat pointers are pointers referencing \"Dynamically Sized Types\" (also called DST). DST don't have a statically known size, therefore they can only exist behind some kind of pointers that contain additional information. Slices and trait objects are DSTs. In the case of slices, the additional information the fat pointer holds is their size. To fix this error, don't try to cast directly between thin and fat pointers. For more information about casts, take a look at The Book: https://doc.rust-lang.org/book/first-edition/casting-between-types.html"); err } }
{ err.help(&format!("cast the value to `{}`", self.cast_ty)); }
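// --- Added illustrative aside (not part of the rustc source above) ---
// A self-contained toy re-creation of the dispatch pattern used by
// `StructuredDiagnostic`: `diagnostic()` is a template method that upgrades the
// common message to the verbose form only in teach mode. Plain Strings stand in
// for `Session`/`DiagnosticBuilder`; all names here are illustrative only.
trait ToyStructuredError {
    fn teach_mode(&self) -> bool;
    fn common(&self) -> String;
    fn extended(&self, msg: String) -> String {
        format!("{}\nnote: longer explanation shown under teach mode", msg)
    }
    fn diagnostic(&self) -> String {
        let msg = self.common();
        if self.teach_mode() {
            self.extended(msg)
        } else {
            msg
        }
    }
}

struct ToyVariadicError {
    teach: bool,
}

impl ToyStructuredError for ToyVariadicError {
    fn teach_mode(&self) -> bool {
        self.teach
    }
    fn common(&self) -> String {
        "can't pass value to variadic function".to_string()
    }
}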
naming.go
// Copyright 2016 The LUCI Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package cfgclient import ( "go.chromium.org/gae/service/info" "go.chromium.org/luci/config" "golang.org/x/net/context" ) // ProjectConfigPath is the path of a project's project-wide configuration file. const ProjectConfigPath = "project.cfg" // CurrentServiceName returns the current service name, as used to identify it // in configurations. This is based on the current App ID. func CurrentServiceName(c context.Context) string
// CurrentServiceConfigSet returns the config set for the current AppEngine // service, based on its current service name. func CurrentServiceConfigSet(c context.Context) config.Set { return config.ServiceSet(CurrentServiceName(c)) }
{ return info.TrimmedAppID(c) }
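// --- Added illustrative aside (not part of this file) ---
// Hypothetical caller showing what the helper above resolves to: for App ID
// "my-app" it returns config.ServiceSet("my-app"), i.e. the "services/my-app"
// config set.
func exampleOwnConfigSet(c context.Context) config.Set {
	return CurrentServiceConfigSet(c)
}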
GridSegmentDirection.py
class GridSegmentDirection(Enum,IComparable,IFormattable,IConvertible): """ Specify one of the four adjacent segments to a GridNode. See Autodesk.Revit.DB.DividedSurface. enum GridSegmentDirection,values: NegativeU (1),NegativeV (3),PositiveU (0),PositiveV (2) """ def __eq__(self,*args): """ x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """ pass def __format__(self,*args): """ __format__(formattable: IFormattable,format: str) -> str """ pass def __ge__(self,*args): pass def __gt__(self,*args): pass def __init__(self,*args): """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """ pass def __le__(self,*args): pass def __lt__(self,*args): pass def __ne__(self,*args): pass def __reduce_ex__(self,*args): pass def
(self,*args): pass NegativeU=None NegativeV=None PositiveU=None PositiveV=None value__=None
__str__
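# Added note: this is an auto-generated .NET stub (Autodesk Revit API exposed to
# IronPython), so every method body is `pass` and the enum members
# (NegativeU, ...) are placeholders populated by the CLR at runtime; the stub
# exists only to give editors and type checkers the signatures.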
start_simulator.py
from multiprocessing import freeze_support from pathlib import Path from typing import Dict from deafwave.full_node.full_node import FullNode from deafwave.rpc.full_node_rpc_api import FullNodeRpcApi from deafwave.server.outbound_message import NodeType from deafwave.server.start_service import run_service from deafwave.util.block_tools import BlockTools, test_constants from deafwave.util.config import load_config_cli from deafwave.util.default_root import DEFAULT_ROOT_PATH from deafwave.util.path import mkdir, path_from_root from .full_node_simulator import FullNodeSimulator # See: https://bugs.python.org/issue29288 "".encode("idna") SERVICE_NAME = "full_node" def service_kwargs_for_full_node_simulator(root_path: Path, config: Dict, bt: BlockTools) -> Dict:
def main() -> None: config = load_config_cli(DEFAULT_ROOT_PATH, "config.yaml", SERVICE_NAME) config["database_path"] = config["simulator_database_path"] config["peer_db_path"] = config["simulator_peer_db_path"] config["introducer_peer"]["host"] = "127.0.0.1" config["introducer_peer"]["port"] = 58735 config["selected_network"] = "testnet0" config["simulation"] = True kwargs = service_kwargs_for_full_node_simulator( DEFAULT_ROOT_PATH, config, BlockTools(test_constants), ) return run_service(**kwargs) if __name__ == "__main__": freeze_support() main()
mkdir(path_from_root(root_path, config["database_path"]).parent) constants = bt.constants node = FullNode( config, root_path=root_path, consensus_constants=constants, name=SERVICE_NAME, ) peer_api = FullNodeSimulator(node, bt) network_id = config["selected_network"] kwargs = dict( root_path=root_path, node=node, peer_api=peer_api, node_type=NodeType.FULL_NODE, advertised_port=config["port"], service_name=SERVICE_NAME, server_listen_ports=[config["port"]], on_connect_callback=node.on_connect, rpc_info=(FullNodeRpcApi, config["rpc_port"]), network_id=network_id, ) return kwargs
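# Added note: main() only rewrites config keys (the simulator database/peer-db
# paths, a local introducer endpoint, and "simulation": True) before calling
# service_kwargs_for_full_node_simulator, whose body above builds the same
# kwargs dict every chia-style service hands to run_service -- so the simulator
# reuses the regular full-node startup path with FullNodeSimulator as peer API.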
core_3964_test.py
#coding:utf-8
#
# id:           bugs.core_3964
# title:        It is not possible to create a ddl-trigger with "any DDL statement" clause
# description:
# tracker_id:   CORE-3964
# min_versions: ['3.0']
# versions:     3.0
# qmid:         None

import pytest
from firebird.qa import db_factory, isql_act, Action

# version: 3.0
# resources: None

substitutions_1 = []

init_script_1 = """"""

db_1 = db_factory(page_size=4096, sql_dialect=3, init=init_script_1)

test_script_1 = """
    create table mp$modified_tables (relation_name char(31));
    commit;
    create index mp$modified_tables_idx on mp$modified_tables (relation_name);
    commit;

    set term ^;
    create trigger taa_sql1 active after any ddl statement position 0 as
    begin
        if ( rdb$get_context('DDL_TRIGGER', 'OBJECT_TYPE') = 'TABLE'
             and
             ( rdb$get_context('DDL_TRIGGER', 'EVENT_TYPE') in ('CREATE', 'DROP')
               or
               rdb$get_context('DDL_TRIGGER', 'SQL_SOURCE') containing 'FIELD'
             )
           ) then
            insert into mp$modified_tables (relation_name) values (rdb$get_context('DDL_TRIGGER', 'OBJECT_NAME'));
    end
    ^
    set term ;^
    commit;
"""

act_1 = isql_act('db_1', test_script_1, substitutions=substitutions_1)

@pytest.mark.version('>=3.0')
def test_1(act_1: Action):
act_1.execute()
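# Added note: no expected_stdout is set, so the test's only assertion is that
# the DDL-trigger script executes cleanly -- act_1.execute() raising on an ISQL
# error is what would fail the test.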
testing.js
/** * @license Angular v9.1.6 * (c) 2010-2020 Google LLC. https://angular.io/ * License: MIT */ import { Injectable, Inject, ɵstringify, NgModule, Directive, Component, Pipe, createPlatformFactory, COMPILER_OPTIONS, Injector, CompilerFactory } from '@angular/core'; import { TestComponentRenderer, ɵMetadataOverrider, ɵTestingCompilerFactory } from '@angular/core/testing'; import { ɵplatformCoreDynamic, ɵINTERNAL_BROWSER_DYNAMIC_PLATFORM_PROVIDERS } from '@angular/platform-browser-dynamic'; import { BrowserTestingModule } from '@angular/platform-browser/testing'; import { ɵgetDOM, DOCUMENT } from '@angular/common'; import { CompileReflector, PipeResolver, DirectiveResolver, NgModuleResolver, ERROR_COMPONENT_TYPE } from '@angular/compiler'; import { MockPipeResolver, MockDirectiveResolver, MockNgModuleResolver } from '@angular/compiler/testing'; /** * @fileoverview added by tsickle * Generated from: packages/platform-browser-dynamic/testing/src/dom_test_component_renderer.ts * @suppress {checkTypes,constantProperty,extraRequire,missingOverride,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc */ /** * A DOM based implementation of the TestComponentRenderer. */ import * as ɵngcc0 from '@angular/core'; class DOMTestComponentRenderer extends TestComponentRenderer { /** * @param {?} _doc */ constructor(_doc) { super(); this._doc = _doc; } /** * @param {?} rootElId * @return {?} */ insertRootElement(rootElId) { /** @type {?} */ const template = ɵgetDOM().getDefaultDocument().createElement('template'); template.innerHTML = `<div id="${rootElId}"></div>`; /** @type {?} */ const rootEl = (/** @type {?} */ (getContent(template).firstChild)); // TODO(juliemr): can/should this be optional? /** @type {?} */ const oldRoots = this._doc.querySelectorAll('[id^=root]'); for (let i = 0; i < oldRoots.length; i++) { ɵgetDOM().remove(oldRoots[i]); } this._doc.body.appendChild(rootEl); } } DOMTestComponentRenderer.ɵfac = function DOMTestComponentRenderer_Factory(t) { return new (t || DOMTestComponentRenderer)(ɵngcc0.ɵɵinject(DOCUMENT)); }; DOMTestComponentRenderer.ɵprov = ɵngcc0.ɵɵdefineInjectable({ token: DOMTestComponentRenderer, factory: DOMTestComponentRenderer.ɵfac }); /** @nocollapse */ DOMTestComponentRenderer.ctorParameters = () => [ { type: undefined, decorators: [{ type: Inject, args: [DOCUMENT,] }] } ]; /*@__PURE__*/ (function () { ɵngcc0.ɵsetClassMetadata(DOMTestComponentRenderer, [{ type: Injectable }], function () { return [{ type: undefined, decorators: [{ type: Inject, args: [DOCUMENT] }] }]; }, null); })(); if (false) { /** * @type {?} * @private */ DOMTestComponentRenderer.prototype._doc; } /** * @param {?} node * @return {?} */ function getContent(node) { if ('content' in node) { return ((/** @type {?} */ (node))).content; } else { return node; } } /** * @fileoverview added by tsickle * Generated from: packages/platform-browser-dynamic/testing/src/metadata_overrider.ts * @suppress {checkTypes,constantProperty,extraRequire,missingOverride,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc */ /** * @fileoverview added by tsickle * Generated from: packages/platform-browser-dynamic/testing/src/compiler_factory.ts * @suppress {checkTypes,constantProperty,extraRequire,missingOverride,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc */ /** @type {?} */ const COMPILER_PROVIDERS = [ { provide: MockPipeResolver, deps: [CompileReflector] }, { provide: PipeResolver, useExisting: MockPipeResolver }, { provide: MockDirectiveResolver, deps: [CompileReflector] }, { 
provide: DirectiveResolver, useExisting: MockDirectiveResolver }, { provide: MockNgModuleResolver, deps: [CompileReflector] }, { provide: NgModuleResolver, useExisting: MockNgModuleResolver }, ]; class TestingCompilerFacto
aram {?} _injector * @param {?} _compilerFactory */ constructor(_injector, _compilerFactory) { this._injector = _injector; this._compilerFactory = _compilerFactory; } /** * @param {?} options * @return {?} */ createTestingCompiler(options) { /** @type {?} */ const compiler = (/** @type {?} */ (this._compilerFactory.createCompiler(options))); return new TestingCompilerImpl(compiler, compiler.injector.get(MockDirectiveResolver), compiler.injector.get(MockPipeResolver), compiler.injector.get(MockNgModuleResolver)); } } if (false) { /** * @type {?} * @private */ TestingCompilerFactoryImpl.prototype._injector; /** * @type {?} * @private */ TestingCompilerFactoryImpl.prototype._compilerFactory; } class TestingCompilerImpl { /** * @param {?} _compiler * @param {?} _directiveResolver * @param {?} _pipeResolver * @param {?} _moduleResolver */ constructor(_compiler, _directiveResolver, _pipeResolver, _moduleResolver) { this._compiler = _compiler; this._directiveResolver = _directiveResolver; this._pipeResolver = _pipeResolver; this._moduleResolver = _moduleResolver; this._overrider = new ɵMetadataOverrider(); } /** * @return {?} */ get injector() { return this._compiler.injector; } /** * @template T * @param {?} moduleType * @return {?} */ compileModuleSync(moduleType) { return this._compiler.compileModuleSync(moduleType); } /** * @template T * @param {?} moduleType * @return {?} */ compileModuleAsync(moduleType) { return this._compiler.compileModuleAsync(moduleType); } /** * @template T * @param {?} moduleType * @return {?} */ compileModuleAndAllComponentsSync(moduleType) { return this._compiler.compileModuleAndAllComponentsSync(moduleType); } /** * @template T * @param {?} moduleType * @return {?} */ compileModuleAndAllComponentsAsync(moduleType) { return this._compiler.compileModuleAndAllComponentsAsync(moduleType); } /** * @template T * @param {?} component * @return {?} */ getComponentFactory(component) { return this._compiler.getComponentFactory(component); } /** * @param {?} type * @return {?} */ checkOverrideAllowed(type) { if (this._compiler.hasAotSummary(type)) { throw new Error(`${ɵstringify(type)} was AOT compiled, so its metadata cannot be changed.`); } } /** * @param {?} ngModule * @param {?} override * @return {?} */ overrideModule(ngModule, override) { this.checkOverrideAllowed(ngModule); /** @type {?} */ const oldMetadata = this._moduleResolver.resolve(ngModule, false); this._moduleResolver.setNgModule(ngModule, this._overrider.overrideMetadata(NgModule, oldMetadata, override)); this.clearCacheFor(ngModule); } /** * @param {?} directive * @param {?} override * @return {?} */ overrideDirective(directive, override) { this.checkOverrideAllowed(directive); /** @type {?} */ const oldMetadata = this._directiveResolver.resolve(directive, false); this._directiveResolver.setDirective(directive, this._overrider.overrideMetadata(Directive, (/** @type {?} */ (oldMetadata)), override)); this.clearCacheFor(directive); } /** * @param {?} component * @param {?} override * @return {?} */ overrideComponent(component, override) { this.checkOverrideAllowed(component); /** @type {?} */ const oldMetadata = this._directiveResolver.resolve(component, false); this._directiveResolver.setDirective(component, this._overrider.overrideMetadata(Component, (/** @type {?} */ (oldMetadata)), override)); this.clearCacheFor(component); } /** * @param {?} pipe * @param {?} override * @return {?} */ overridePipe(pipe, override) { this.checkOverrideAllowed(pipe); /** @type {?} */ const oldMetadata = 
this._pipeResolver.resolve(pipe, false); this._pipeResolver.setPipe(pipe, this._overrider.overrideMetadata(Pipe, oldMetadata, override)); this.clearCacheFor(pipe); } /** * @param {?} summaries * @return {?} */ loadAotSummaries(summaries) { this._compiler.loadAotSummaries(summaries); } /** * @return {?} */ clearCache() { this._compiler.clearCache(); } /** * @param {?} type * @return {?} */ clearCacheFor(type) { this._compiler.clearCacheFor(type); } /** * @param {?} error * @return {?} */ getComponentFromError(error) { return ((/** @type {?} */ (error)))[ERROR_COMPONENT_TYPE] || null; } /** * @param {?} moduleType * @return {?} */ getModuleId(moduleType) { return this._moduleResolver.resolve(moduleType, true).id; } } if (false) { /** * @type {?} * @private */ TestingCompilerImpl.prototype._overrider; /** * @type {?} * @private */ TestingCompilerImpl.prototype._compiler; /** * @type {?} * @private */ TestingCompilerImpl.prototype._directiveResolver; /** * @type {?} * @private */ TestingCompilerImpl.prototype._pipeResolver; /** * @type {?} * @private */ TestingCompilerImpl.prototype._moduleResolver; } /** * @fileoverview added by tsickle * Generated from: packages/platform-browser-dynamic/testing/src/platform_core_dynamic_testing.ts * @suppress {checkTypes,constantProperty,extraRequire,missingOverride,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc */ const ɵ0 = { providers: COMPILER_PROVIDERS }; /** * Platform for dynamic tests * * \@publicApi * @type {?} */ const platformCoreDynamicTesting = createPlatformFactory(ɵplatformCoreDynamic, 'coreDynamicTesting', [ { provide: COMPILER_OPTIONS, useValue: ɵ0, multi: true }, { provide: ɵTestingCompilerFactory, useClass: TestingCompilerFactoryImpl, deps: [Injector, CompilerFactory] } ]); /** * @fileoverview added by tsickle * Generated from: packages/platform-browser-dynamic/testing/src/private_export_testing.ts * @suppress {checkTypes,constantProperty,extraRequire,missingOverride,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc */ /** * @fileoverview added by tsickle * Generated from: packages/platform-browser-dynamic/testing/src/testing.ts * @suppress {checkTypes,constantProperty,extraRequire,missingOverride,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc */ /** * \@publicApi * @type {?} */ const platformBrowserDynamicTesting = createPlatformFactory(platformCoreDynamicTesting, 'browserDynamicTesting', ɵINTERNAL_BROWSER_DYNAMIC_PLATFORM_PROVIDERS); /** * NgModule for testing. 
* * \@publicApi */ class BrowserDynamicTestingModule { } BrowserDynamicTestingModule.ɵmod = ɵngcc0.ɵɵdefineNgModule({ type: BrowserDynamicTestingModule }); BrowserDynamicTestingModule.ɵinj = ɵngcc0.ɵɵdefineInjector({ factory: function BrowserDynamicTestingModule_Factory(t) { return new (t || BrowserDynamicTestingModule)(); }, providers: [ { provide: TestComponentRenderer, useClass: DOMTestComponentRenderer }, ], imports: [BrowserTestingModule] }); (function () { (typeof ngJitMode === "undefined" || ngJitMode) && ɵngcc0.ɵɵsetNgModuleScope(BrowserDynamicTestingModule, { exports: function () { return [BrowserTestingModule]; } }); })(); /*@__PURE__*/ (function () { ɵngcc0.ɵsetClassMetadata(BrowserDynamicTestingModule, [{ type: NgModule, args: [{ exports: [BrowserTestingModule], providers: [ { provide: TestComponentRenderer, useClass: DOMTestComponentRenderer }, ] }] }], null, null); })(); /** * @fileoverview added by tsickle * Generated from: packages/platform-browser-dynamic/testing/public_api.ts * @suppress {checkTypes,constantProperty,extraRequire,missingOverride,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc */ /** * @fileoverview added by tsickle * Generated from: packages/platform-browser-dynamic/testing/index.ts * @suppress {checkTypes,constantProperty,extraRequire,missingOverride,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc */ /** * Generated bundle index. Do not edit. */ export { BrowserDynamicTestingModule, platformBrowserDynamicTesting, DOMTestComponentRenderer as ɵDOMTestComponentRenderer, COMPILER_PROVIDERS as ɵangular_packages_platform_browser_dynamic_testing_testing_a, TestingCompilerFactoryImpl as ɵangular_packages_platform_browser_dynamic_testing_testing_b, platformCoreDynamicTesting as ɵplatformCoreDynamicTesting }; //# sourceMappingURL=testing.js.map
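// A minimal sketch of how the exports above are consumed to bootstrap the JIT
// testing environment once per test run. TestBed and the import paths are the
// standard Angular testing API; only the surrounding spec-file context is illustrative.
//
// import { TestBed } from '@angular/core/testing';
// import {
//     BrowserDynamicTestingModule,
//     platformBrowserDynamicTesting
// } from '@angular/platform-browser-dynamic/testing';
//
// TestBed.initTestEnvironment(BrowserDynamicTestingModule, platformBrowserDynamicTesting());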
ryImpl { /** * @p
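// The TestingCompilerImpl override methods above ultimately back the public
// TestBed override API. A hedged sketch of typical usage - MyComponent, MyModule
// and NoisyDirective are illustrative names, not part of the bundle:
//
// TestBed.overrideComponent(MyComponent, { set: { template: '<span>stub</span>' } });
// TestBed.overrideModule(MyModule, { remove: { declarations: [NoisyDirective] } });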
tui-code-snippet.js
/*! * tui-code-snippet.js * @version 1.3.0 * @author NHNEnt FE Development Lab <[email protected]> * @license MIT */ (function webpackUniversalModuleDefinition(root, factory) { if(typeof exports === 'object' && typeof module === 'object') module.exports = factory(); else if(typeof define === 'function' && define.amd) define([], factory); else if(typeof exports === 'object') exports["util"] = factory(); else root["tui"] = root["tui"] || {}, root["tui"]["util"] = factory(); })(this, function() { return /******/ (function(modules) { // webpackBootstrap /******/ // The module cache /******/ var installedModules = {}; /******/ // The require function /******/ function __webpack_require__(moduleId) { /******/ // Check if module is in cache /******/ if(installedModules[moduleId]) /******/ return installedModules[moduleId].exports; /******/ // Create a new module (and put it into the cache) /******/ var module = installedModules[moduleId] = { /******/ exports: {}, /******/ id: moduleId, /******/ loaded: false /******/ }; /******/ // Execute the module function /******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); /******/ // Flag the module as loaded /******/ module.loaded = true; /******/ // Return the exports of the module /******/ return module.exports; /******/ } /******/ // expose the modules object (__webpack_modules__) /******/ __webpack_require__.m = modules; /******/ // expose the module cache /******/ __webpack_require__.c = installedModules; /******/ // __webpack_public_path__ /******/ __webpack_require__.p = "dist"; /******/ // Load entry module and return exports /******/ return __webpack_require__(0); /******/ }) /************************************************************************/ /******/ ([ /* 0 */ /***/ (function(module, exports, __webpack_require__) { 'use strict'; /** * @fileoverview * @author NHN Ent. * FE Development Lab <[email protected]> * @namespace tui.util * @example * // node, commonjs * var util = require('tui-code-snippet'); * @example * // distribution file, script * <script src='path-to/tui-code-snippt.js'></script> * <script> * var util = tui.util; * <script> */ var util = {}; var object = __webpack_require__(1); var extend = object.extend; extend(util, object); extend(util, __webpack_require__(3)); extend(util, __webpack_require__(2)); extend(util, __webpack_require__(4)); extend(util, __webpack_require__(5)); extend(util, __webpack_require__(6)); extend(util, __webpack_require__(7)); extend(util, __webpack_require__(8)); extend(util, __webpack_require__(9)); util.browser = __webpack_require__(10); util.popup = __webpack_require__(11); util.formatDate = __webpack_require__(12); util.defineClass = __webpack_require__(13); util.defineModule = __webpack_require__(14); util.defineNamespace = __webpack_require__(15); util.CustomEvents = __webpack_require__(16); util.Enum = __webpack_require__(17); util.ExMap = __webpack_require__(18); util.HashMap = __webpack_require__(20); util.Map = __webpack_require__(19); module.exports = util; /***/ }), /* 1 */ /***/ (function(module, exports, __webpack_require__) { /** * @fileoverview This module has some functions for handling a plain object, json. * @author NHN Ent. * FE Development Lab <[email protected]> */ 'use strict'; var type = __webpack_require__(2); var array = __webpack_require__(3); /** * The last id of stamp * @type {number} * @private */ var lastId = 0; /** * Extend the target object from other objects. 
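* @example
* // Hedged sketch - variable names and values here are illustrative:
* var defaults = {retries: 3, timeout: 500};
* var merged = extend({}, defaults, {timeout: 1000});
* // merged => {retries: 3, timeout: 1000}; later sources win on duplicate keys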
* @param {object} target - Object that will be extended * @param {...object} objects - Objects as sources * @returns {object} Extended object * @memberof tui.util */ function extend(target, objects) { // eslint-disable-line no-unused-vars var hasOwnProp = Object.prototype.hasOwnProperty; var source, prop, i, len; for (i = 1, len = arguments.length; i < len; i += 1) { source = arguments[i]; for (prop in source) { if (hasOwnProp.call(source, prop)) { target[prop] = source[prop]; } } } return target; } /** * Assign a unique id to an object * @param {object} obj - Object that will be assigned id. * @returns {number} Stamped id * @memberof tui.util */ function stamp(obj) { if (!obj.__fe_id) { lastId += 1; obj.__fe_id = lastId; // eslint-disable-line camelcase } return obj.__fe_id; } /** * Verify whether an object has a stamped id or not. * @param {object} obj - adjusted object * @returns {boolean} * @memberof tui.util */ function hasStamp(obj) { return type.isExisty(pick(obj, '__fe_id')); } /** * Reset the last id of stamp * @private */ function resetLastId() { lastId = 0; } /** * Return a key-list(array) of a given object * @param {object} obj - Object from which a key-list will be extracted * @returns {Array} A key-list(array) * @memberof tui.util */ function keys(obj) { var keyArray = []; var key; for (key in obj) { if (obj.hasOwnProperty(key)) { keyArray.push(key); } } return keyArray; } /** * Return the equality for multiple objects(jsonObjects).<br> * See {@link http://stackoverflow.com/questions/1068834/object-comparison-in-javascript} * @param {...object} object - Multiple objects for comparing. * @returns {boolean} Equality * @memberof tui.util * @example * //-- #1. Get Module --// * var util = require('tui-code-snippet'); // node, commonjs * var util = tui.util; // distribution file * * //-- #2. Use property --// * var jsonObj1 = {name:'milk', price: 1000}; * var jsonObj2 = {name:'milk', price: 1000}; * var jsonObj3 = {name:'milk', price: 1000}; * util.compareJSON(jsonObj1, jsonObj2, jsonObj3); // true * * var jsonObj4 = {name:'milk', price: 1000}; * var jsonObj5 = {name:'beer', price: 3000}; * util.compareJSON(jsonObj4, jsonObj5); // false */ function compareJSON(object) { var argsLen = arguments.length; var i = 1; if (argsLen < 1) { return true; } for (; i < argsLen; i += 1) { if (!isSameObject(object, arguments[i])) { return false; } } return true; } /** * @param {*} x - object to compare * @param {*} y - object to compare * @returns {boolean} - whether object x and y is same or not * @private */ function isSameObject(x, y) { // eslint-disable-line complexity var leftChain = []; var rightChain = []; var p; // remember that NaN === NaN returns false // and isNaN(undefined) returns true if (isNaN(x) && isNaN(y) && type.isNumber(x) && type.isNumber(y)) { return true; } // Compare primitives and functions. // Check if both arguments link to the same object. // Especially useful on step when comparing prototypes if (x === y) { return true; } // Works in case when functions are created in constructor. // Comparing dates is a common scenario. Another built-ins? 
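// For instance (values are illustrative), the toString() comparison below makes
// this private helper treat distinct-but-equivalent built-ins as equal:
//   isSameObject(new Date(0), new Date(0));     // true - identical string forms
//   isSameObject(/ab/g, new RegExp('ab', 'g')); // true - same source and flags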
// We can even handle functions passed across iframes if ((type.isFunction(x) && type.isFunction(y)) || (x instanceof Date && y instanceof Date) || (x instanceof RegExp && y instanceof RegExp) || (x instanceof String && y instanceof String) || (x instanceof Number && y instanceof Number)) { return x.toString() === y.toString(); } // At last checking prototypes as good a we can if (!(x instanceof Object && y instanceof Object)) { return false; } if (x.isPrototypeOf(y) || y.isPrototypeOf(x) || x.constructor !== y.constructor || x.prototype !== y.prototype) { return false; } // check for infinitive linking loops if (array.inArray(x, leftChain) > -1 || array.inArray(y, rightChain) > -1) { return false; } // Quick checking of one object beeing a subset of another. for (p in y) { if (y.hasOwnProperty(p) !== x.hasOwnProperty(p)) { return false; } else if (typeof y[p] !== typeof x[p]) { return false; } } // This for loop executes comparing with hasOwnProperty() and typeof for each property in 'x' object, // and verifying equality for x[property] and y[property]. for (p in x) { if (y.hasOwnProperty(p) !== x.hasOwnProperty(p)) { return false; } else if (typeof y[p] !== typeof x[p]) { return false; } if (typeof (x[p]) === 'object' || typeof (x[p]) === 'function') { leftChain.push(x); rightChain.push(y); if (!isSameObject(x[p], y[p])) { return false; } leftChain.pop(); rightChain.pop(); } else if (x[p] !== y[p]) { return false; } } return true; } /* eslint-enable complexity */ /** * Retrieve a nested item from the given object/array * @param {object|Array} obj - Object for retrieving * @param {...string|number} paths - Paths of property * @returns {*} Value * @memberof tui.util * @example * //-- #1. Get Module --// * var util = require('tui-code-snippet'); // node, commonjs * var util = tui.util; // distribution file * * //-- #2. Use property --// * var obj = { * 'key1': 1, * 'nested' : { * 'key1': 11, * 'nested': { * 'key1': 21 * } * } * }; * util.pick(obj, 'nested', 'nested', 'key1'); // 21 * util.pick(obj, 'nested', 'nested', 'key2'); // undefined * * var arr = ['a', 'b', 'c']; * util.pick(arr, 1); // 'b' */ function pick(obj, paths) { // eslint-disable-line no-unused-vars var args = arguments; var target = args[0]; var i = 1; var length = args.length; for (; i < length; i += 1) { if (type.isUndefined(target) || type.isNull(target)) { return; } target = target[args[i]]; } return target; // eslint-disable-line consistent-return } module.exports = { extend: extend, stamp: stamp, hasStamp: hasStamp, resetLastId: resetLastId, keys: Object.prototype.keys || keys, compareJSON: compareJSON, pick: pick }; /***/ }), /* 2 */ /***/ (function(module, exports) { /** * @fileoverview This module provides some functions to check the type of variable * @author NHN Ent. * FE Development Lab <[email protected]> */ 'use strict'; var toString = Object.prototype.toString; /** * Check whether the given variable is existing or not.<br> * If the given variable is not null and not undefined, returns true. * @param {*} param - Target for checking * @returns {boolean} Is existy? * @memberof tui.util * @example * //-- #1. Get Module --// * var util = require('tui-code-snippet'); // node, commonjs * var util = tui.util; // distribution file * * //-- #2. 
Use property --// * util.isExisty(''); //true * util.isExisty(0); //true * util.isExisty([]); //true * util.isExisty({}); //true * util.isExisty(null); //false * util.isExisty(undefined); //false */ function isExisty(param) { return !isUndefined(param) && !isNull(param); } /** * Check whether the given variable is undefined or not.<br> * If the given variable is undefined, returns true. * @param {*} obj - Target for checking * @returns {boolean} Is undefined? * @memberof tui.util */ function isUndefined(obj) { return obj === undefined; // eslint-disable-line no-undefined } /** * Check whether the given variable is null or not.<br> * If the given variable(arguments[0]) is null, returns true. * @param {*} obj - Target for checking * @returns {boolean} Is null? * @memberof tui.util */ function isNull(obj) { return obj === null; } /** * Check whether the given variable is truthy or not.<br> * If the given variable is not null or not undefined or not false, returns true.<br> * (It regards 0 as true) * @param {*} obj - Target for checking * @returns {boolean} Is truthy? * @memberof tui.util */ function isTruthy(obj) { return isExisty(obj) && obj !== false; } /** * Check whether the given variable is falsy or not.<br> * If the given variable is null or undefined or false, returns true. * @param {*} obj - Target for checking * @returns {boolean} Is falsy? * @memberof tui.util */ function isFalsy(obj) { return !isTruthy(obj); } /** * Check whether the given variable is an arguments object or not.<br> * If the given variable is an arguments object, return true. * @param {*} obj - Target for checking * @returns {boolean} Is arguments? * @memberof tui.util */ function isArguments(obj) { var result = isExisty(obj) && ((toString.call(obj) === '[object Arguments]') || !!obj.callee); return result; } /** * Check whether the given variable is an instance of Array or not.<br> * If the given variable is an instance of Array, return true. * @param {*} obj - Target for checking * @returns {boolean} Is array instance? * @memberof tui.util */ function isArray(obj) { return obj instanceof Array; } /** * Check whether the given variable is an object or not.<br> * If the given variable is an object, return true. * @param {*} obj - Target for checking * @returns {boolean} Is object? * @memberof tui.util */ function isObject(obj) { return obj === Object(obj); } /** * Check whether the given variable is a function or not.<br> * If the given variable is a function, return true. * @param {*} obj - Target for checking * @returns {boolean} Is function? * @memberof tui.util */ function isFunction(obj) { return obj instanceof Function; } /** * Check whether the given variable is a number or not.<br> * If the given variable is a number, return true. * @param {*} obj - Target for checking * @returns {boolean} Is number? * @memberof tui.util */ function isNumber(obj) { return typeof obj === 'number' || obj instanceof Number; } /** * Check whether the given variable is a string or not.<br> * If the given variable is a string, return true. * @param {*} obj - Target for checking * @returns {boolean} Is string? * @memberof tui.util */ function isString(obj) { return typeof obj === 'string' || obj instanceof String; } /** * Check whether the given variable is a boolean or not.<br> * If the given variable is a boolean, return true. * @param {*} obj - Target for checking * @returns {boolean} Is boolean? * @memberof tui.util */ function
isBoolean
(obj) { return typeof obj === 'boolean' || obj instanceof Boolean; } /** * Check whether the given variable is an instance of Array or not.<br> * If the given variable is an instance of Array, return true.<br> * (It is used for multiple frame environments) * @param {*} obj - Target for checking * @returns {boolean} Is an instance of array? * @memberof tui.util */ function isArraySafe(obj) { return toString.call(obj) === '[object Array]'; } /** * Check whether the given variable is a function or not.<br> * If the given variable is a function, return true.<br> * (It is used for multiple frame environments) * @param {*} obj - Target for checking * @returns {boolean} Is a function? * @memberof tui.util */ function isFunctionSafe(obj) { return toString.call(obj) === '[object Function]'; } /** * Check whether the given variable is a number or not.<br> * If the given variable is a number, return true.<br> * (It is used for multiple frame environments) * @param {*} obj - Target for checking * @returns {boolean} Is a number? * @memberof tui.util */ function isNumberSafe(obj) { return toString.call(obj) === '[object Number]'; } /** * Check whether the given variable is a string or not.<br> * If the given variable is a string, return true.<br> * (It is used for multiple frame environments) * @param {*} obj - Target for checking * @returns {boolean} Is a string? * @memberof tui.util */ function isStringSafe(obj) { return toString.call(obj) === '[object String]'; } /** * Check whether the given variable is a boolean or not.<br> * If the given variable is a boolean, return true.<br> * (It is used for multiple frame environments) * @param {*} obj - Target for checking * @returns {boolean} Is a boolean? * @memberof tui.util */ function isBooleanSafe(obj) { return toString.call(obj) === '[object Boolean]'; } /** * Check whether the given variable is a instance of HTMLNode or not.<br> * If the given variables is a instance of HTMLNode, return true. * @param {*} html - Target for checking * @returns {boolean} Is HTMLNode ? * @memberof tui.util */ function isHTMLNode(html) { if (typeof HTMLElement === 'object') { return (html && (html instanceof HTMLElement || !!html.nodeType)); } return !!(html && html.nodeType); } /** * Check whether the given variable is a HTML tag or not.<br> * If the given variables is a HTML tag, return true. * @param {*} html - Target for checking * @returns {Boolean} Is HTML tag? * @memberof tui.util */ function isHTMLTag(html) { if (typeof HTMLElement === 'object') { return (html && (html instanceof HTMLElement)); } return !!(html && html.nodeType && html.nodeType === 1); } /** * Check whether the given variable is empty(null, undefined, or empty array, empty object) or not.<br> * If the given variables is empty, return true. * @param {*} obj - Target for checking * @returns {boolean} Is empty? 
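* @example
* // Illustrative results, following the checks in the implementation:
* isEmpty('');     // true - empty string
* isEmpty([]);     // true - zero-length array
* isEmpty({a: 1}); // false - has an own property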
* @memberof tui.util */ function isEmpty(obj) { if (!isExisty(obj) || _isEmptyString(obj)) { return true; } if (isArray(obj) || isArguments(obj)) { return obj.length === 0; } if (isObject(obj) && !isFunction(obj)) { return !_hasOwnProperty(obj); } return true; } /** * Check whether given argument is empty string * @param {*} obj - Target for checking * @returns {boolean} whether given argument is empty string * @memberof tui.util * @private */ function _isEmptyString(obj) { return isString(obj) && obj === ''; } /** * Check whether given argument has own property * @param {Object} obj - Target for checking * @returns {boolean} - whether given argument has own property * @memberof tui.util * @private */ function _hasOwnProperty(obj) { var key; for (key in obj) { if (obj.hasOwnProperty(key)) { return true; } } return false; } /** * Check whether the given variable is not empty * (not null, not undefined, or not empty array, not empty object) or not.<br> * If the given variables is not empty, return true. * @param {*} obj - Target for checking * @returns {boolean} Is not empty? * @memberof tui.util */ function isNotEmpty(obj) { return !isEmpty(obj); } /** * Check whether the given variable is an instance of Date or not.<br> * If the given variables is an instance of Date, return true. * @param {*} obj - Target for checking * @returns {boolean} Is an instance of Date? * @memberof tui.util */ function isDate(obj) { return obj instanceof Date; } /** * Check whether the given variable is an instance of Date or not.<br> * If the given variables is an instance of Date, return true.<br> * (It is used for multiple frame environments) * @param {*} obj - Target for checking * @returns {boolean} Is an instance of Date? * @memberof tui.util */ function isDateSafe(obj) { return toString.call(obj) === '[object Date]'; } module.exports = { isExisty: isExisty, isUndefined: isUndefined, isNull: isNull, isTruthy: isTruthy, isFalsy: isFalsy, isArguments: isArguments, isArray: isArray, isArraySafe: isArraySafe, isObject: isObject, isFunction: isFunction, isFunctionSafe: isFunctionSafe, isNumber: isNumber, isNumberSafe: isNumberSafe, isDate: isDate, isDateSafe: isDateSafe, isString: isString, isStringSafe: isStringSafe, isBoolean: isBoolean, isBooleanSafe: isBooleanSafe, isHTMLNode: isHTMLNode, isHTMLTag: isHTMLTag, isEmpty: isEmpty, isNotEmpty: isNotEmpty }; /***/ }), /* 3 */ /***/ (function(module, exports, __webpack_require__) { /** * @fileoverview This module has some functions for handling array. * @author NHN Ent. * FE Development Lab <[email protected]> */ 'use strict'; var collection = __webpack_require__(4); var type = __webpack_require__(2); var aps = Array.prototype.slice; var util; /** * Generate an integer Array containing an arithmetic progression. * @param {number} start - start index * @param {number} stop - stop index * @param {number} step - next visit index = current index + step * @returns {Array} * @memberof tui.util * @example * //-- #1. Get Module --// * var util = require('tui-code-snippet'); // node, commonjs * var util = tui.util; // distribution file * * //-- #2. Use property --// * util.range(5); // [0, 1, 2, 3, 4] * util.range(1, 5); // [1,2,3,4] * util.range(2, 10, 2); // [2,4,6,8] * util.range(10, 2, -2); // [10,8,6,4] */ var range = function(start, stop, step) { var arr = []; var flag; if (type.isUndefined(stop)) { stop = start || 0; start = 0; } step = step || 1; flag = step < 0 ? 
-1 : 1; stop *= flag; for (; start * flag < stop; start += step) { arr.push(start); } return arr; }; /* eslint-disable valid-jsdoc */ /** * Zip together multiple lists into a single array * @param {...Array} * @returns {Array} * @memberof tui.util * @example * //-- #1. Get Module --// * var util = require('tui-code-snippet'); // node, commonjs * var util = tui.util; // distribution file * * //-- #2. Use property --// * var result = util.zip([1, 2, 3], ['a', 'b','c'], [true, false, true]); * console.log(result[0]); // [1, 'a', true] * console.log(result[1]); // [2, 'b', false] * console.log(result[2]); // [3, 'c', true] */ var zip = function() {/* eslint-enable valid-jsdoc */ var arr2d = aps.call(arguments); var result = []; collection.forEach(arr2d, function(arr) { collection.forEach(arr, function(value, index) { if (!result[index]) { result[index] = []; } result[index].push(value); }); }); return result; }; /** * Returns the first index at which a given element can be found in the array * from start index(default 0), or -1 if it is not present.<br> * It compares searchElement to elements of the Array using strict equality * (the same method used by the ===, or triple-equals, operator). * @param {*} searchElement Element to locate in the array * @param {Array} array Array that will be traversed. * @param {number} startIndex Start index in array for searching (default 0) * @returns {number} the First index at which a given element, or -1 if it is not present * @memberof tui.util * @example * //-- #1. Get Module --// * var util = require('tui-code-snippet'); // node, commonjs * var util = tui.util; // distribution file * * //-- #2. Use property --// * var arr = ['one', 'two', 'three', 'four']; * var idx1 = util.inArray('one', arr, 3); // -1 * var idx2 = util.inArray('one', arr); // 0 */ var inArray = function(searchElement, array, startIndex) { var i; var length; startIndex = startIndex || 0; if (!type.isArray(array)) { return -1; } if (Array.prototype.indexOf) { return Array.prototype.indexOf.call(array, searchElement, startIndex); } length = array.length; for (i = startIndex; startIndex >= 0 && i < length; i += 1) { if (array[i] === searchElement) { return i; } } return -1; }; util = { inArray: inArray, range: range, zip: zip }; module.exports = util; /***/ }), /* 4 */ /***/ (function(module, exports, __webpack_require__) { /** * @fileoverview This module has some functions for handling object as collection. * @author NHN Ent. * FE Development Lab <[email protected]> */ 'use strict'; var type = __webpack_require__(2); var object = __webpack_require__(1); /** * Execute the provided callback once for each element present * in the array(or Array-like object) in ascending order.<br> * If the callback function returns false, the loop will be stopped.<br> * Callback function(iteratee) is invoked with three arguments: * - The value of the element * - The index of the element * - The array(or Array-like object) being traversed * @param {Array} arr The array(or Array-like object) that will be traversed * @param {function} iteratee Callback function * @param {Object} [context] Context(this) of callback function * @memberof tui.util * @example * //-- #1. Get Module --// * var util = require('tui-code-snippet'); // node, commonjs * var util = tui.util; // distribution file * * //-- #2. 
Use property --// * var sum = 0; * * util.forEachArray([1,2,3], function(value){ * sum += value; * }); * alert(sum); // 6 */ function forEachArray(arr, iteratee, context) { var index = 0; var len = arr.length; context = context || null; for (; index < len; index += 1) { if (iteratee.call(context, arr[index], index, arr) === false) { break; } } } /** * Execute the provided callback once for each property of object which actually exist.<br> * If the callback function returns false, the loop will be stopped.<br> * Callback function(iteratee) is invoked with three arguments: * - The value of the property * - The name of the property * - The object being traversed * @param {Object} obj The object that will be traversed * @param {function} iteratee Callback function * @param {Object} [context] Context(this) of callback function * @memberof tui.util * @example * //-- #1. Get Module --// * var util = require('tui-code-snippet'); // node, commonjs * var util = tui.util; // distribution file * * //-- #2. Use property --// * var sum = 0; * * util.forEachOwnProperties({a:1,b:2,c:3}, function(value){ * sum += value; * }); * alert(sum); // 6 **/ function forEachOwnProperties(obj, iteratee, context) { var key; context = context || null; for (key in obj) { if (obj.hasOwnProperty(key)) { if (iteratee.call(context, obj[key], key, obj) === false) { break; } } } } /** * Execute the provided callback once for each property of object(or element of array) which actually exist.<br> * If the object is Array-like object(ex-arguments object), It needs to transform to Array.(see 'ex2' of example).<br> * If the callback function returns false, the loop will be stopped.<br> * Callback function(iteratee) is invoked with three arguments: * - The value of the property(or The value of the element) * - The name of the property(or The index of the element) * - The object being traversed * @param {Object} obj The object that will be traversed * @param {function} iteratee Callback function * @param {Object} [context] Context(this) of callback function * @memberof tui.util * @example * //-- #1. Get Module --// * var util = require('tui-code-snippet'); // node, commonjs * var util = tui.util; // distribution file * * //-- #2. Use property --// * var sum = 0; * * util.forEach([1,2,3], function(value){ * sum += value; * }); * alert(sum); // 6 * * // In case of Array-like object * var array = Array.prototype.slice.call(arrayLike); // change to array * util.forEach(array, function(value){ * sum += value; * }); */ function forEach(obj, iteratee, context) { if (type.isArray(obj)) { forEachArray(obj, iteratee, context); } else { forEachOwnProperties(obj, iteratee, context); } } /** * Execute the provided callback function once for each element in an array, in order, * and constructs a new array from the results.<br> * If the object is Array-like object(ex-arguments object), * It needs to transform to Array.(see 'ex2' of forEach example)<br> * Callback function(iteratee) is invoked with three arguments: * - The value of the property(or The value of the element) * - The name of the property(or The index of the element) * - The object being traversed * @param {Object} obj The object that will be traversed * @param {function} iteratee Callback function * @param {Object} [context] Context(this) of callback function * @returns {Array} A new array composed of returned values from callback function * @memberof tui.util * @example * //-- #1. 
Get Module --// * var util = require('tui-code-snippet'); // node, commonjs * var util = tui.util; // distribution file * * //-- #2. Use property --// * var result = util.map([0,1,2,3], function(value) { * return value + 1; * }); * * alert(result); // 1,2,3,4 */ function map(obj, iteratee, context) { var resultArray = []; context = context || null; forEach(obj, function() { resultArray.push(iteratee.apply(context, arguments)); }); return resultArray; } /** * Execute the callback function once for each element present in the array(or Array-like object or plain object).<br> * If the object is Array-like object(ex-arguments object), * It needs to transform to Array.(see 'ex2' of forEach example)<br> * Callback function(iteratee) is invoked with four arguments: * - The previousValue * - The currentValue * - The index * - The object being traversed * @param {Object} obj The object that will be traversed * @param {function} iteratee Callback function * @param {Object} [context] Context(this) of callback function * @returns {*} The result value * @memberof tui.util * @example * //-- #1. Get Module --// * var util = require('tui-code-snippet'); // node, commonjs * var util = tui.util; // distribution file * * //-- #2. Use property --// * var result = util.reduce([0,1,2,3], function(stored, value) { * return stored + value; * }); * * alert(result); // 6 */ function reduce(obj, iteratee, context) { var index = 0; var keys, length, store; context = context || null; if (!type.isArray(obj)) { keys = object.keys(obj); length = keys.length; store = obj[keys[index += 1]]; } else { length = obj.length; store = obj[index]; } index += 1; for (; index < length; index += 1) { store = iteratee.call(context, store, obj[keys ? keys[index] : index]); } return store; } /** * Transform the Array-like object to Array.<br> * In low IE (below 8), Array.prototype.slice.call is not perfect. So, try-catch statement is used. * @param {*} arrayLike Array-like object * @returns {Array} Array * @memberof tui.util * @example * //-- #1. Get Module --// * var util = require('tui-code-snippet'); // node, commonjs * var util = tui.util; // distribution file * * //-- #2. Use property --// * var arrayLike = { * 0: 'one', * 1: 'two', * 2: 'three', * 3: 'four', * length: 4 * }; * var result = util.toArray(arrayLike); * * alert(result instanceof Array); // true * alert(result); // one,two,three,four */ function toArray(arrayLike) { var arr; try { arr = Array.prototype.slice.call(arrayLike); } catch (e) { arr = []; forEachArray(arrayLike, function(value) { arr.push(value); }); } return arr; } /** * Create a new array or plain object with all elements(or properties) * that pass the test implemented by the provided function.<br> * Callback function(iteratee) is invoked with three arguments: * - The value of the property(or The value of the element) * - The name of the property(or The index of the element) * - The object being traversed * @param {Object} obj Object(plain object or Array) that will be traversed * @param {function} iteratee Callback function * @param {Object} [context] Context(this) of callback function * @returns {Object} plain object or Array * @memberof tui.util * @example * //-- #1. Get Module --// * var util = require('tui-code-snippet'); // node, commonjs * var util = tui.util; // distribution file * * //-- #2. 
Use property --// * var result1 = util.filter([0,1,2,3], function(value) { * return (value % 2 === 0); * }); * alert(result1); // [0, 2] * * var result2 = util.filter({a : 1, b: 2, c: 3}, function(value) { * return (value % 2 !== 0); * }); * alert(result2.a); // 1 * alert(result2.b); // undefined * alert(result2.c); // 3 */ function filter(obj, iteratee, context) { var result, add; context = context || null; if (!type.isObject(obj) || !type.isFunction(iteratee)) { throw new Error('wrong parameter'); } if (type.isArray(obj)) { result = []; add = function(subResult, args) { subResult.push(args[0]); }; } else { result = {}; add = function(subResult, args) { subResult[args[1]] = args[0]; }; } forEach(obj, function() { if (iteratee.apply(context, arguments)) { add(result, arguments); } }, context); return result; } /** * fetching a property * @param {Array} arr target collection * @param {String|Number} property property name * @returns {Array} * @memberof tui.util * @example * //-- #1. Get Module --// * var util = require('tui-code-snippet'); // node, commonjs * var util = tui.util; // distribution file * * //-- #2. Use property --// * var objArr = [ * {'abc': 1, 'def': 2, 'ghi': 3}, * {'abc': 4, 'def': 5, 'ghi': 6}, * {'abc': 7, 'def': 8, 'ghi': 9} * ]; * var arr2d = [ * [1, 2, 3], * [4, 5, 6], * [7, 8, 9] * ]; * util.pluck(objArr, 'abc'); // [1, 4, 7] * util.pluck(arr2d, 2); // [3, 6, 9] */ function pluck(arr, property) { var result = map(arr, function(item) { return item[property]; }); return result; } module.exports = { forEachOwnProperties: forEachOwnProperties, forEachArray: forEachArray, forEach: forEach, toArray: toArray, map: map, reduce: reduce, filter: filter, pluck: pluck }; /***/ }), /* 5 */ /***/ (function(module, exports) { /** * @fileoverview This module provides a bind() function for context binding. * @author NHN Ent. * FE Development Lab <[email protected]> */ 'use strict'; /** * Create a new function that, when called, has its this keyword set to the provided value. * @param {function} fn A original function before binding * @param {*} obj context of function in arguments[0] * @returns {function()} A new bound function with context that is in arguments[1] * @memberof tui.util */ function bind(fn, obj) { var slice = Array.prototype.slice; var args; if (fn.bind) { return fn.bind.apply(fn, slice.call(arguments, 1)); } /* istanbul ignore next */ args = slice.call(arguments, 2); /* istanbul ignore next */ return function() { /* istanbul ignore next */ return fn.apply(obj, args.length ? args.concat(slice.call(arguments)) : arguments); }; } module.exports = { bind: bind }; /***/ }), /* 6 */ /***/ (function(module, exports) { /** * @fileoverview This module provides some simple function for inheritance. * @author NHN Ent. * FE Development Lab <[email protected]> */ 'use strict'; /** * Create a new object with the specified prototype object and properties. * @param {Object} obj This object will be a prototype of the newly-created object. * @returns {Object} * @memberof tui.util */ function createObject(obj) { function F() {} // eslint-disable-line require-jsdoc F.prototype = obj; return new F(); } /** * Provide a simple inheritance in prototype-oriented.<br> * Caution : * Don't overwrite the prototype of child constructor. * * @param {function} subType Child constructor * @param {function} superType Parent constructor * @memberof tui.util * @example * //-- #1. 
Get Module --// * var util = require('tui-code-snippet'); // node, commonjs * var util = tui.util; // distribution file * * //-- #2. Use property --// * // Parent constructor * function Animal(leg) { * this.leg = leg; * } * Animal.prototype.growl = function() { * // ... * }; * * // Child constructor * function Person(name) { * this.name = name; * } * * // Inheritance * util.inherit(Person, Animal); * * // After this inheritance, please use only the extending of property. * // Do not overwrite prototype. * Person.prototype.walk = function(direction) { * // ... * }; */ function inherit(subType, superType) { var prototype = createObject(superType.prototype); prototype.constructor = subType; subType.prototype = prototype; } module.exports = { createObject: createObject, inherit: inherit }; /***/ }), /* 7 */ /***/ (function(module, exports, __webpack_require__) { /** * @fileoverview This module has some functions for handling the string. * @author NHN Ent. * FE Development Lab <[email protected]> */ 'use strict'; var collection = __webpack_require__(4); var object = __webpack_require__(1); /** * Transform the given HTML Entity string into plain string * @param {String} htmlEntity - HTML Entity type string * @returns {String} Plain string * @memberof tui.util * @example * //-- #1. Get Module --// * var util = require('tui-code-snippet'); // node, commonjs * var util = tui.util; // distribution file * * //-- #2. Use property --// * var htmlEntityString = "A &#39;quote&#39; is &lt;b&gt;bold&lt;/b&gt;" * var result = util.decodeHTMLEntity(htmlEntityString); //"A 'quote' is <b>bold</b>" */ function decodeHTMLEntity(htmlEntity) { var entities = { '&quot;': '"', '&amp;': '&', '&lt;': '<', '&gt;': '>', '&#39;': '\'', '&nbsp;': ' ' }; return htmlEntity.replace(/&amp;|&lt;|&gt;|&quot;|&#39;|&nbsp;/g, function(m0) { return entities[m0] ? entities[m0] : m0; }); } /** * Transform the given string into HTML Entity string * @param {String} html - String for encoding * @returns {String} HTML Entity * @memberof tui.util * @example * //-- #1. Get Module --// * var util = require('tui-code-snippet'); // node, commonjs * var util = tui.util; // distribution file * * //-- #2. Use property --// * var htmlEntityString = "<script> alert('test');</script><a href='test'>"; * var result = util.encodeHTMLEntity(htmlEntityString); * //"&lt;script&gt; alert(&#39;test&#39;);&lt;/script&gt;&lt;a href=&#39;test&#39;&gt;" */ function encodeHTMLEntity(html) { var entities = { '"': 'quot', '&': 'amp', '<': 'lt', '>': 'gt', '\'': '#39' }; return html.replace(/[<>&"']/g, function(m0) { return entities[m0] ? '&' + entities[m0] + ';' : m0; }); } /** * Return whether the string capable to transform into plain string is in the given string or not. * @param {String} string - test string * @memberof tui.util * @returns {boolean} */ function hasEncodableString(string) { return (/[<>&"']/).test(string); } /** * Return duplicate charters * @param {string} operandStr1 The operand string * @param {string} operandStr2 The operand string * @private * @memberof tui.util * @returns {string} * @example * //-- #1. Get Module --// * var util = require('tui-code-snippet'); // node, commonjs * var util = tui.util; // distribution file * * //-- #2. 
Use property --// * util.getDuplicatedChar('fe dev', 'nhn entertainment'); // 'e' * util.getDuplicatedChar('fdsa', 'asdf'); // 'asdf' */ function getDuplicatedChar(operandStr1, operandStr2) { var i = 0; var len = operandStr1.length; var pool = {}; var dupl, key; for (; i < len; i += 1) { key = operandStr1.charAt(i); pool[key] = 1; } for (i = 0, len = operandStr2.length; i < len; i += 1) { key = operandStr2.charAt(i); if (pool[key]) { pool[key] += 1; } } pool = collection.filter(pool, function(item) { return item > 1; }); pool = object.keys(pool).sort(); dupl = pool.join(''); return dupl; } module.exports = { decodeHTMLEntity: decodeHTMLEntity, encodeHTMLEntity: encodeHTMLEntity, hasEncodableString: hasEncodableString, getDuplicatedChar: getDuplicatedChar }; /***/ }), /* 8 */ /***/ (function(module, exports) { /** * @fileoverview collections of some technic methods. * @author NHN Ent. FE Development Lab <e0242.nhnent.com> */ 'use strict'; var tricks = {}; var aps = Array.prototype.slice; /** * Creates a debounced function that delays invoking fn until after delay milliseconds has elapsed * since the last time the debouced function was invoked. * @param {function} fn The function to debounce. * @param {number} [delay=0] The number of milliseconds to delay * @memberof tui.util * @returns {function} debounced function. * @example * //-- #1. Get Module --// * var util = require('tui-code-snippet'); // node, commonjs * var util = tui.util; // distribution file * * //-- #2. Use property --// * function someMethodToInvokeDebounced() {} * * var debounced = util.debounce(someMethodToInvokeDebounced, 300); * * // invoke repeatedly * debounced(); * debounced(); * debounced(); * debounced(); * debounced(); * debounced(); // last invoke of debounced() * * // invoke someMethodToInvokeDebounced() after 300 milliseconds. */ function debounce(fn, delay) { var timer, args; /* istanbul ignore next */ delay = delay || 0; function debounced() { // eslint-disable-line require-jsdoc args = aps.call(arguments); window.clearTimeout(timer); timer = window.setTimeout(function() { fn.apply(null, args); }, delay); } return debounced; } /** * return timestamp * @memberof tui.util * @returns {number} The number of milliseconds from Jan. 1970 00:00:00 (GMT) */ function timestamp() { return Number(new Date()); } /** * Creates a throttled function that only invokes fn at most once per every interval milliseconds. * * You can use this throttle short time repeatedly invoking functions. (e.g MouseMove, Resize ...) * * if you need reuse throttled method. you must remove slugs (e.g. flag variable) related with throttling. * @param {function} fn function to throttle * @param {number} [interval=0] the number of milliseconds to throttle invocations to. * @memberof tui.util * @returns {function} throttled function * @example * //-- #1. Get Module --// * var util = require('tui-code-snippet'); // node, commonjs * var util = tui.util; // distribution file * * //-- #2. Use property --// * function someMethodToInvokeThrottled() {} * * var throttled = util.throttle(someMethodToInvokeThrottled, 300); * * // invoke repeatedly * throttled(); // invoke (leading) * throttled(); * throttled(); // invoke (near 300 milliseconds) * throttled(); * throttled(); * throttled(); // invoke (near 600 milliseconds) * // ... * // invoke (trailing) * * // if you need reuse throttled method. 
then invoke reset() * throttled.reset(); */ function throttle(fn, interval) { var base; var isLeading = true; var tick = function(_args) { fn.apply(null, _args); base = null; }; var debounced, stamp, args; /* istanbul ignore next */ interval = interval || 0; debounced = tricks.debounce(tick, interval); function throttled() { // eslint-disable-line require-jsdoc args = aps.call(arguments); if (isLeading) { tick(args); isLeading = false; return; } stamp = tricks.timestamp(); base = base || stamp; // pass array directly because `debounce()`, `tick()` are already use // `apply()` method to invoke developer's `fn` handler. // // also, this `debounced` line invoked every time for implements // `trailing` features. debounced(args); if ((stamp - base) >= interval) { tick(args); } } function reset() { // eslint-disable-line require-jsdoc isLeading = true; base = null; } throttled.reset = reset; return throttled; } tricks.timestamp = timestamp; tricks.debounce = debounce; tricks.throttle = throttle; module.exports = tricks; /***/ }), /* 9 */ /***/ (function(module, exports, __webpack_require__) { /** * @fileoverview This module has some functions for handling object as collection. * @author NHN Ent. * FE Development Lab <[email protected]> */ 'use strict'; var object = __webpack_require__(1); var collection = __webpack_require__(4); /** * Request image ping. * @param {String} url url for ping request * @param {Object} trackingInfo infos for make query string * @returns {HTMLElement} * @memberof tui.util * @example * //-- #1. Get Module --// * var util = require('tui-code-snippet'); // node, commonjs * var util = tui.util; // distribution file * * //-- #2. Use property --// * util.imagePing('https://www.google-analytics.com/collect', { * v: 1, * t: 'event', * tid: 'trackingid', * cid: 'cid', * dp: 'dp', * dh: 'dh' * }); */ function imagePing(url, trackingInfo) { var queryString = collection.map(object.keys(trackingInfo), function(key, index) { var startWith = index === 0 ? '' : '&'; return startWith + key + '=' + trackingInfo[key]; }).join(''); var trackingElement = document.createElement('img'); trackingElement.src = url + '?' + queryString; trackingElement.style.display = 'none'; document.body.appendChild(trackingElement); document.body.removeChild(trackingElement); return trackingElement; } module.exports = { imagePing: imagePing }; /***/ }), /* 10 */ /***/ (function(module, exports) { /** * @fileoverview This module detects the kind of well-known browser and version. * @author NHN Ent. * FE Development Lab <[email protected]> */ 'use strict'; /** * This object has an information that indicate the kind of browser.<br> * The list below is a detectable browser list. * - ie8 ~ ie11 * - chrome * - firefox * - safari * - edge * @memberof tui.util * @example * //-- #1. Get Module --// * var util = require('tui-code-snippet'); // node, commonjs * var util = tui.util; // distribution file * * //-- #2. 
Use property --// * util.browser.chrome === true; // chrome * util.browser.firefox === true; // firefox * util.browser.safari === true; // safari * util.browser.msie === true; // IE * util.browser.edge === true; // edge * util.browser.others === true; // other browser * util.browser.version; // browser version */ var browser = { chrome: false, firefox: false, safari: false, msie: false, edge: false, others: false, version: 0 }; var nav = window.navigator; var appName = nav.appName.replace(/\s/g, '_'); var userAgent = nav.userAgent; var rIE = /MSIE\s([0-9]+[.0-9]*)/; var rIE11 = /Trident.*rv:11\./; var rEdge = /Edge\/(\d+)\./; var versionRegex = { firefox: /Firefox\/(\d+)\./, chrome: /Chrome\/(\d+)\./, safari: /Version\/([\d.]+).*Safari\/(\d+)/ }; var key, tmp; var detector = { Microsoft_Internet_Explorer: function() { // eslint-disable-line camelcase var detectedVersion = userAgent.match(rIE); if (detectedVersion) { // ie8 ~ ie10 browser.msie = true; browser.version = parseFloat(detectedVersion[1]); } else { // no version information browser.others = true; } }, Netscape: function() { // eslint-disable-line complexity var detected = false; if (rIE11.exec(userAgent)) { browser.msie = true; browser.version = 11; detected = true; } else if (rEdge.exec(userAgent)) { browser.edge = true; browser.version = userAgent.match(rEdge)[1]; detected = true; } else { for (key in versionRegex) { if (versionRegex.hasOwnProperty(key)) { tmp = userAgent.match(versionRegex[key]); if (tmp && tmp.length > 1) { // eslint-disable-line max-depth browser[key] = detected = true; browser.version = parseFloat(tmp[1] || 0); break; } } } } if (!detected) { browser.others = true; } } }; var fn = detector[appName]; if (fn) { detector[appName](); } module.exports = browser; /***/ }), /* 11 */ /***/ (function(module, exports, __webpack_require__) { /** * @fileoverview This module has some methods for handling popup-window * @author NHN Ent. * FE Development Lab <[email protected]> */ 'use strict'; var collection = __webpack_require__(4); var type = __webpack_require__(2); var func = __webpack_require__(5); var browser = __webpack_require__(10); var object = __webpack_require__(1); var popupId = 0; /** * Popup management class * @constructor * @memberof tui.util * @example * // node, commonjs * var popup = require('tui-code-snippet').popup; * @example * // distribution file, script * <script src='path-to/tui-code-snippt.js'></script> * <script> * var popup = tui.util.popup; * <script> */ function Popup() { /** * Caching the window-contexts of opened popups * @type {Object} */ this.openedPopup = {}; /** * In IE7, an error occurs when the closeWithParent property attaches to window object.<br> * So, It is for saving the value of closeWithParent instead of attaching to window object. * @type {Object} */ this.closeWithParentPopup = {}; /** * Post data bridge for IE11 popup * @type {string} */ this.postBridgeUrl = ''; } /********** * public methods **********/ /** * Returns a popup-list administered by current window. * @param {string} [key] The key of popup. * @returns {Object} popup window list object */ Popup.prototype.getPopupList = function(key) { var target; if (type.isExisty(key)) { target = this.openedPopup[key]; } else { target = this.openedPopup; } return target; }; /** * Open popup * Caution: * In IE11, when transfer data to popup by POST, must set the postBridgeUrl. 
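* @example
* // Hedged sketch - the URL and option values are illustrative; the option
* // names are documented below:
* tui.util.popup.openPopup('/report.html', {
*     popupName: 'reportPopup',
*     popupOptionStr: 'width=640,height=480',
*     method: 'get',
*     param: {id: 42}
* });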
* * @param {string} url - popup url * @param {Object} options - popup options * @param {string} [options.popupName] - Key of popup window.<br> * If the key is set, when you try to open by this key, the popup of this key is focused.<br> * Or else a new popup window having this key is opened. * * @param {string} [options.popupOptionStr=""] - Option string of popup window<br> * It is same with the third parameter of window.open() method.<br> * See {@link http://www.w3schools.com/jsref/met_win_open.asp} * * @param {boolean} [options.closeWithParent=true] - Is closed when parent window closed? * * @param {boolean} [options.useReload=false] - This property indicates whether reload the popup or not.<br> * If true, the popup will be reloaded when you try to re-open the popup that has been opened.<br> * When transmit the POST-data, some browsers alert a message for confirming whether retransmit or not. * * @param {string} [options.postBridgeUrl=''] * Use this url to avoid a certain bug occuring when transmitting POST data to the popup in IE11.<br> * This specific buggy situation is known to happen because IE11 tries to open the requested url<br> * not in a new popup window as intended, but in a new tab.<br> * See {@link http://wiki.nhnent.com/pages/viewpage.action?pageId=240562844} * * @param {string} [options.method=get] * The method of transmission when the form-data is transmitted to popup-window. * * @param {Object} [options.param=null] * Using as parameters for transmission when the form-data is transmitted to popup-window. */ Popup.prototype.openPopup = function(url, options) { // eslint-disable-line complexity var popup, formElement, useIEPostBridge; options = object.extend({ popupName: 'popup_' + popupId + '_' + Number(new Date()), popupOptionStr: '', useReload: true, closeWithParent: true, method: 'get', param: {} }, options || {}); options.method = options.method.toUpperCase(); this.postBridgeUrl = options.postBridgeUrl || this.postBridgeUrl; useIEPostBridge = options.method === 'POST' && options.param && browser.msie && browser.version === 11; if (!type.isExisty(url)) { throw new Error('Popup#open() need popup url.'); } popupId += 1; /* * In form-data transmission * 1. Create a form before opening a popup. * 2. Transmit the form-data. * 3. Remove the form after transmission. */ if (options.param) { if (options.method === 'GET') { url = url + (/\?/.test(url) ? 
'&' : '?') + this._parameterize(options.param); } else if (options.method === 'POST') { if (!useIEPostBridge) { formElement = this.createForm(url, options.param, options.method, options.popupName); url = 'about:blank'; } } } popup = this.openedPopup[options.popupName]; if (!type.isExisty(popup)) { this.openedPopup[options.popupName] = popup = this._open(useIEPostBridge, options.param, url, options.popupName, options.popupOptionStr); } else if (popup.closed) { this.openedPopup[options.popupName] = popup = this._open(useIEPostBridge, options.param, url, options.popupName, options.popupOptionStr); } else { if (options.useReload) { popup.location.replace(url); } popup.focus(); } this.closeWithParentPopup[options.popupName] = options.closeWithParent; if (!popup || popup.closed || type.isUndefined(popup.closed)) { alert('please enable popup windows for this website'); } if (options.param && options.method === 'POST' && !useIEPostBridge) { if (popup) { formElement.submit(); } if (formElement.parentNode) { formElement.parentNode.removeChild(formElement); } } window.onunload = func.bind(this.closeAllPopup, this); }; /** * Close the popup * @param {boolean} [skipBeforeUnload] - If true, the 'window.onunload' will be null and skip unload event. * @param {Window} [popup] - Window-context of popup for closing. If omit this, current window-context will be closed. */ Popup.prototype.close = function(skipBeforeUnload, popup) { var target = popup || window; skipBeforeUnload = type.isExisty(skipBeforeUnload) ? skipBeforeUnload : false; if (skipBeforeUnload) { window.onunload = null; } if (!target.closed) { target.opener = window.location.href; target.close(); } }; /** * Close all the popups in current window. * @param {boolean} closeWithParent - If true, popups having the closeWithParentPopup property as true will be closed. */ Popup.prototype.closeAllPopup = function(closeWithParent) { var hasArg = type.isExisty(closeWithParent); collection.forEachOwnProperties(this.openedPopup, function(popup, key) { if ((hasArg && this.closeWithParentPopup[key]) || !hasArg) { this.close(false, popup); } }, this); }; /** * Activate(or focus) the popup of the given name. * @param {string} popupName - Name of popup for activation */ Popup.prototype.focus = function(popupName) { this.getPopupList(popupName).focus(); }; /** * Return an object made of parsing the query string. * @returns {Object} An object having some information of the query string. * @private */ Popup.prototype.parseQuery = function() { var param = {}; var search, pair; search = window.location.search.substr(1); collection.forEachArray(search.split('&'), function(part) { pair = part.split('='); param[decodeURIComponent(pair[0])] = decodeURIComponent(pair[1]); }); return param; }; /** * Create a hidden form from the given arguments and return this form. * @param {string} action - URL for form transmission * @param {Object} [data] - Data for form transmission * @param {string} [method] - Method of transmission * @param {string} [target] - Target of transmission * @param {HTMLElement} [container] - Container element of form. 
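* @example
* // Hedged sketch - the action URL and field values are illustrative:
* var form = tui.util.popup.createForm('/submit', {id: '42'}, 'POST', 'popup_1');
* form.submit();
* form.parentNode.removeChild(form); // remove the hidden form after transmission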
* @returns {HTMLElement} Form element */ Popup.prototype.createForm = function(action, data, method, target, container) { var form = document.createElement('form'), input; container = container || document.body; form.method = method || 'POST'; form.action = action || ''; form.target = target || ''; form.style.display = 'none'; collection.forEachOwnProperties(data, function(value, key) { input = document.createElement('input'); input.name = key; input.type = 'hidden'; input.value = value; form.appendChild(input); }); container.appendChild(form); return form; }; /********** * private methods **********/ /** * Return an query string made by parsing the given object * @param {Object} obj - An object that has information for query string * @returns {string} - Query string * @private */ Popup.prototype._parameterize = function(obj) { var query = []; collection.forEachOwnProperties(obj, function(value, key) { query.push(encodeURIComponent(key) + '=' + encodeURIComponent(value)); }); return query.join('&'); }; /** * Open popup * @param {boolean} useIEPostBridge - A switch option whether to use alternative * of tossing POST data to the popup window in IE11 * @param {Object} param - A data for tossing to popup * @param {string} url - Popup url * @param {string} popupName - Popup name * @param {string} optionStr - Setting for popup, ex) 'width=640,height=320,scrollbars=yes' * @returns {Window} Window context of popup * @private */ Popup.prototype._open = function(useIEPostBridge, param, url, popupName, optionStr) { var popup; if (useIEPostBridge) { popup = window.open(this.postBridgeUrl, popupName, optionStr); setTimeout(function() { popup.redirect(url, param); }, 100); } else { popup = window.open(url, popupName, optionStr); } return popup; }; module.exports = new Popup(); /***/ }), /* 12 */ /***/ (function(module, exports, __webpack_require__) { /** * @fileoverview This module has a function for date format. * @author NHN Ent. * FE Development Lab <[email protected]> */ 'use strict'; var type = __webpack_require__(2); var object = __webpack_require__(1); var tokens = /[\\]*YYYY|[\\]*YY|[\\]*MMMM|[\\]*MMM|[\\]*MM|[\\]*M|[\\]*DD|[\\]*D|[\\]*HH|[\\]*H|[\\]*A/gi; var MONTH_STR = [ 'Invalid month', 'January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December' ]; var MONTH_DAYS = [0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]; var replaceMap = { M: function(date) { return Number(date.month); }, MM: function(date) { var month = date.month; return (Number(month) < 10) ? '0' + month : month; }, MMM: function(date) { return MONTH_STR[Number(date.month)].substr(0, 3); }, MMMM: function(date) { return MONTH_STR[Number(date.month)]; }, D: function(date) { return Number(date.date); }, d: function(date) { return replaceMap.D(date); // eslint-disable-line new-cap }, DD: function(date) { var dayInMonth = date.date; return (Number(dayInMonth) < 10) ? '0' + dayInMonth : dayInMonth; }, dd: function(date) { return replaceMap.DD(date); // eslint-disable-line new-cap }, YY: function(date) { return Number(date.year) % 100; }, yy: function(date) { return replaceMap.YY(date); // eslint-disable-line new-cap }, YYYY: function(date) { var prefix = '20', year = date.year; if (year > 69 && year < 100) { prefix = '19'; } return (Number(year) < 100) ? 
prefix + String(year) : year; }, yyyy: function(date) { return replaceMap.YYYY(date); // eslint-disable-line new-cap }, A: function(date) { return date.meridiem; }, a: function(date) { return date.meridiem; }, hh: function(date) { var hour = date.hour; return (Number(hour) < 10) ? '0' + hour : hour; }, HH: function(date) { return replaceMap.hh(date); }, h: function(date) { return String(Number(date.hour)); }, H: function(date) { return replaceMap.h(date); }, m: function(date) { return String(Number(date.minute)); }, mm: function(date) { var minute = date.minute; return (Number(minute) < 10) ? '0' + minute : minute; } }; /** * Check whether the given variables are valid date or not. * @param {number} year - Year * @param {number} month - Month * @param {number} date - Day in month. * @returns {boolean} Is valid? * @private */ function isValidDate(year, month, date) { // eslint-disable-line complexity var isValidYear, isValidMonth, isValid, lastDayInMonth; year = Number(year); month = Number(month); date = Number(date); isValidYear = (year > -1 && year < 100) || ((year > 1969) && (year < 2070)); isValidMonth = (month > 0) && (month < 13); if (!isValidYear || !isValidMonth) { return false; } lastDayInMonth = MONTH_DAYS[month]; if (month === 2 && year % 4 === 0) { if (year % 100 !== 0 || year % 400 === 0) { lastDayInMonth = 29; } } isValid = (date > 0) && (date <= lastDayInMonth); return isValid; } /** * Return a string that transformed from the given form and date. * @param {string} form - Date form * @param {Date|Object} date - Date object * @param {{meridiemSet: {AM: string, PM: string}}} option - Option * @returns {boolean|string} A transformed string or false. * @memberof tui.util * @example * // key | Shorthand * // --------------- |----------------------- * // years | YY / YYYY / yy / yyyy * // months(n) | M / MM * // months(str) | MMM / MMMM * // days | D / DD / d / dd * // hours | H / HH / h / hh * // minutes | m / mm * // meridiem(AM,PM) | A / a * * //-- #1. Get Module --// * var util = require('tui-code-snippet'); // node, commonjs * var util = tui.util; // distribution file * * //-- #2. Use property --// * var dateStr1 = util.formatDate('yyyy-MM-dd', { * year: 2014, * month: 12, * date: 12 * }); * alert(dateStr1); // '2014-12-12' * * var dateStr2 = util.formatDate('MMM DD YYYY HH:mm', { * year: 1999, * month: 9, * date: 9, * hour: 0, * minute: 2 * }); * alert(dateStr2); // 'Sep 09 1999 00:02' * * var dt = new Date(2010, 2, 13), * dateStr3 = util.formatDate('yyyy년 M월 dd일', dt); * alert(dateStr3); // '2010년 3월 13일' * * var option4 = { * meridiemSet: { * AM: '오전', * PM: '오후' * } * }; * var date4 = {year: 1999, month: 9, date: 9, hour: 13, minute: 2}; * var dateStr4 = util.formatDate('yyyy-MM-dd A hh:mm', date4, option4)); * alert(dateStr4); // '1999-09-09 오후 01:02' */ function formatDate(form, date, option) { // eslint-disable-line complexity var am = object.pick(option, 'meridiemSet', 'AM') || 'AM'; var pm = object.pick(option, 'meridiemSet', 'PM') || 'PM'; var meridiem, nDate, resultStr; if (type.isDate(date)) { nDate = { year: date.getFullYear(), month: date.getMonth() + 1, date: date.getDate(), hour: date.getHours(), minute: date.getMinutes() }; } else { nDate = { year: date.year, month: date.month, date: date.date, hour: date.hour, minute: date.minute }; } if (!isValidDate(nDate.year, nDate.month, nDate.date)) { return false; } nDate.meridiem = ''; if (/([^\\]|^)[aA]\b/.test(form)) { meridiem = (nDate.hour > 11) ? 
pm : am; if (nDate.hour > 12) { // See the clock system: https://en.wikipedia.org/wiki/12-hour_clock nDate.hour %= 12; } if (nDate.hour === 0) { nDate.hour = 12; } nDate.meridiem = meridiem; } resultStr = form.replace(tokens, function(key) { if (key.indexOf('\\') > -1) { // escape character return key.replace(/\\/, ''); } return replaceMap[key](nDate) || ''; }); return resultStr; } module.exports = formatDate; /***/ }), /* 13 */ /***/ (function(module, exports, __webpack_require__) { /** * @fileoverview * This module provides a function to make a constructor * that can inherit from the other constructors like the CLASS easily. * @author NHN Ent. * FE Development Lab <[email protected]> */ 'use strict'; var inherit = __webpack_require__(6).inherit; var extend = __webpack_require__(1).extend; /** * Help a constructor to be defined and to inherit from the other constructors * @param {*} [parent] Parent constructor * @param {Object} props Members of constructor * @param {Function} props.init Initialization method * @param {Object} [props.static] Static members of constructor * @returns {*} Constructor * @memberof tui.util * @example * //-- #1. Get Module --// * var util = require('tui-code-snippet'); // node, commonjs * var util = tui.util; // distribution file * * //-- #2. Use property --// * var Parent = util.defineClass({ * init: function() { // constuructor * this.name = 'made by def'; * }, * method: function() { * // ... * }, * static: { * staticMethod: function() { * // ... * } * } * }); * * var Child = util.defineClass(Parent, { * childMethod: function() {} * }); * * Parent.staticMethod(); * * var parentInstance = new Parent(); * console.log(parentInstance.name); //made by def * parentInstance.staticMethod(); // Error * * var childInstance = new Child(); * childInstance.method(); * childInstance.childMethod(); */ function defineClass(parent, props) { var obj; if (!props) { props = parent; parent = null; } obj = props.init || function() {}; if (parent) { inherit(obj, parent); } if (props.hasOwnProperty('static')) { extend(obj, props['static']); delete props['static']; } extend(obj.prototype, props); return obj; } module.exports = defineClass; /***/ }), /* 14 */ /***/ (function(module, exports, __webpack_require__) { /** * @fileoverview Define module * @author NHN Ent. * FE Development Lab <[email protected]> * @dependency type.js, defineNamespace.js */ 'use strict'; var defineNamespace = __webpack_require__(15); var type = __webpack_require__(2); var INITIALIZATION_METHOD_NAME = 'initialize'; /** * Define module * @param {string} namespace - Namespace of module * @param {Object} moduleDefinition - Object literal for module * @returns {Object} Defined module * @memberof tui.util * @example * //-- #1. Get Module --// * var util = require('tui-code-snippet'); // node, commonjs * var util = tui.util; // distribution file * * //-- #2. 
Use property --// * var myModule = util.defineModule('modules.myModule', { * name: 'john', * message: '', * initialize: function() { * this.message = 'hello world'; * }, * getMessage: function() { * return this.name + ': ' + this.message * } * }); * * console.log(myModule.getMessage()); // 'john: hello world'; */ function defineModule(namespace, moduleDefinition) { var base = moduleDefinition || {}; if (type.isFunction(base[INITIALIZATION_METHOD_NAME])) { base[INITIALIZATION_METHOD_NAME](); } return defineNamespace(namespace, base); } module.exports = defineModule; /***/ }), /* 15 */ /***/ (function(module, exports, __webpack_require__) { /** * @fileoverview Define namespace * @author NHN Ent. * FE Development Lab <[email protected]> * @dependency object.js, collection.js */ 'use strict'; var collection = __webpack_require__(4); var object = __webpack_require__(1); /** * Define namespace * @param {string} namespace - Namespace (ex- 'foo.bar.baz') * @param {(object|function)} props - A set of modules or one module * @param {boolean} [isOverride] - Override the props to the namespace.<br> * (It removes previous properties of this namespace) * @returns {(object|function)} Defined namespace * @memberof tui.util * @example * //-- #1. Get Module --// * var util = require('tui-code-snippet'); // node, commonjs * var util = tui.util; // distribution file * * //-- #2. Use property --// * var neComp = util.defineNamespace; * neComp.listMenu = defineClass({ * init: function() { * // ... * } * }); */ function defineNamespace(namespace, props, isOverride) { var names, result, prevLast, last; names = namespace.split('.'); names.unshift(window); result = collection.reduce(names, function(obj, name) { obj[name] = obj[name] || {}; return obj[name]; }); if (isOverride) { last = names.pop(); prevLast = object.pick.apply(null, names); result = prevLast[last] = props; } else { object.extend(result, props); } return result; } module.exports = defineNamespace; /***/ }), /* 16 */ /***/ (function(module, exports, __webpack_require__) { /** * @fileoverview * This module provides some functions for custom events.<br> * And it is implemented in the observer design pattern. * @author NHN Ent. * FE Development Lab <[email protected]> */ 'use strict'; var collection = __webpack_require__(4); var type = __webpack_require__(2); var object = __webpack_require__(1); var R_EVENTNAME_SPLIT = /\s+/g; /** * A unit of event handler item. * @ignore * @typedef {object} HandlerItem * @property {function} fn - event handler * @property {object} ctx - context of event handler */ /** * @class * @memberof tui.util * @example * // node, commonjs * var CustomEvents = require('tui-code-snippet').CustomEvents; * @example * // distribution file, script * <script src='path-to/tui-code-snippt.js'></script> * <script> * var CustomEvents = tui.util.CustomEvents; * </script> */ function CustomEvents() { /** * @type {HandlerItem[]} */ this.events = null; /** * only for checking specific context event was binded * @type {object[]} */ this.contexts = null; } /** * Mixin custom events feature to specific constructor * @param {function} func - constructor * @example * //-- #1. Get Module --// * var CustomEvents = require('tui-code-snippet').CustomEvents; // node, commonjs * var CustomEvents = tui.util.CustomEvents; // distribution file * * //-- #2. 
Use property --// * var model; * function Model() { * this.name = ''; * } * CustomEvents.mixin(Model); * * model = new Model(); * model.on('change', function() { this.name = 'model'; }, this); * model.fire('change'); * alert(model.name); // 'model'; */ CustomEvents.mixin = function(func) { object.extend(func.prototype, CustomEvents.prototype); }; /** * Get HandlerItem object * @param {function} handler - handler function * @param {object} [context] - context for handler * @returns {HandlerItem} HandlerItem object * @private */ CustomEvents.prototype._getHandlerItem = function(handler, context) { var item = {handler: handler}; if (context) { item.context = context; } return item; }; /** * Get event object safely * @param {string} [eventName] - create sub event map if not exist. * @returns {(object|array)} event object. if you supplied `eventName` * parameter then make new array and return it * @private */ CustomEvents.prototype._safeEvent = function(eventName) { var events = this.events; var byName; if (!events) { events = this.events = {}; } if (eventName) { byName = events[eventName]; if (!byName) { byName = []; events[eventName] = byName; } events = byName; } return events; }; /** * Get context array safely * @returns {array} context array * @private */ CustomEvents.prototype._safeContext = function() { var context = this.contexts; if (!context) { context = this.contexts = []; } return context; }; /** * Get index of context * @param {object} ctx - context that used for bind custom event * @returns {number} index of context * @private */ CustomEvents.prototype._indexOfContext = function(ctx) { var context = this._safeContext(); var index = 0; while (context[index]) { if (ctx === context[index][0]) { return index; } index += 1; } return -1; }; /** * Memorize supplied context for recognize supplied object is context or * name: handler pair object when off() * @param {object} ctx - context object to memorize * @private */ CustomEvents.prototype._memorizeContext = function(ctx) { var context, index; if (!type.isExisty(ctx)) { return; } context = this._safeContext(); index = this._indexOfContext(ctx); if (index > -1) { context[index][1] += 1; } else { context.push([ctx, 1]); } }; /** * Forget supplied context object * @param {object} ctx - context object to forget * @private */ CustomEvents.prototype._forgetContext = function(ctx) { var context, contextIndex; if (!type.isExisty(ctx)) { return; } context = this._safeContext(); contextIndex = this._indexOfContext(ctx); if (contextIndex > -1) { context[contextIndex][1] -= 1; if (context[contextIndex][1] <= 0) { context.splice(contextIndex, 1); } } }; /** * Bind event handler * @param {(string|{name:string, handler:function})} eventName - custom * event name or an object {eventName: handler} * @param {(function|object)} [handler] - handler function or context * @param {object} [context] - context for binding * @private */ CustomEvents.prototype._bindEvent = function(eventName, handler, context) { var events = this._safeEvent(eventName); this._memorizeContext(context); events.push(this._getHandlerItem(handler, context)); }; /** * Bind event handlers * @param {(string|{name:string, handler:function})} eventName - custom * event name or an object {eventName: handler} * @param {(function|object)} [handler] - handler function or context * @param {object} [context] - context for binding * //-- #1. 
Get Module --// * var CustomEvents = require('tui-code-snippet').CustomEvents; // node, commonjs * var CustomEvents = tui.util.CustomEvents; // distribution file * * //-- #2. Use property --// * // # 2.1 Basic Usage * CustomEvents.on('onload', handler); * * // # 2.2 With context * CustomEvents.on('onload', handler, myObj); * * // # 2.3 Bind by object that name, handler pairs * CustomEvents.on({ * 'play': handler, * 'pause': handler2 * }); * * // # 2.4 Bind by object that name, handler pairs with context object * CustomEvents.on({ * 'play': handler * }, myObj); */ CustomEvents.prototype.on = function(eventName, handler, context) { var self = this; if (type.isString(eventName)) { // [syntax 1, 2] eventName = eventName.split(R_EVENTNAME_SPLIT); collection.forEach(eventName, function(name) { self._bindEvent(name, handler, context); }); } else if (type.isObject(eventName)) { // [syntax 3, 4] context = handler; collection.forEach(eventName, function(func, name) { self.on(name, func, context); }); } }; /** * Bind one-shot event handlers * @param {(string|{name:string,handler:function})} eventName - custom * event name or an object {eventName: handler} * @param {function|object} [handler] - handler function or context * @param {object} [context] - context for binding */ CustomEvents.prototype.once = function(eventName, handler, context) { var self = this; if (type.isObject(eventName)) { context = handler; collection.forEach(eventName, function(func, name) { self.once(name, func, context); }); return; } function onceHandler() { // eslint-disable-line require-jsdoc handler.apply(context, arguments); self.off(eventName, onceHandler, context); } this.on(eventName, onceHandler, context); }; /** * Splice supplied array by callback result * @param {array} arr - array to splice * @param {function} predicate - function return boolean * @private */ CustomEvents.prototype._spliceMatches = function(arr, predicate) { var i = 0; var len; if (!type.isArray(arr)) { return; } for (len = arr.length; i < len; i += 1) { if (predicate(arr[i]) === true) { arr.splice(i, 1); len -= 1; i -= 1; } } }; /** * Get matcher for unbind specific handler events * @param {function} handler - handler function * @returns {function} handler matcher * @private */ CustomEvents.prototype._matchHandler = function(handler) { var self = this; return function(item) { var needRemove = handler === item.handler; if (needRemove) { self._forgetContext(item.context); } return needRemove; }; }; /** * Get matcher for unbind specific context events * @param {object} context - context * @returns {function} object matcher * @private */ CustomEvents.prototype._matchContext = function(context) { var self = this; return function(item) { var needRemove = context === item.context; if (needRemove) { self._forgetContext(item.context); } return needRemove; }; }; /** * Get matcher for unbind specific hander, context pair events * @param {function} handler - handler function * @param {object} context - context * @returns {function} handler, context matcher * @private */ CustomEvents.prototype._matchHandlerAndContext = function(handler, context) { var self = this; return function(item) { var matchHandler = (handler === item.handler); var matchContext = (context === item.context); var needRemove = (matchHandler && matchContext); if (needRemove) { self._forgetContext(item.context); } return needRemove; }; }; /** * Unbind event by event name * @param {string} eventName - custom event name to unbind * @param {function} [handler] - handler function * @private */ 
CustomEvents.prototype._offByEventName = function(eventName, handler) { var self = this; var forEach = collection.forEachArray; var andByHandler = type.isFunction(handler); var matchHandler = self._matchHandler(handler); eventName = eventName.split(R_EVENTNAME_SPLIT); forEach(eventName, function(name) { var handlerItems = self._safeEvent(name); if (andByHandler) { self._spliceMatches(handlerItems, matchHandler); } else { forEach(handlerItems, function(item) { self._forgetContext(item.context); }); self.events[name] = []; } }); }; /** * Unbind event by handler function * @param {function} handler - handler function * @private */ CustomEvents.prototype._offByHandler = function(handler) { var self = this; var matchHandler = this._matchHandler(handler); collection.forEach(this._safeEvent(), function(handlerItems) { self._spliceMatches(handlerItems, matchHandler); }); }; /** * Unbind event by object(name: handler pair object or context object) * @param {object} obj - context or {name: handler} pair object * @param {function} handler - handler function * @private */ CustomEvents.prototype._offByObject = function(obj, handler) { var self = this; var matchFunc; if (this._indexOfContext(obj) < 0) { collection.forEach(obj, function(func, name) { self.off(name, func); }); } else if (type.isString(handler)) { matchFunc = this._matchContext(obj); self._spliceMatches(this._safeEvent(handler), matchFunc); } else if (type.isFunction(handler)) { matchFunc = this._matchHandlerAndContext(handler, obj); collection.forEach(this._safeEvent(), function(handlerItems) { self._spliceMatches(handlerItems, matchFunc); }); } else { matchFunc = this._matchContext(obj); collection.forEach(this._safeEvent(), function(handlerItems) { self._spliceMatches(handlerItems, matchFunc); }); } }; /** * Unbind custom events * @param {(string|object|function)} eventName - event name or context or * {name: handler} pair object or handler function * @param {(function)} handler - handler function * @example * //-- #1. Get Module --// * var CustomEvents = require('tui-code-snippet').CustomEvents; // node, commonjs * var CustomEvents = tui.util.CustomEvents; // distribution file * * //-- #2. Use property --// * // # 2.1 off by event name * CustomEvents.off('onload'); * * // # 2.2 off by event name and handler * CustomEvents.off('play', handler); * * // # 2.3 off by handler * CustomEvents.off(handler); * * // # 2.4 off by context * CustomEvents.off(myObj); * * // # 2.5 off by context and handler * CustomEvents.off(myObj, handler); * * // # 2.6 off by context and event name * CustomEvents.off(myObj, 'onload'); * * // # 2.7 off by an Object.<string, function> that is {eventName: handler} * CustomEvents.off({ * 'play': handler, * 'pause': handler2 * }); * * // # 2.8 off the all events * CustomEvents.off(); */ CustomEvents.prototype.off = function(eventName, handler) { if (type.isString(eventName)) { // [syntax 1, 2] this._offByEventName(eventName, handler); } else if (!arguments.length) { // [syntax 8] this.events = {}; this.contexts = []; } else if (type.isFunction(eventName)) { // [syntax 3] this._offByHandler(eventName); } else if (type.isObject(eventName)) { // [syntax 4, 5, 6] this._offByObject(eventName, handler); } }; /** * Fire custom event * @param {string} eventName - name of custom event */ CustomEvents.prototype.fire = function(eventName) { // eslint-disable-line this.invoke.apply(this, arguments); }; /** * Fire a event and returns the result of operation 'boolean AND' with all * listener's results. 
* * So, it is different from {@link CustomEvents#fire}. * * In service code, this is usually used as a 'before' event at the component level, * to signal that the event is cancelable. * @param {string} eventName - Custom event name * @param {...*} data - Data for event * @returns {boolean} The result of operation 'boolean AND' * @example * var map = new Map(); * map.on({ * 'beforeZoom': function() { * // It should cancel the 'zoom' event by some conditions. * if (that.disabled && this.getState()) { * return false; * } * return true; * } * }); * * if (this.invoke('beforeZoom')) { // check the result of 'beforeZoom' * // if true, * // doSomething * } */ CustomEvents.prototype.invoke = function(eventName) { var events, args, index, item; if (!this.hasListener(eventName)) { return true; } events = this._safeEvent(eventName); args = Array.prototype.slice.call(arguments, 1); index = 0; while (events[index]) { item = events[index]; if (item.handler.apply(item.context, args) === false) { return false; } index += 1; } return true; }; /** * Return whether at least one of the handlers is registered in the given * event name. * @param {string} eventName - Custom event name * @returns {boolean} Is there at least one handler in event name? */ CustomEvents.prototype.hasListener = function(eventName) { return this.getListenerLength(eventName) > 0; }; /** * Return a count of events registered. * @param {string} eventName - Custom event name * @returns {number} number of events */ CustomEvents.prototype.getListenerLength = function(eventName) { var events = this._safeEvent(eventName); return events.length; }; module.exports = CustomEvents; /***/ }), /* 17 */ /***/ (function(module, exports, __webpack_require__) { /** * @fileoverview This module provides an Enum Constructor. * @author NHN Ent. * FE Development Lab <[email protected]> * @example * // node, commonjs * var Enum = require('tui-code-snippet').Enum; * @example * // distribution file, script * <script src='path-to/tui-code-snippet.js'></script> * <script> * var Enum = tui.util.Enum; * </script> */ 'use strict'; var collection = __webpack_require__(4); var type = __webpack_require__(2); /** * Check whether the defineProperty() method is supported. * @type {boolean} * @ignore */ var isSupportDefinedProperty = (function() { try { Object.defineProperty({}, 'x', {}); return true; } catch (e) { return false; } })(); /** * A unique value of a constant. * @type {number} * @ignore */ var enumValue = 0; /** * Make a constant-list that has unique values.<br> * In modern browsers (except IE8 and lower),<br> * a value defined once cannot be changed. * * @param {...string|string[]} itemList Constant-list (An array of strings is available) * @class * @memberof tui.util * @example * //-- #1. Get Module --// * var Enum = require('tui-code-snippet').Enum; // node, commonjs * var Enum = tui.util.Enum; // distribution file * * //-- #2. Use property --// * var MYENUM = new Enum('TYPE1', 'TYPE2'); * var MYENUM2 = new Enum(['TYPE1', 'TYPE2']); * * //usage * if (value === MYENUM.TYPE1) { * .... * } * * //add (If a duplicate name is given, it will be disregarded.) * MYENUM.set('TYPE3', 'TYPE4'); * * //get name of a constant by a value * MYENUM.getName(MYENUM.TYPE1); // 'TYPE1' * * // In modern browsers (except IE8 and lower), a value cannot be changed in constants.
* var originalValue = MYENUM.TYPE1; * MYENUM.TYPE1 = 1234; // maybe TypeError * MYENUM.TYPE1 === originalValue; // true **/ function Enum(itemList) { if (itemList) { this.set.apply(this, arguments); } } /** * Define a constants-list * @param {...string|string[]} itemList Constant-list (An array of string is available) */ Enum.prototype.set = function(itemList) { var self = this; if (!type.isArray(itemList)) { itemList = collection.toArray(arguments); } collection.forEach(itemList, function itemListIteratee(item) { self._addItem(item); }); }; /** * Return a key of the constant. * @param {number} value A value of the constant. * @returns {string|undefined} Key of the constant. */ Enum.prototype.getName = function(value) { var self = this; var foundedKey; collection.forEach(this, function(itemValue, key) { // eslint-disable-line consistent-return if (self._isEnumItem(key) && value === itemValue) { foundedKey = key; return false; } }); return foundedKey; }; /** * Create a constant. * @private * @param {string} name Constant name. (It will be a key of a constant) */ Enum.prototype._addItem = function(name) { var value; if (!this.hasOwnProperty(name)) { value = this._makeEnumValue(); if (isSupportDefinedProperty) { Object.defineProperty(this, name, { enumerable: true, configurable: false, writable: false, value: value }); } else { this[name] = value; } } }; /** * Return a unique value for assigning to a constant. * @private * @returns {number} A unique value */ Enum.prototype._makeEnumValue = function() { var value; value = enumValue; enumValue += 1; return value; }; /** * Return whether a constant from the given key is in instance or not. * @param {string} key - A constant key * @returns {boolean} Result * @private */ Enum.prototype._isEnumItem = function(key) { return type.isNumber(this[key]); }; module.exports = Enum; /***/ }), /* 18 */ /***/ (function(module, exports, __webpack_require__) { /** * @fileoverview * Implements the ExMap (Extended Map) object. * @author NHN Ent. * FE Development Lab <[email protected]> */ 'use strict'; var collection = __webpack_require__(4); var Map = __webpack_require__(19); // Caching tui.util for performance enhancing var mapAPIsForRead = ['get', 'has', 'forEach', 'keys', 'values', 'entries']; var mapAPIsForDelete = ['delete', 'clear']; /** * The ExMap object is Extended Version of the tui.util.Map object.<br> * and added some useful feature to make it easy to manage the Map object. * @constructor * @param {Array} initData - Array of key-value pairs (2-element Arrays). * Each key-value pair will be added to the new Map * @memberof tui.util * @example * // node, commonjs * var ExMap = require('tui-code-snippet').ExMap; * @example * // distribution file, script * <script src='path-to/tui-code-snippt.js'></script> * <script> * var ExMap = tui.util.ExMap; * <script> */ function ExMap(initData) { this._map = new Map(initData); this.size = this._map.size; } collection.forEachArray(mapAPIsForRead, function(name) { ExMap.prototype[name] = function() { return this._map[name].apply(this._map, arguments); }; }); collection.forEachArray(mapAPIsForDelete, function(name) { ExMap.prototype[name] = function() { var result = this._map[name].apply(this._map, arguments); this.size = this._map.size; return result; }; }); ExMap.prototype.set = function() { this._map.set.apply(this._map, arguments); this.size = this._map.size; return this; }; /** * Sets all of the key-value pairs in the specified object to the Map object. 
* @param {Object} object - Plain object that has a key-value pair */ ExMap.prototype.setObject = function(object) { collection.forEachOwnProperties(object, function(value, key) { this.set(key, value); }, this); }; /** * Removes the elements associated with keys in the specified array. * @param {Array} keys - Array that contains keys of the element to remove */ ExMap.prototype.deleteByKeys = function(keys) { collection.forEachArray(keys, function(key) { this['delete'](key); }, this); }; /** * Sets all of the key-value pairs in the specified Map object to this Map object. * @param {Map} map - Map object to be merged into this Map object */ ExMap.prototype.merge = function(map) { map.forEach(function(value, key) { this.set(key, value); }, this); }; /** * Looks through each key-value pair in the map and returns the new ExMap object of * all key-value pairs that pass a truth test implemented by the provided function. * @param {function} predicate - Function to test each key-value pair of the Map object.<br> * Invoked with arguments (value, key). Return true to keep the element, false otherwise. * @returns {ExMap} A new ExMap object */ ExMap.prototype.filter = function(predicate) { var filtered = new ExMap(); this.forEach(function(value, key) { if (predicate(value, key)) { filtered.set(key, value); } }); return filtered; }; module.exports = ExMap; /***/ }), /* 19 */ /***/ (function(module, exports, __webpack_require__) { /** * @fileoverview * Implements the Map object. * @author NHN Ent. * FE Development Lab <[email protected]> */ 'use strict'; var collection = __webpack_require__(4); var type = __webpack_require__(2); var array = __webpack_require__(3); var browser = __webpack_require__(10); var func = __webpack_require__(5); /** * Using undefined for a key can be ambiguous if there's a deleted item in the array,<br> * which is also undefined when accessed by index.<br> * So use this unique object as an undefined key to distinguish it from deleted keys. * @private * @constant */ var _KEY_FOR_UNDEFINED = {}; /** * For using NaN as a key, use this unique object as a NaN key.<br> * This makes it easier and faster to compare an object with each key in the array<br> * with no exceptional comparing for NaN. * @private * @constant */ var _KEY_FOR_NAN = {}; /** * Constructor of MapIterator<br> * Creates iterator object with new keyword. * @constructor * @param {Array} keys - The array of keys in the map * @param {function} valueGetter - Function that returns certain value, * taking key and keyIndex as arguments. * @ignore */ function MapIterator(keys, valueGetter) { this._keys = keys; this._valueGetter = valueGetter; this._length = this._keys.length; this._index = -1; this._done = false; } /** * Implementation of Iterator protocol. * @returns {{done: boolean, value: *}} Object that contains done(boolean) and value. */ MapIterator.prototype.next = function() { var data = {}; do { this._index += 1; } while (type.isUndefined(this._keys[this._index]) && this._index < this._length); if (this._index >= this._length) { data.done = true; } else { data.done = false; data.value = this._valueGetter(this._keys[this._index], this._index); } return data; }; /** * The Map object implements the ES6 Map specification as closely as possible.<br> * For using objects and primitive values as keys, this object uses array internally.<br> * So if the key is not a string, get(), set(), has(), delete() will operate in O(n),<br> * and it can cause performance issues with a large dataset.
* * Features listed below are not supported. (can't be implemented without native support) * - Map object is iterable<br> * - Iterable object can be used as an argument of constructor * * If the browser supports full implementation of ES6 Map specification, native Map object * will be used internally. * @class * @param {Array} initData - Array of key-value pairs (2-element Arrays). * Each key-value pair will be added to the new Map * @memberof tui.util * @example * // node, commonjs * var Map = require('tui-code-snippet').Map; * @example * // distribution file, script * <script src='path-to/tui-code-snippet.js'></script> * <script> * var Map = tui.util.Map; * </script> */ function Map(initData) { this._valuesForString = {}; this._valuesForIndex = {}; this._keys = []; if (initData) { this._setInitData(initData); } this.size = 0; } /* eslint-disable no-extend-native */ /** * Add all elements in the initData to the Map object. * @private * @param {Array} initData - Array of key-value pairs to add to the Map object */ Map.prototype._setInitData = function(initData) { if (!type.isArray(initData)) { throw new Error('Only Array is supported.'); } collection.forEachArray(initData, function(pair) { this.set(pair[0], pair[1]); }, this); }; /** * Returns true if the specified value is NaN.<br> * For using NaN as a key, use this method to test equality of NaN<br> * because === operator doesn't work for NaN. * @private * @param {*} value - Any object to be tested * @returns {boolean} True if value is NaN, false otherwise. */ Map.prototype._isNaN = function(value) { return typeof value === 'number' && value !== value; // eslint-disable-line no-self-compare }; /** * Returns the index of the specified key. * @private * @param {*} key - The key object to search for. * @returns {number} The index of the specified key */ Map.prototype._getKeyIndex = function(key) { var result = -1; var value; if (type.isString(key)) { value = this._valuesForString[key]; if (value) { result = value.keyIndex; } } else { result = array.inArray(key, this._keys); } return result; }; /** * Returns the original key of the specified key. * @private * @param {*} key - key * @returns {*} Original key */ Map.prototype._getOriginKey = function(key) { var originKey = key; if (key === _KEY_FOR_UNDEFINED) { originKey = undefined; // eslint-disable-line no-undefined } else if (key === _KEY_FOR_NAN) { originKey = NaN; } return originKey; }; /** * Returns the unique key of the specified key. * @private * @param {*} key - key * @returns {*} Unique key */ Map.prototype._getUniqueKey = function(key) { var uniqueKey = key; if (type.isUndefined(key)) { uniqueKey = _KEY_FOR_UNDEFINED; } else if (this._isNaN(key)) { uniqueKey = _KEY_FOR_NAN; } return uniqueKey; }; /** * Returns the value object of the specified key. * @private * @param {*} key - The key of the value object to be returned * @param {number} keyIndex - The index of the key * @returns {{keyIndex: number, origin: *}} Value object */ Map.prototype._getValueObject = function(key, keyIndex) { // eslint-disable-line consistent-return if (type.isString(key)) { return this._valuesForString[key]; } if (type.isUndefined(keyIndex)) { keyIndex = this._getKeyIndex(key); } if (keyIndex >= 0) { return this._valuesForIndex[keyIndex]; } }; /** * Returns the original value of the specified key.
* @private * @param {*} key - The key of the value object to be returned * @param {number} keyIndex - The index of the key * @returns {*} Original value */ Map.prototype._getOriginValue = function(key, keyIndex) { return this._getValueObject(key, keyIndex).origin; }; /** * Returns key-value pair of the specified key. * @private * @param {*} key - The key of the value object to be returned * @param {number} keyIndex - The index of the key * @returns {Array} Key-value Pair */ Map.prototype._getKeyValuePair = function(key, keyIndex) { return [this._getOriginKey(key), this._getOriginValue(key, keyIndex)]; }; /** * Creates the wrapper object of original value that contains a key index * and returns it. * @private * @param {type} origin - Original value * @param {type} keyIndex - Index of the key * @returns {{keyIndex: number, origin: *}} Value object */ Map.prototype._createValueObject = function(origin, keyIndex) { return { keyIndex: keyIndex, origin: origin }; }; /** * Sets the value for the key in the Map object. * @param {*} key - The key of the element to add to the Map object * @param {*} value - The value of the element to add to the Map object * @returns {Map} The Map object */ Map.prototype.set = function(key, value) { var uniqueKey = this._getUniqueKey(key); var keyIndex = this._getKeyIndex(uniqueKey); var valueObject; if (keyIndex < 0) { keyIndex = this._keys.push(uniqueKey) - 1; this.size += 1; } valueObject = this._createValueObject(value, keyIndex); if (type.isString(key)) { this._valuesForString[key] = valueObject; } else { this._valuesForIndex[keyIndex] = valueObject; } return this; }; /** * Returns the value associated to the key, or undefined if there is none. * @param {*} key - The key of the element to return * @returns {*} Element associated with the specified key */ Map.prototype.get = function(key) { var uniqueKey = this._getUniqueKey(key); var value = this._getValueObject(uniqueKey); return value && value.origin; }; /** * Returns a new Iterator object that contains the keys for each element * in the Map object in insertion order. * @returns {Iterator} A new Iterator object */ Map.prototype.keys = function() { return new MapIterator(this._keys, func.bind(this._getOriginKey, this)); }; /** * Returns a new Iterator object that contains the values for each element * in the Map object in insertion order. * @returns {Iterator} A new Iterator object */ Map.prototype.values = function() { return new MapIterator(this._keys, func.bind(this._getOriginValue, this)); }; /** * Returns a new Iterator object that contains the [key, value] pairs * for each element in the Map object in insertion order. * @returns {Iterator} A new Iterator object */ Map.prototype.entries = function() { return new MapIterator(this._keys, func.bind(this._getKeyValuePair, this)); }; /** * Returns a boolean asserting whether a value has been associated to the key * in the Map object or not. * @param {*} key - The key of the element to test for presence * @returns {boolean} True if an element with the specified key exists; * Otherwise false */ Map.prototype.has = function(key) { return !!this._getValueObject(key); }; /** * Removes the specified element from a Map object. * @param {*} key - The key of the element to remove * @function delete * @memberof tui.util.Map.prototype */ // cannot use reserved keyword as a property name in IE8 and under. 
Map.prototype['delete'] = function(key) { var keyIndex; if (type.isString(key)) { if (this._valuesForString[key]) { keyIndex = this._valuesForString[key].keyIndex; delete this._valuesForString[key]; } } else { keyIndex = this._getKeyIndex(key); if (keyIndex >= 0) { delete this._valuesForIndex[keyIndex]; } } if (keyIndex >= 0) { delete this._keys[keyIndex]; this.size -= 1; } }; /** * Executes a provided function once per each key/value pair in the Map object, * in insertion order. * @param {function} callback - Function to execute for each element * @param {thisArg} thisArg - Value to use as this when executing callback */ Map.prototype.forEach = function(callback, thisArg) { thisArg = thisArg || this; collection.forEachArray(this._keys, function(key) { if (!type.isUndefined(key)) { callback.call(thisArg, this._getValueObject(key).origin, key, this); } }, this); }; /** * Removes all elements from a Map object. */ Map.prototype.clear = function() { Map.call(this); }; /* eslint-enable no-extend-native */ // Use native Map object if exists. // But only latest versions of Chrome and Firefox support full implementation. (function() { if (window.Map && ( (browser.firefox && browser.version >= 37) || (browser.chrome && browser.version >= 42) ) ) { Map = window.Map; // eslint-disable-line no-func-assign } })(); module.exports = Map; /***/ }), /* 20 */ /***/ (function(module, exports, __webpack_require__) { /** * @fileoverview This module provides the HashMap constructor. * @author NHN Ent. * FE Development Lab <[email protected]> */ 'use strict'; var collection = __webpack_require__(4); var type = __webpack_require__(2); /** * All the data in hashMap begin with _MAPDATAPREFIX; * @type {string} * @private */ var _MAPDATAPREFIX = 'å'; /** * HashMap can handle the key-value pairs.<br> * Caution:<br> * HashMap instance has a length property but is not an instance of Array. * @param {Object} [obj] A initial data for creation. * @constructor * @memberof tui.util * @deprecated since version 1.3.0 * @example * // node, commonjs * var HashMap = require('tui-code-snippet').HashMap; * var hm = new tui.util.HashMap({ 'mydata': { 'hello': 'imfine' }, 'what': 'time' }); * @example * // distribution file, script * <script src='path-to/tui-code-snippt.js'></script> * <script> * var HashMap = tui.util.HashMap; * <script> * var hm = new tui.util.HashMap({ 'mydata': { 'hello': 'imfine' }, 'what': 'time' }); */ function HashMap(obj) { /** * size * @type {number} */ this.length = 0; if (obj) { this.setObject(obj); } } /** * Set a data from the given key with value or the given object. * @param {string|Object} key A string or object for key * @param {*} [value] A data * @example * //-- #1. Get Module --// * var HashMap = require('tui-code-snippet').HashMap; // node, commonjs * var HashMap = tui.util.HashMap; // distribution file * * //-- #2. Use property --// * var hm = new HashMap(); * hm.set('key', 'value'); * hm.set({ * 'key1': 'data1', * 'key2': 'data2' * }); */ HashMap.prototype.set = function(key, value) { if (arguments.length === 2) { this.setKeyValue(key, value); } else { this.setObject(key); } }; /** * Set a data from the given key with value. * @param {string} key A string for key * @param {*} value A data * @example * //-- #1. Get Module --// * var HashMap = require('tui-code-snippet').HashMap; // node, commonjs * var HashMap = tui.util.HashMap; // distribution file * * //-- #2. 
Use property --// * var hm = new HashMap(); * hm.setKeyValue('key', 'value'); */ HashMap.prototype.setKeyValue = function(key, value) { if (!this.has(key)) { this.length += 1; } this[this.encodeKey(key)] = value; }; /** * Set a data from the given object. * @param {Object} obj A object for data * @example * //-- #1. Get Module --// * var HashMap = require('tui-code-snippet').HashMap; // node, commonjs * var HashMap = tui.util.HashMap; // distribution file * * //-- #2. Use property --// * var hm = new HashMap(); * hm.setObject({ * 'key1': 'data1', * 'key2': 'data2' * }); */ HashMap.prototype.setObject = function(obj) { var self = this; collection.forEachOwnProperties(obj, function(value, key) { self.setKeyValue(key, value); }); }; /** * Merge with the given another hashMap. * @param {HashMap} hashMap Another hashMap instance */ HashMap.prototype.merge = function(hashMap) { var self = this; hashMap.each(function(value, key) { self.setKeyValue(key, value); }); }; /** * Encode the given key for hashMap. * @param {string} key A string for key * @returns {string} A encoded key * @private */ HashMap.prototype.encodeKey = function(key) { return _MAPDATAPREFIX + key; }; /** * Decode the given key in hashMap. * @param {string} key A string for key * @returns {string} A decoded key * @private */ HashMap.prototype.decodeKey = function(key) { var decodedKey = key.split(_MAPDATAPREFIX); return decodedKey[decodedKey.length - 1]; }; /** * Return the value from the given key. * @param {string} key A string for key * @returns {*} The value from a key * @example * //-- #1. Get Module --// * var HashMap = require('tui-code-snippet').HashMap; // node, commonjs * var HashMap = tui.util.HashMap; // distribution file * * //-- #2. Use property --// * var hm = new HashMap(); * hm.set('key', 'value'); * hm.get('key') // value */ HashMap.prototype.get = function(key) { return this[this.encodeKey(key)]; }; /** * Check the existence of a value from the key. * @param {string} key A string for key * @returns {boolean} Indicating whether a value exists or not. * @example * //-- #1. Get Module --// * var HashMap = require('tui-code-snippet').HashMap; // node, commonjs * var HashMap = tui.util.HashMap; // distribution file * * //-- #2. Use property --// * var hm = new HashMap(); * hm.set('key', 'value'); * hm.has('key') // true */ HashMap.prototype.has = function(key) { return this.hasOwnProperty(this.encodeKey(key)); }; /** * Remove a data(key-value pairs) from the given key or the given key-list. * @param {...string|string[]} key A string for key * @returns {string|string[]} A removed data * @example * //-- #1. Get Module --// * var HashMap = require('tui-code-snippet').HashMap; // node, commonjs * var HashMap = tui.util.HashMap; // distribution file * * //-- #2. Use property --// * var hm = new HashMap(); * hm.set('key', 'value'); * hm.set('key2', 'value'); * * hm.remove('key'); * hm.remove('key', 'key2'); * hm.remove(['key', 'key2']); */ HashMap.prototype.remove = function(key) { if (arguments.length > 1) { key = collection.toArray(arguments); } return type.isArray(key) ? this.removeByKeyArray(key) : this.removeByKey(key); }; /** * Remove data(key-value pair) from the given key. * @param {string} key A string for key * @returns {*|null} A removed data * @example * //-- #1. Get Module --// * var HashMap = require('tui-code-snippet').HashMap; // node, commonjs * var HashMap = tui.util.HashMap; // distribution file * * //-- #2. 
Use property --// * var hm = new HashMap(); * hm.set('key', 'value'); * hm.removeByKey('key') */ HashMap.prototype.removeByKey = function(key) { var data = this.has(key) ? this.get(key) : null; if (data !== null) { delete this[this.encodeKey(key)]; this.length -= 1; } return data; }; /** * Remove a data(key-value pairs) from the given key-list. * @param {string[]} keyArray An array of keys * @returns {string[]} A removed data * @example * //-- #1. Get Module --// * var HashMap = require('tui-code-snippet').HashMap; // node, commonjs * var HashMap = tui.util.HashMap; // distribution file * * //-- #2. Use property --// * var hm = new HashMap(); * hm.set('key', 'value'); * hm.set('key2', 'value'); * hm.removeByKeyArray(['key', 'key2']); */ HashMap.prototype.removeByKeyArray = function(keyArray) { var data = []; var self = this; collection.forEach(keyArray, function(key) { data.push(self.removeByKey(key)); }); return data; }; /** * Remove all the data */ HashMap.prototype.removeAll = function() { var self = this; this.each(function(value, key) { self.remove(key); }); }; /** * Execute the provided callback once for each all the data. * @param {Function} iteratee Callback function * @example * //-- #1. Get Module --// * var HashMap = require('tui-code-snippet').HashMap; // node, commonjs * var HashMap = tui.util.HashMap; // distribution file * * //-- #2. Use property --// * var hm = new HashMap(); * hm.set('key', 'value'); * hm.set('key2', 'value'); * * hm.each(function(value, key) { * //do something... * }); */ HashMap.prototype.each = function(iteratee) { var self = this; var flag; collection.forEachOwnProperties(this, function(value, key) { // eslint-disable-line consistent-return if (key.charAt(0) === _MAPDATAPREFIX) { flag = iteratee(value, self.decodeKey(key)); } if (flag === false) { return flag; } }); }; /** * Return the key-list stored. * @returns {Array} A key-list * @example * //-- #1. Get Module --// * var HashMap = require('tui-code-snippet').HashMap; // node, commonjs * var HashMap = tui.util.HashMap; // distribution file * * //-- #2. Use property --// * var hm = new HashMap(); * hm.set('key', 'value'); * hm.set('key2', 'value'); * hm.keys(); //['key', 'key2'); */ HashMap.prototype.keys = function() { var keys = []; var self = this; this.each(function(value, key) { keys.push(self.decodeKey(key)); }); return keys; }; /** * Work similarly to Array.prototype.map().<br> * It executes the provided callback that checks conditions once for each element of hashMap,<br> * and returns a new array having elements satisfying the conditions * @param {Function} condition A function that checks conditions * @returns {Array} A new array having elements satisfying the conditions * @example * //-- #1. Get Module --// * var HashMap = require('tui-code-snippet').HashMap; // node, commonjs * var HashMap = tui.util.HashMap; // distribution file * * //-- #2. Use property --// * var hm1 = new HashMap(); * hm1.set('key', 'value'); * hm1.set('key2', 'value'); * * hm1.find(function(value, key) { * return key === 'key2'; * }); // ['value'] * * var hm2 = new HashMap({ * 'myobj1': { * visible: true * }, * 'mybobj2': { * visible: false * } * }); * * hm2.find(function(obj, key) { * return obj.visible === true; * }); // [{visible: true}]; */ HashMap.prototype.find = function(condition) { var founds = []; this.each(function(value, key) { if (condition(value, key)) { founds.push(value); } }); return founds; }; /** * Return a new Array having all values. 
* @returns {Array} A new array having all values */ HashMap.prototype.toArray = function() { var result = []; this.each(function(v) { result.push(v); }); return result; }; module.exports = HashMap; /***/ }) /******/ ]) }); ;
isBoolean
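A minimal usage sketch (editor's addition, not part of the bundle above) tying the CustomEvents pieces together. `Model` is a hypothetical constructor; mixin, on, invoke, fire and off are exactly the APIs defined in module 16:

function Model() {}
CustomEvents.mixin(Model); // copies on/off/fire/invoke onto Model.prototype

var model = new Model();
model.on('beforeZoom', function() { return false; }); // a vetoing handler
model.invoke('beforeZoom'); // => false: invoke ANDs every handler's result
model.fire('beforeZoom');   // fire calls the handlers but ignores results
model.off('beforeZoom');    // unbinds all 'beforeZoom' handlers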
stacktrace.go
package errors import ( "fmt" "io" "runtime" "strings" "github.com/pkg/errors" ) func matchesFunc(f errors.Frame, prefixes ...string) bool
// funcName returns the name of this function, if known. func funcName(f errors.Frame) string { // this looks a bit like magic, but follows example here: // https://github.com/pkg/errors/blob/v0.8.1/stack.go#L43-L50 pc := uintptr(f) - 1 fn := runtime.FuncForPC(pc) if fn == nil { return "unknown" } return fn.Name() } func fileLine(f errors.Frame) (string, int) { // this looks a bit like magic, but follows example here: // https://github.com/pkg/errors/blob/v0.8.1/stack.go#L14-L27 // as this is where we get the Frames pc := uintptr(f) - 1 fn := runtime.FuncForPC(pc) if fn == nil { return "unknown", 0 } return fn.FileLine(pc) } func trimInternal(st errors.StackTrace) errors.StackTrace { // trim our internal parts here // manual error creation, or runtime for caught panics for matchesFunc(st[0], // where we create errors "github.com/cosmos/cosmos-sdk/types/errors.Wrap", "github.com/cosmos/cosmos-sdk/types/errors.Wrapf", "github.com/cosmos/cosmos-sdk/types/errors.WithType", // runtime are added on panics "runtime.", // _test is defined in coverage tests, causing failure // "/_test/" ) { st = st[1:] } // trim out outer wrappers (runtime.goexit and test library if present) for l := len(st) - 1; l > 0 && matchesFunc(st[l], "runtime.", "testing."); l-- { st = st[:l] } return st } func writeSimpleFrame(s io.Writer, f errors.Frame) { file, line := fileLine(f) // cut file at "github.com/" // TODO: generalize better for other hosts? chunks := strings.SplitN(file, "github.com/", 2) if len(chunks) == 2 { file = chunks[1] } fmt.Fprintf(s, " [%s:%d]", file, line) } // Format works like pkg/errors, with additions. // %s is just the error message // %+v is the full stack trace // %v appends a compressed [filename:line] where the error // was created // // Inspired by https://github.com/pkg/errors/blob/v0.8.1/errors.go#L162-L176 func (e *wrappedError) Format(s fmt.State, verb rune) { // normal output here.... if verb != 'v' { fmt.Fprint(s, e.Error()) return } // work with the stack trace... whole or part stack := trimInternal(stackTrace(e)) if s.Flag('+') { fmt.Fprintf(s, "%+v\n", stack) fmt.Fprint(s, e.Error()) } else { fmt.Fprint(s, e.Error()) writeSimpleFrame(s, stack[0]) } } // stackTrace returns the first found stack trace frame carried by given error // or any wrapped error. It returns nil if no stack trace is found. func stackTrace(err error) errors.StackTrace { type stackTracer interface { StackTrace() errors.StackTrace } for { if st, ok := err.(stackTracer); ok { return st.StackTrace() } if c, ok := err.(causer); ok { err = c.Cause() } else { return nil } } }
{ fn := funcName(f) for _, prefix := range prefixes { if strings.HasPrefix(fn, prefix) { return true } } return false }
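A hedged usage sketch (editor's addition) of the Format verbs documented above. It assumes the package's Wrap helper (its name appears in trimInternal) and ErrInvalidRequest, a sentinel error the SDK is assumed to register elsewhere; neither is defined in this file:

package main

import (
	"fmt"

	sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
)

func main() {
	// Wrap records the stack at the call site; trimInternal later strips
	// the Wrap frame itself plus runtime/testing frames.
	err := sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "bad input")

	fmt.Printf("%s\n", err)  // message only
	fmt.Printf("%v\n", err)  // message plus a compressed [file:line] suffix
	fmt.Printf("%+v\n", err) // trimmed stack trace, then the message
}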
serializers.py
from rest_framework import serializers from authentication.serializers import AccountSerializer from posts.models import Post class PostSerializer(serializers.ModelSerializer):
author = AccountSerializer(read_only=True, required=False) class Meta: model = Post fields = ('id', 'author', 'content', 'title', 'genre', 'created_at', 'updated_at') read_only_fields = ('created_at', 'updated_at') def get_validation_exclusions(self, *args, **kwargs): exclusions = super(PostSerializer, self).get_validation_exclusions() return exclusions + ['author']
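A hedged usage sketch (editor's addition): because `author` is read-only and excluded from validation, the view supplies it at save time. `request` and the payload are illustrative; only PostSerializer itself is defined above.

# In a view (illustrative):
serializer = PostSerializer(data={'title': 'Hello', 'content': '...', 'genre': 'rock'})
if serializer.is_valid():
    # author never comes from client input; it is injected here
    post = serializer.save(author=request.user)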
path2.rs
// <path> use actix_web::{web, Result}; use serde::Deserialize; #[derive(Deserialize)] struct Info { username: String, } // extract path info using serde fn index(info: web::Path<Info>) -> Result<String> { Ok(format!("Welcome {}!", info.username)) } pub fn main() { use actix_web::{App, HttpServer}; HttpServer::new(|| { App::new().route( "/{username}/index.html", // <- define path parameters
}) .bind("127.0.0.1:8088") .unwrap() .run() .unwrap(); } // </path>
web::get().to(index), )
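A hedged variant (editor's addition) of the extractor above: with the same actix-web 1.x API, several path segments can be pulled out as a tuple, skipping the Deserialize struct. `user_file` and its route are illustrative names:

// assumes the same `use actix_web::{web, Result};` imports as above
fn user_file(info: web::Path<(String, String)>) -> Result<String> {
    let (user, file) = info.into_inner();
    Ok(format!("user {} requested {}", user, file))
}
// registered the same way:
//   App::new().route("/{user}/{file}", web::get().to(user_file))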
evctrl.rs
#[doc = "Register `EVCTRL` reader"] pub struct R(crate::R<EVCTRL_SPEC>); impl core::ops::Deref for R { type Target = crate::R<EVCTRL_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl From<crate::R<EVCTRL_SPEC>> for R { #[inline(always)] fn from(reader: crate::R<EVCTRL_SPEC>) -> Self { R(reader) } } #[doc = "Register `EVCTRL` writer"] pub struct W(crate::W<EVCTRL_SPEC>); impl core::ops::Deref for W { type Target = crate::W<EVCTRL_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl core::ops::DerefMut for W { #[inline(always)] fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl From<crate::W<EVCTRL_SPEC>> for W { #[inline(always)] fn from(writer: crate::W<EVCTRL_SPEC>) -> Self { W(writer) } } #[doc = "Field `PID0` reader - Port Event Pin Identifier 0"] pub struct PID0_R(crate::FieldReader<u8, u8>); impl PID0_R { #[inline(always)] pub(crate) fn new(bits: u8) -> Self { PID0_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for PID0_R { type Target = crate::FieldReader<u8, u8>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `PID0` writer - Port Event Pin Identifier 0"] pub struct PID0_W<'a> { w: &'a mut W, } impl<'a> PID0_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0x1f) | (value as u32 & 0x1f); self.w } } #[doc = "Port Event Action 0\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] #[repr(u8)] pub enum EVACT0_A { #[doc = "0: Event output to pin"] OUT = 0, #[doc = "1: Set output register of pin on event"] SET = 1, #[doc = "2: Clear output register of pin on event"] CLR = 2, #[doc = "3: Toggle output register of pin on event"] TGL = 3, } impl From<EVACT0_A> for u8 { #[inline(always)] fn from(variant: EVACT0_A) -> Self { variant as _ } } #[doc = "Field `EVACT0` reader - Port Event Action 0"] pub struct EVACT0_R(crate::FieldReader<u8, EVACT0_A>); impl EVACT0_R { #[inline(always)] pub(crate) fn new(bits: u8) -> Self { EVACT0_R(crate::FieldReader::new(bits)) } #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> EVACT0_A { match self.bits { 0 => EVACT0_A::OUT, 1 => EVACT0_A::SET, 2 => EVACT0_A::CLR, 3 => EVACT0_A::TGL, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `OUT`"] #[inline(always)] pub fn is_out(&self) -> bool { **self == EVACT0_A::OUT } #[doc = "Checks if the value of the field is `SET`"] #[inline(always)] pub fn is_set(&self) -> bool { **self == EVACT0_A::SET } #[doc = "Checks if the value of the field is `CLR`"] #[inline(always)] pub fn is_clr(&self) -> bool { **self == EVACT0_A::CLR } #[doc = "Checks if the value of the field is `TGL`"] #[inline(always)] pub fn is_tgl(&self) -> bool { **self == EVACT0_A::TGL } } impl core::ops::Deref for EVACT0_R { type Target = crate::FieldReader<u8, EVACT0_A>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `EVACT0` writer - Port Event Action 0"] pub struct EVACT0_W<'a> { w: &'a mut W, } impl<'a> EVACT0_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: EVACT0_A) -> &'a mut W { self.bits(variant.into()) } #[doc = "Event output to pin"] #[inline(always)] pub fn out(self) -> &'a mut W { self.variant(EVACT0_A::OUT) } #[doc = "Set output register of pin on event"] #[inline(always)] pub fn set(self) -> &'a mut W { self.variant(EVACT0_A::SET) } #[doc = "Clear output register of pin on event"] 
#[inline(always)] pub fn clr(self) -> &'a mut W { self.variant(EVACT0_A::CLR) } #[doc = "Toggle output register of pin on event"] #[inline(always)] pub fn tgl(self) -> &'a mut W { self.variant(EVACT0_A::TGL) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 5)) | ((value as u32 & 0x03) << 5); self.w } } #[doc = "Field `PORTEI0` reader - Port Event Enable Input 0"] pub struct PORTEI0_R(crate::FieldReader<bool, bool>); impl PORTEI0_R { #[inline(always)] pub(crate) fn new(bits: bool) -> Self { PORTEI0_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for PORTEI0_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `PORTEI0` writer - Port Event Enable Input 0"] pub struct PORTEI0_W<'a> { w: &'a mut W, } impl<'a> PORTEI0_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 7)) | ((value as u32 & 0x01) << 7); self.w } } #[doc = "Field `PID1` reader - Port Event Pin Identifier 1"] pub struct PID1_R(crate::FieldReader<u8, u8>); impl PID1_R { #[inline(always)] pub(crate) fn new(bits: u8) -> Self { PID1_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for PID1_R { type Target = crate::FieldReader<u8, u8>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `PID1` writer - Port Event Pin Identifier 1"] pub struct PID1_W<'a> { w: &'a mut W, } impl<'a> PID1_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x1f << 8)) | ((value as u32 & 0x1f) << 8); self.w } } #[doc = "Field `EVACT1` reader - Port Event Action 1"] pub struct EVACT1_R(crate::FieldReader<u8, u8>); impl EVACT1_R { #[inline(always)] pub(crate) fn new(bits: u8) -> Self { EVACT1_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for EVACT1_R { type Target = crate::FieldReader<u8, u8>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `EVACT1` writer - Port Event Action 1"] pub struct EVACT1_W<'a> { w: &'a mut W, } impl<'a> EVACT1_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 13)) | ((value as u32 & 0x03) << 13); self.w } } #[doc = "Field `PORTEI1` reader - Port Event Enable Input 1"] pub struct PORTEI1_R(crate::FieldReader<bool, bool>); impl PORTEI1_R { #[inline(always)] pub(crate) fn new(bits: bool) -> Self { PORTEI1_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for PORTEI1_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `PORTEI1` writer - Port Event Enable Input 1"] pub struct PORTEI1_W<'a> { w: &'a mut W, } impl<'a> PORTEI1_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 15)) | 
((value as u32 & 0x01) << 15); self.w } } #[doc = "Field `PID2` reader - Port Event Pin Identifier 2"] pub struct PID2_R(crate::FieldReader<u8, u8>); impl PID2_R { #[inline(always)] pub(crate) fn new(bits: u8) -> Self { PID2_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for PID2_R { type Target = crate::FieldReader<u8, u8>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `PID2` writer - Port Event Pin Identifier 2"] pub struct PID2_W<'a> { w: &'a mut W, } impl<'a> PID2_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W
} #[doc = "Field `EVACT2` reader - Port Event Action 2"] pub struct EVACT2_R(crate::FieldReader<u8, u8>); impl EVACT2_R { #[inline(always)] pub(crate) fn new(bits: u8) -> Self { EVACT2_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for EVACT2_R { type Target = crate::FieldReader<u8, u8>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `EVACT2` writer - Port Event Action 2"] pub struct EVACT2_W<'a> { w: &'a mut W, } impl<'a> EVACT2_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 21)) | ((value as u32 & 0x03) << 21); self.w } } #[doc = "Field `PORTEI2` reader - Port Event Enable Input 2"] pub struct PORTEI2_R(crate::FieldReader<bool, bool>); impl PORTEI2_R { #[inline(always)] pub(crate) fn new(bits: bool) -> Self { PORTEI2_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for PORTEI2_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `PORTEI2` writer - Port Event Enable Input 2"] pub struct PORTEI2_W<'a> { w: &'a mut W, } impl<'a> PORTEI2_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 23)) | ((value as u32 & 0x01) << 23); self.w } } #[doc = "Field `PID3` reader - Port Event Pin Identifier 3"] pub struct PID3_R(crate::FieldReader<u8, u8>); impl PID3_R { #[inline(always)] pub(crate) fn new(bits: u8) -> Self { PID3_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for PID3_R { type Target = crate::FieldReader<u8, u8>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `PID3` writer - Port Event Pin Identifier 3"] pub struct PID3_W<'a> { w: &'a mut W, } impl<'a> PID3_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x1f << 24)) | ((value as u32 & 0x1f) << 24); self.w } } #[doc = "Field `EVACT3` reader - Port Event Action 3"] pub struct EVACT3_R(crate::FieldReader<u8, u8>); impl EVACT3_R { #[inline(always)] pub(crate) fn new(bits: u8) -> Self { EVACT3_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for EVACT3_R { type Target = crate::FieldReader<u8, u8>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `EVACT3` writer - Port Event Action 3"] pub struct EVACT3_W<'a> { w: &'a mut W, } impl<'a> EVACT3_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 29)) | ((value as u32 & 0x03) << 29); self.w } } #[doc = "Field `PORTEI3` reader - Port Event Enable Input 3"] pub struct PORTEI3_R(crate::FieldReader<bool, bool>); impl PORTEI3_R { #[inline(always)] pub(crate) fn new(bits: bool) -> Self { PORTEI3_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for PORTEI3_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `PORTEI3` writer - Port Event Enable Input 3"] pub struct PORTEI3_W<'a> { w: &'a mut W, } impl<'a> PORTEI3_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> 
&'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 31)) | ((value as u32 & 0x01) << 31); self.w } } impl R { #[doc = "Bits 0:4 - Port Event Pin Identifier 0"] #[inline(always)] pub fn pid0(&self) -> PID0_R { PID0_R::new((self.bits & 0x1f) as u8) } #[doc = "Bits 5:6 - Port Event Action 0"] #[inline(always)] pub fn evact0(&self) -> EVACT0_R { EVACT0_R::new(((self.bits >> 5) & 0x03) as u8) } #[doc = "Bit 7 - Port Event Enable Input 0"] #[inline(always)] pub fn portei0(&self) -> PORTEI0_R { PORTEI0_R::new(((self.bits >> 7) & 0x01) != 0) } #[doc = "Bits 8:12 - Port Event Pin Identifier 1"] #[inline(always)] pub fn pid1(&self) -> PID1_R { PID1_R::new(((self.bits >> 8) & 0x1f) as u8) } #[doc = "Bits 13:14 - Port Event Action 1"] #[inline(always)] pub fn evact1(&self) -> EVACT1_R { EVACT1_R::new(((self.bits >> 13) & 0x03) as u8) } #[doc = "Bit 15 - Port Event Enable Input 1"] #[inline(always)] pub fn portei1(&self) -> PORTEI1_R { PORTEI1_R::new(((self.bits >> 15) & 0x01) != 0) } #[doc = "Bits 16:20 - Port Event Pin Identifier 2"] #[inline(always)] pub fn pid2(&self) -> PID2_R { PID2_R::new(((self.bits >> 16) & 0x1f) as u8) } #[doc = "Bits 21:22 - Port Event Action 2"] #[inline(always)] pub fn evact2(&self) -> EVACT2_R { EVACT2_R::new(((self.bits >> 21) & 0x03) as u8) } #[doc = "Bit 23 - Port Event Enable Input 2"] #[inline(always)] pub fn portei2(&self) -> PORTEI2_R { PORTEI2_R::new(((self.bits >> 23) & 0x01) != 0) } #[doc = "Bits 24:28 - Port Event Pin Identifier 3"] #[inline(always)] pub fn pid3(&self) -> PID3_R { PID3_R::new(((self.bits >> 24) & 0x1f) as u8) } #[doc = "Bits 29:30 - Port Event Action 3"] #[inline(always)] pub fn evact3(&self) -> EVACT3_R { EVACT3_R::new(((self.bits >> 29) & 0x03) as u8) } #[doc = "Bit 31 - Port Event Enable Input 3"] #[inline(always)] pub fn portei3(&self) -> PORTEI3_R { PORTEI3_R::new(((self.bits >> 31) & 0x01) != 0) } } impl W { #[doc = "Bits 0:4 - Port Event Pin Identifier 0"] #[inline(always)] pub fn pid0(&mut self) -> PID0_W { PID0_W { w: self } } #[doc = "Bits 5:6 - Port Event Action 0"] #[inline(always)] pub fn evact0(&mut self) -> EVACT0_W { EVACT0_W { w: self } } #[doc = "Bit 7 - Port Event Enable Input 0"] #[inline(always)] pub fn portei0(&mut self) -> PORTEI0_W { PORTEI0_W { w: self } } #[doc = "Bits 8:12 - Port Event Pin Identifier 1"] #[inline(always)] pub fn pid1(&mut self) -> PID1_W { PID1_W { w: self } } #[doc = "Bits 13:14 - Port Event Action 1"] #[inline(always)] pub fn evact1(&mut self) -> EVACT1_W { EVACT1_W { w: self } } #[doc = "Bit 15 - Port Event Enable Input 1"] #[inline(always)] pub fn portei1(&mut self) -> PORTEI1_W { PORTEI1_W { w: self } } #[doc = "Bits 16:20 - Port Event Pin Identifier 2"] #[inline(always)] pub fn pid2(&mut self) -> PID2_W { PID2_W { w: self } } #[doc = "Bits 21:22 - Port Event Action 2"] #[inline(always)] pub fn evact2(&mut self) -> EVACT2_W { EVACT2_W { w: self } } #[doc = "Bit 23 - Port Event Enable Input 2"] #[inline(always)] pub fn portei2(&mut self) -> PORTEI2_W { PORTEI2_W { w: self } } #[doc = "Bits 24:28 - Port Event Pin Identifier 3"] #[inline(always)] pub fn pid3(&mut self) -> PID3_W { PID3_W { w: self } } #[doc = "Bits 29:30 - Port Event Action 3"] #[inline(always)] pub fn evact3(&mut self) -> EVACT3_W { EVACT3_W { w: self } } #[doc = "Bit 31 - Port Event Enable Input 3"] 
#[inline(always)] pub fn portei3(&mut self) -> PORTEI3_W { PORTEI3_W { w: self } } #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.0.bits(bits); self } } #[doc = "Event Input Control\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [evctrl](index.html) module"] pub struct EVCTRL_SPEC; impl crate::RegisterSpec for EVCTRL_SPEC { type Ux = u32; } #[doc = "`read()` method returns [evctrl::R](R) reader structure"] impl crate::Readable for EVCTRL_SPEC { type Reader = R; } #[doc = "`write(|w| ..)` method takes [evctrl::W](W) writer structure"] impl crate::Writable for EVCTRL_SPEC { type Writer = W; } #[doc = "`reset()` method sets EVCTRL to value 0"] impl crate::Resettable for EVCTRL_SPEC { #[inline(always)] fn reset_value() -> Self::Ux { 0 } }
{ self.w.bits = (self.w.bits & !(0x1f << 16)) | ((value as u32 & 0x1f) << 16); self.w }
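A minimal usage sketch for the EVCTRL API above (not part of the original file): it assumes an svd2rust-style peripheral struct that exposes this register as `evctrl` under a `pac::PORT` peripheral (both names are placeholders); the field methods and the read-modify-write `modify` call are the ones defined above. Raw `bits()` writes on the PIDx fields are `unsafe` because the generated API cannot prove every 5-bit pin identifier is valid.

fn route_event0_to_pin(port: &pac::PORT) {
    port.evctrl.modify(|_, w| unsafe {
        w.pid0().bits(3)         // Port Event Pin Identifier 0 (hypothetical pin 3)
            .evact0().tgl()      // toggle the pin's output register on each event
            .portei0().set_bit() // enable event input 0
    });
}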
UR5Class.py
#!/usr/bin/python
# -*- coding: utf-8 -*-

## @package UR5
# Documentation for the UR5 class package.
#
# Documentation of the code written to control the UR5 manipulator and to generate and control its positions.
# Each routine documented here carries a short description of its purpose, its inputs and its outputs.

import numpy as np
from numpy.linalg import inv
from numpy.linalg import norm
from numpy.linalg import pinv
from scipy.signal import butter, lfilter
from scipy.signal import freqz
import struct
import time
import csv
import Transformations as tf
import os

## Documentation of UR5Class, used for remote control of the Universal Robots 5 (UR5) manipulator.
#
# This class is responsible for interpreting the data received from the UR5 control box and for driving the robot's operation throughout the project.
# It owns the forward and inverse kinematics computations for the robot's various positions, interprets the robot data,
# checks the robot's safety and operating state, and performs any calibration or position computation that is needed.
class UR5Class:
    _standard_DH = np.mat([[0, -.425, -.39225, 0, 0, 0],
                           [1.570796327, 0, 0, 1.570796327, -1.570796327, 0],
                           [.089159, 0, 0, .10915, .09465, .0823],
                           [0, 0, 0, 0, 0, 0]])
    # _standard_DH is the robot's nominal DH table. The rows correspond, respectively, to (a, alpha, d, q).
    _robot_data = []  # Empty list that receives the robot data
    _data_pack_max = 133  # Maximum (and expected) number of values received in the data packet list

    processTimeList = []
    errorDB = []
    error_D_DB = []
    wDB = []
    u = np.array([0, 0, 0, 0, 0, 0], dtype=np.float64)
    errorSaturation = np.array([0, 0, 0, 0, 0, 0], dtype=np.float64)
    errorPrevious = np.array([0, 0, 0, 0, 0, 0], dtype=np.float64)
    errorSum = np.array([0, 0, 0, 0, 0, 0], dtype=np.float64)
    normErro = np.zeros(6, dtype=np.float64)

    ## Class constructor.
    # @param self The object pointer.
    # @param delta_DH The calibration data for the Denavit-Hartenberg matrix of the robot being controlled.
    def __init__(self, delta_DH=np.zeros((5, 6))):
        self.delta_standard_DH = delta_DH
        self._effective_a = self._standard_DH[0, :] + self.delta_standard_DH[0, :]
        self._effective_alpha = self._standard_DH[1, :] + self.delta_standard_DH[1, :]
        self._effective_d = self._standard_DH[2, :] + self.delta_standard_DH[2, :]
        self._effective_q = np.array(self._standard_DH[3, :] + self.delta_standard_DH[3, :])
        # The effective values are the nominal UR5 values plus the calibration data of this specific robot.

        # Movement-independent transforms used by later computations. The original
        # spelled out the 18 matrices one by one; this loop builds the identical
        # Trans_d_i * Trans_a_i * Rot_x_i products for each of the six joints.
        A_0 = []
        for i in range(6):
            alpha = self._effective_alpha[0, i]
            Rot_x = np.mat([[1, 0, 0, 0],
                            [0, np.cos(alpha), -np.sin(alpha), 0],
                            [0, np.sin(alpha), np.cos(alpha), 0],
                            [0, 0, 0, 1]])
            Trans_d = np.mat([[1, 0, 0, 0],
                              [0, 1, 0, 0],
                              [0, 0, 1, self._effective_d[0, i]],
                              [0, 0, 0, 1]])
            Trans_a = np.mat([[1, 0, 0, self._effective_a[0, i]],
                              [0, 1, 0, 0],
                              [0, 0, 1, 0],
                              [0, 0, 0, 1]])
            A_0.append(Trans_d * Trans_a * Rot_x)
        (self._A_0_1, self._A_0_2, self._A_0_3,
         self._A_0_4, self._A_0_5, self._A_0_6) = A_0
        return

    ## Method that receives and parses the robot data packet.
    # @param self The object pointer.
    # @param data The data packet received over the Ethernet connection with the robot.
    def setRobotData(self, data):
        size = len(data)
        self._robot_data = []
        # The first value received, the message size, is a 4-byte integer
        # (unpack returns a tuple, so keep only its first element).
        self._robot_data.append(struct.unpack('!i', data[0:4])[0])
        i = 4
        # The remaining values arrive as 8-byte doubles.
        while i < size:
            self._robot_data.append(struct.unpack('!d', data[i:i+8])[0])
            i += 8
        # Update the robot joint data right away.
        if size < (4 + (34 * 8)):
            print("[WARNING] Data size smaller than expected. Bytes: " + str(size))
            return
        self._effective_q = np.array(self._robot_data[32:38]) + self.delta_standard_DH[3, :]
        return
    # setRobotData receives the 1060-byte packet and splits it into the values of the data list.

    def setRobotDataRTDE(self, data):
        #print(data.actual_TCP_pose)
        # The backing list starts empty; allocate it before the indexed
        # assignments below, otherwise they raise IndexError.
        if len(self._robot_data) < self._data_pack_max:
            self._robot_data = [0] * self._data_pack_max
        self._robot_data[1] = np.asarray(data.timestamp, dtype=np.float64)
        self._robot_data[2:8] = np.asarray(data.target_q, dtype=np.float64)
        self._robot_data[8:14] = np.asarray(data.target_qd, dtype=np.float64)
        self._robot_data[32:38] = np.asarray(data.actual_q, dtype=np.float64)
        self._robot_data[38:44] = np.asarray(data.actual_qd, dtype=np.float64)
        self._robot_data[56:62] = np.asarray(data.actual_TCP_pose, dtype=np.float64)
        self._robot_data[62:68] = np.asarray(data.actual_TCP_speed, dtype=np.float64)
        self._robot_data[68:74] = np.asarray(data.actual_TCP_force, dtype=np.float64)
        self._robot_data[74:80] = np.asarray(data.target_TCP_pose, dtype=np.float64)
        self._robot_data[80:86] = np.asarray(data.target_TCP_speed, dtype=np.float64)
        self._robot_data[102] = np.asarray(data.safety_mode, dtype=np.int32)
        self._robot_data[132] = np.asarray(data.runtime_state, dtype=np.uint32)
        q = np.asarray(data.actual_q)
        self._effective_q = q + self.delta_standard_DH[3, :]
        # <field name="timestamp" type="DOUBLE"/>
        # <field name="target_q" type="VECTOR6D"/>
        # <field name="target_qd" type="VECTOR6D"/>
        # <field name="target_qdd" type="VECTOR6D"/>
        # <field name="target_current" type="VECTOR6D"/>
        # <field name="target_moment" type="VECTOR6D"/>
        # <field name="actual_q" type="VECTOR6D"/>
        # <field name="actual_qd" type="VECTOR6D"/>
        # <field name="actual_current" type="VECTOR6D"/>
        # <field name="joint_control_output" type="VECTOR6D"/>
        # <field name="actual_TCP_pose" type="VECTOR6D"/>
        # <field name="actual_TCP_speed" type="VECTOR6D"/>
        # <field name="actual_TCP_force" type="VECTOR6D"/>
        # <field name="target_TCP_pose" type="VECTOR6D"/>
        # <field name="target_TCP_speed" type="VECTOR6D"/>
        # <field name="actual_digital_input_bits" type="UINT64"/>
        # <field name="joint_temperatures" type="VECTOR6D"/>
        # <field name="actual_execution_time" type="DOUBLE"/>
        # <field name="robot_mode" type="INT32"/>
        # <field name="joint_mode" type="VECTOR6INT32"/>
        # <field name="safety_mode" type="INT32"/>
        # <field name="actual_tool_accelerometer" type="VECTOR3D"/>
        # <field name="speed_scaling" type="DOUBLE"/>
        # <field name="target_speed_fraction" type="DOUBLE"/>
        # <field name="actual_momentum" type="DOUBLE"/>
        # <field name="actual_main_voltage" type="DOUBLE"/>
        # <field name="actual_robot_voltage" type="DOUBLE"/>
        # <field name="actual_robot_current" type="DOUBLE"/>
        # <field name="actual_joint_voltage" type="VECTOR6D"/>
        # <field name="actual_digital_output_bits" type="UINT64"/>
        # <field name="runtime_state" type="UINT32"/>
        return

    ## Returns true or false for the robot safety state.
    # @param self The object pointer.
    def checkSafety(self):
        try:
            if self._robot_data[102] == 1:
                safety = True
            else:
                safety = False
            return safety
        except:
            print("[ERROR] Could not find Robot Data!")
            return None
    # checkSafety checks whether the robot safety variable allows operation.

    ## Returns true or false for the robot program running state.
    # @param self The object pointer.
    def programStateCheck(self):
        try:
            if self._robot_data[132] == 1:
                state = True
            else:
                state = False
            return state
        except:
            print("[ERROR] Could not find Robot Data!")
            return None
    # programStateCheck checks whether the robot state variable allows operation.

    ## Prints to the command prompt the 133 values received in the robot data packet.
    # @param self The object pointer.
    def printRobotData(self):
        size = len(self._robot_data)
        if size == self._data_pack_max:  # was self._datapackmax, an undefined attribute
            print("[INFO] Message Size in Bytes: " + str(self._robot_data[0]))
            print("[INFO] Time: " + str(self._robot_data[1]))
            print("[INFO] q target" + str(self._robot_data[2:8]))
            print("[INFO] qd target" + str(self._robot_data[8:14]))
            print("[INFO] qdd target" + str(self._robot_data[14:20]))
            print("[INFO] I target" + str(self._robot_data[20:26]))
            print("[INFO] M target" + str(self._robot_data[26:32]))
            print("[INFO] q actual" + str(self._robot_data[32:38]))
            print("[INFO] qd actual" + str(self._robot_data[38:44]))
            print("[INFO] I actual" + str(self._robot_data[44:50]))
            print("[INFO] I control" + str(self._robot_data[50:56]))
            print("[INFO] Tool Vector Actual" + str(self._robot_data[56:62]))
            print("[INFO] TCP Speed Actual" + str(self._robot_data[62:68]))
            print("[INFO] TCP Force" + str(self._robot_data[68:74]))
            print("[INFO] Tool Vector Target" + str(self._robot_data[74:80]))
            print("[INFO] TCP Speed Target" + str(self._robot_data[80:86]))
            print("[INFO] digital input bits" + str(self._robot_data[86]))
            print("[INFO] Motor Temperatures" + str(self._robot_data[87:93]))
            print("[INFO] Controller Timer" + str(self._robot_data[93]))
            print("[INFO] Test Value" + str(self._robot_data[94]))
            print("[INFO] Robot Mode" + str(self._robot_data[95]))
            print("[INFO] Joint Modes" + str(self._robot_data[96:102]))
            print("[INFO] Safety Mode" + str(self._robot_data[102]))
            print("[INFO] Tool Acceleration Values" + str(self._robot_data[109:112]))
            print("[INFO] Speed Scaling" + str(self._robot_data[118]))
            print("[INFO] Linear Momentum Norm" + str(self._robot_data[119]))
            print("[INFO] V Main" + str(self._robot_data[122]))
            print("[INFO] V Robot" + str(self._robot_data[123]))
            print("[INFO] I Robot" + str(self._robot_data[124]))
            print("[INFO] V actual" + str(self._robot_data[125:131]))
            print("[INFO] Digital Outputs" + str(self._robot_data[131]))
            print("[INFO] Program State" + str(self._robot_data[132]))
        # Handle the case where the packet arrives smaller than 1060 bytes.
        else:
            print("[WARNING] Size of data smaller than expected: ", size)
        return
    # printRobotData prints every value of the decoded data packet, for debugging.

    ## Returns the robot end-effector target pose vector, in the format [x, y, z, rx, ry, rz].
    # @param self The object pointer.
    def getPositionTarget(self):
        try:
            array = np.array(self._robot_data[74:80])
            return array
        except:
            print("[ERROR] Could not find Robot Data!")
            return None
    # getPositionTarget returns the target tool vector pose.

    def getPosition(self):
        try:
            array = np.array(self._robot_data[56:62])
            return array
        except:
            print("[ERROR] Could not find Robot Data!")
            return None
    # getPosition returns the current tool vector pose.

    ## Returns the end-effector velocity vector, in the format [dx, dy, dz, drx, dry, drz].
    # @param self The object pointer.
    def getTCPSpeed(self):
        try:
            array = np.array(self._robot_data[62:68])
            return array
        except:
            print("[ERROR] Could not find Robot Data!")
            return None
    # getTCPSpeed returns the tool velocity.

    ## Returns the target end-effector velocity vector, in the format [dx, dy, dz, drx, dry, drz].
    # @param self The object pointer.
    def getTCPSpeedTarget(self):
        try:
            array = np.array(self._robot_data[80:86])
            return array
        except:
            print("[ERROR] Could not find Robot Data!")
            return None
    # getTCPSpeedTarget returns the target tool velocity.

    ## Returns the modulus of the end-effector linear velocity, as a scalar [v].
    # @param self The object pointer.
    def getTCPSpeedMod(self):
        try:
            v = np.sqrt(self._robot_data[62]*self._robot_data[62] + self._robot_data[63]*self._robot_data[63] + self._robot_data[64]*self._robot_data[64])
            return v
        except:
            print("[ERROR] Could not find Robot Data!")
            return None
    # getTCPSpeedMod returns the magnitude of the tool linear velocity.

    ## Returns the position vector of the robot's six joints.
    # @param self The object pointer.
    def getJointPosition(self):
        try:
            array = np.array(self._robot_data[32:38])
            return array
        except:
            print("[ERROR] Could not find Robot Data!")
            return None

    ## Returns the target position vector of the robot's six joints.
    # @param self The object pointer.
    def getJointPositionTarget(self):
        try:
            array = np.array(self._robot_data[2:8])
            return array
        except:
            print("[ERROR] Could not find Robot Data!")
            return None
    # Returns the target joint values.

    ## Returns the velocity vector of the robot's six joints.
    # @param self The object pointer.
    def getJointSpeed(self):
        try:
            array = np.array(self._robot_data[38:44])
            return array
        except:
            print("[ERROR] Could not find Robot Data!")
            return None
    # getJointSpeed returns the joint velocities.

    ## Returns the target velocity vector of the robot's six joints.
    # @param self The object pointer.
    def getJointSpeedTarget(self):
        try:
            array = np.array(self._robot_data[8:14])
            return array
        except:
            print("[ERROR] Could not find Robot Data!")
            return None
    # getJointSpeedTarget returns the target joint velocities.

    def getTCPForce(self):
        try:
            array = np.array(self._robot_data[68:74])
            return array
        except:
            print("[ERROR] Could not find Robot Data!")
            return None
    # getTCPForce returns the generalized force at the TCP.

    ## Returns the robot's current time since it was powered on.
    # @param self The object pointer.
    def getTime(self):
        return self._robot_data[1]
    # Returns the current usage-time value.

    ## Performs the UR5 forward kinematics for the given joint position. The method returns the 4x4 homogeneous matrix of the pose, or a pose vector in RV (rotation vector) or RPY form.
    # @param self The object pointer.
    # @param q The joint coordinate vector.
    # @param vector Parameter that selects the return type as an RV pose vector.
    # @param rpy Parameter that, together with vector, selects the return as an RPY pose vector.
    # @param apply_offset Parameter that applies the DH joint-angle calibration offsets to q before computing.
def ur5_direct_kinematics(self, q, vector = False, rpy = False, apply_offset = False): if (apply_offset == True): # q = q + self.delta_standard_DH[3,:] q = np.squeeze(np.asarray(q + self.delta_standard_DH[3,:])) _rot_z_1 = np.mat([[np.cos(q[0]), -np.sin(q[0]), 0, 0],[np.sin(q[0]), np.cos(q[0]), 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]]) _rot_z_2 = np.mat([[np.cos(q[1]), -np.sin(q[1]), 0, 0],[np.sin(q[1]), np.cos(q[1]), 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]]) _rot_z_3 = np.mat([[np.cos(q[2]), -np.sin(q[2]), 0, 0],[np.sin(q[2]), np.cos(q[2]), 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]]) _rot_z_4 = np.mat([[np.cos(q[3]), -np.sin(q[3]), 0, 0],[np.sin(q[3]), np.cos(q[3]), 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]]) _rot_z_5 = np.mat([[np.cos(q[4]), -np.sin(q[4]), 0, 0],[np.sin(q[4]), np.cos(q[4]), 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]]) _rot_z_6 = np.mat([[np.cos(q[5]), -np.sin(q[5]), 0, 0],[np.sin(q[5]), np.cos(q[5]), 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]]) # Utiliza as matrizes definidas no construtor e as de rotação das juntas atuais para retornar a matriz final. self._A_1 = _rot_z_1 * self._A_0_1 self._A_2 = _rot_z_2 * self._A_0_2 self._A_3 = _rot_z_3 * self._A_0_3 self._A_4 = _rot_z_4 * self._A_0_4 self._A_5 = _rot_z_5 * self._A_0_5 self._A_6 = _rot_z_6 * self._A_0_6 self._H = self._A_1 * self._A_2 * self._A_3 * self._A_4 * self._A_5 * self._A_6 #print self._H if (vector == False): return self._H else: vetor = tf.matrix2RotationVector(self._H[0:3,0:3]) array = np.array([self._H[0,3], self._H[1,3], self._H[2,3]], float) vetor = np.hstack((array,vetor)) #print vetor if (rpy == False): return vetor else: vetor[3:6] = tf.rotationVector2RollPitchYaw(vetor[3:6]) return vetor # ur5_direct_kinematics executa a cinemática direta do UR5 e retorna a matriz 4x4 de posição e orientação do UR5 def verifyDelta(self, epsilon = 10e-6): direct = self.ur5_direct_kinematics(self.getJointPosition(), vect
apply_offset = True)
        real = self.getPosition()
        diff = tf.computeDifference(real, direct)
        print("[INFO] Direct Kinematics calculated with Delta: " + str(direct))
        print("[INFO] Direct Kinematics real: " + str(real))
        error = norm(diff[0:3])
        print("[INFO] Error: ", error)
        if error < epsilon:
            print("[INFO] Correct Delta Matrix!")
            return True
        else:
            print("[WARNING] Incorrect Delta Matrix!")
            return False

    def _DH(self, a, alpha, d, theta):
        Td = np.asmatrix(np.eye(4))
        Td[2, 3] = d
        Ta = np.asmatrix(np.eye(4))
        Ta[0, 3] = a
        Rtheta = tf.Rot_z(theta)
        Rtheta = np.mat([[Rtheta[0, 0], Rtheta[0, 1], Rtheta[0, 2], 0],
                         [Rtheta[1, 0], Rtheta[1, 1], Rtheta[1, 2], 0],
                         [Rtheta[2, 0], Rtheta[2, 1], Rtheta[2, 2], 0],
                         [0, 0, 0, 1]])
        Ralpha = tf.Rot_x(alpha)
        Ralpha = np.mat([[Ralpha[0, 0], Ralpha[0, 1], Ralpha[0, 2], 0],
                         [Ralpha[1, 0], Ralpha[1, 1], Ralpha[1, 2], 0],
                         [Ralpha[2, 0], Ralpha[2, 1], Ralpha[2, 2], 0],
                         [0, 0, 0, 1]])
        G = Td * Rtheta * Ta * Ralpha
        return G
    # _DH returns the 4x4 matrix of a single joint; used by the analytic inverse kinematics.

    def _analytic_ur5_inverse_kinematics(self, p):
        rvMatrix = tf.rotationVector2Matrix(p[3:6])
        gd = np.mat([[rvMatrix[0, 0], rvMatrix[0, 1], rvMatrix[0, 2], p[0]],
                     [rvMatrix[1, 0], rvMatrix[1, 1], rvMatrix[1, 2], p[1]],
                     [rvMatrix[2, 0], rvMatrix[2, 1], rvMatrix[2, 2], p[2]],
                     [0, 0, 0, 1]])
        theta = np.zeros((6, 8))
        d1 = self._standard_DH[2, 0]
        d2 = self._standard_DH[2, 1]
        d3 = self._standard_DH[2, 2]
        d4 = self._standard_DH[2, 3]
        d5 = self._standard_DH[2, 4]
        d6 = self._standard_DH[2, 5]
        a1 = self._standard_DH[0, 0]
        a2 = self._standard_DH[0, 1]
        a3 = self._standard_DH[0, 2]
        a4 = self._standard_DH[0, 3]
        a5 = self._standard_DH[0, 4]
        a6 = self._standard_DH[0, 5]
        alpha1 = self._standard_DH[1, 0]
        alpha2 = self._standard_DH[1, 1]
        alpha3 = self._standard_DH[1, 2]
        alpha4 = self._standard_DH[1, 3]
        alpha5 = self._standard_DH[1, 4]
        alpha6 = self._standard_DH[1, 5]

        # Calculating theta1
        p05 = gd * np.mat([[0], [0], [-d6], [1]])
        p05 = p05 - np.mat([[0], [0], [0], [1]])
        psi = np.arctan2(p05[1], p05[0])
        p05xy = np.sqrt(p05[1] * p05[1] + p05[0] * p05[0])
        if d4 > p05xy:
            print("[WARNING] No solution for Theta1: d4 > P05xy")
            print("[WARNING] Creating approximation, highly inaccurate")
            d4 = p05xy - 1e-10
        try:
            phi = np.arccos(d4 / p05xy)
        except:
            print("[ERROR] Division by zero: " + str(p05xy))
            return None
        theta[0, 0:4] = np.radians(90) + psi + phi
        theta[0, 4:8] = np.radians(90) + psi - phi
        theta = np.real(theta)

        # Calculating theta5
        cols = np.array([0, 4])
        for i in range(0, cols.size):
            c = cols[i]
            try:
                T10 = inv(self._DH(a1, alpha1, d1, theta[0, c]))
            except:
                print("[ERROR] Could not find inverse: " + str(self._DH(a1, alpha1, d1, theta[0, c])))
                return None
            T16 = T10 * gd
            p16z = T16[2, 3]
            try:
                if ((p16z - d4) / d6) > 1:
                    print("[WARNING] No solution for Theta5: (p16z-d4)/d6) > 1")
                    print("[WARNING] Creating approximation, highly inaccurate")
                    d6 = (p16z - d4) + 1e-10
                t5 = np.arccos((p16z - d4) / d6)
            except:
                print("[ERROR] Division by zero: " + str(d6))
                return None
            theta[4, c:c+2] = t5
            theta[4, c+2:c+4] = -t5
        theta = np.real(theta)

        # Calculating theta6
        cols = np.array([0, 2, 4, 6])
        for i in range(0, cols.size):
            c = cols[i]
            T01 = self._DH(a1, alpha1, d1, theta[0, c])
            try:
                T61 = inv(gd) * T01
            except:
                print("[ERROR] Could not find inverse: " + str(gd))
                return None
            T61zy = T61[1, 2]
            T61zx = T61[0, 2]
            t5 = theta[4, c]
            if np.sin(t5) == 0:
                theta[5, c:c+2] = 0
            else:
                theta[5, c:c+2] = np.arctan2(-T61zy / np.sin(t5), T61zx / np.sin(t5))
        theta = np.real(theta)

        # Calculating theta3
        cols = np.array([0, 2, 4, 6])
        for i in range(0, cols.size):
            c = cols[i]
            # The original used `except T10:` / `except T65:` / `except T54:`;
            # `except` expects exception classes, not matrices, so that code
            # raises a TypeError as soon as an inversion fails. Catch the
            # failure generically instead.
            try:
                T10 = inv(self._DH(a1, alpha1, d1, theta[0, c]))
                T65 = inv(self._DH(a6, alpha6, d6, theta[5, c]))
                T54 = inv(self._DH(a5, alpha5, d5, theta[4, c]))
            except Exception as e:
                print("[ERROR] Could not find inverse (Theta3): " + str(e))
                return None
            T14 = T10 * gd * T65 * T54
            p13 = T14 * np.mat([[0], [-d4], [0], [1]])
            p13 = p13 - np.mat([[0], [0], [0], [1]])
            p13norm2 = norm(p13) * norm(p13)
            arg = (p13norm2 - a2*a2 - a3*a3) / (2*a2*a3)
            if arg > 1 or arg < -1:
                print("[WARNING] No solution for Theta3: arg < -1 or arg > 1")
                print("[WARNING] Creating approximation, highly inaccurate")
                if arg > 1:
                    arg = 1 - 1e-10
                else:
                    arg = -1 + 1e-10
            t3p = np.arccos(arg)
            theta[2, c] = t3p
            theta[2, c+1] = -t3p
        theta = np.real(theta)

        # Calculating theta2 and theta4
        cols = np.array([0, 1, 2, 3, 4, 5, 6, 7])
        for i in range(0, cols.size):
            c = cols[i]
            try:
                T10 = inv(self._DH(a1, alpha1, d1, theta[0, c]))
                T65 = inv(self._DH(a6, alpha6, d6, theta[5, c]))
                T54 = inv(self._DH(a5, alpha5, d5, theta[4, c]))
            except Exception as e:
                print("[ERROR] Could not find inverse (Theta2): " + str(e))
                return None
            T14 = T10 * gd * T65 * T54
            p13 = T14 * np.mat([[0], [-d4], [0], [1]]) - np.mat([[0], [0], [0], [1]])
            p13norm = norm(p13)
            theta[1, c] = -np.arctan2(p13[1], -p13[0]) + np.arcsin(a3 * np.sin(theta[2, c]) / p13norm)
            try:
                T32 = inv(self._DH(a3, alpha3, d3, theta[2, c]))
                T21 = inv(self._DH(a2, alpha2, d2, theta[1, c]))
            except Exception as e:
                print("[ERROR] Could not find inverse (Theta4): " + str(e))
                return None
            T34 = T32 * T21 * T14
            theta[3, c] = np.arctan2(T34[1, 0], T34[0, 0])
        theta = np.real(theta)

        # Wrap all angles into (-pi, pi]. The original iterated over range(0, 5)
        # and range(0, 7), silently skipping the last joint row and the last
        # solution column; cover the full 6x8 matrix.
        for i in range(0, 6):
            for j in range(0, 8):
                if theta[i, j] > np.pi:
                    theta[i, j] -= 2 * np.pi
                elif theta[i, j] < -np.pi:
                    theta[i, j] += 2 * np.pi
        return theta
    # _analytic_ur5_inverse_kinematics returns the 6x8 matrix with the 8 possible sets of 6 motor angles that realize the given UR5 pose.

    ## Computes the Jacobian matrix of the relation between the joints and the pose vector.
    # @param self The object pointer.
    # @param q_Past An initial joint vector at which the derivative is taken.
    # @param deltaTheta A joint-difference vector over an infinitesimal time, used to compute the derivative.
def jacobian(self, q_Past, deltaTheta, rpy = False): jacobian_matrix = np.zeros((6,6)) FK_init = self.ur5_direct_kinematics(np.squeeze(np.asarray(q_Past.transpose() + self.delta_standard_DH[3,:])), vector = True, rpy = rpy) step = deltaTheta NaN_check = False for i in range(0,6): q_aux = np.array([[0],[0],[0],[0],[0],[0]], float) q_aux[i] += step[i] q_aux = q_Past + q_aux q_aux = np.squeeze(np.asarray(q_aux.transpose() + self.delta_standard_DH[3,:])) FK_next = self.ur5_direct_kinematics(q_aux, vector = True, rpy = rpy) jacobian_matrix[i,:] = (tf.computeDifference(FK_next, FK_init)/(step[i])) if(np.any(np.isnan(jacobian_matrix[i,:]))): jacobian_matrix[i,:] = np.zeros(6) NaN_check = True if(NaN_check): print("[WARNING] NaN found on Jacobian.") return jacobian_matrix.transpose() def jacobian2(self, q): jacobian_matrix = np.zeros((6,6)) # Atualiza as matrizes self.ur5_direct_kinematics(np.squeeze(np.asarray(q.transpose() + self.delta_standard_DH[3,:]))) # R^0_{i-1}dot(0,0,1)cross(d^0_n - d^0_{i-1}) auxRow = np.array([[0],[0],[1]]) # Row 1 jacobian_matrix[0:3,0] = np.cross(np.dot(np.eye(3),auxRow),self._H[0:3,3],axisa=0,axisb=0,axisc=1) jacobian_matrix[3:6,0] = np.dot(np.eye(3),auxRow).transpose() # Row 2 jacobian_matrix[0:3,1] = np.cross(np.dot(self._A_1[0:3,0:3],auxRow),(self._H[0:3,3] - self._A_1[0:3,3]),axisa=0,axisb=0,axisc=1) jacobian_matrix[3:6,1] = np.dot(self._A_1[0:3,0:3],auxRow).transpose() # Row 3 aux = self._A_1 * self._A_2 jacobian_matrix[0:3,2] = np.cross(np.dot(aux[0:3,0:3],auxRow),(self._H[0:3,3] - aux[0:3,3]),axisa=0,axisb=0,axisc=1) jacobian_matrix[3:6,2] = np.dot(aux[0:3,0:3],auxRow).transpose() # Row 4 aux = aux * self._A_3 jacobian_matrix[0:3,3] = np.cross(np.dot(aux[0:3,0:3],auxRow),(self._H[0:3,3] - aux[0:3,3]),axisa=0,axisb=0,axisc=1) jacobian_matrix[3:6,3] = np.dot(aux[0:3,0:3],auxRow).transpose() # Row 5 aux = aux * self._A_4 jacobian_matrix[0:3,4] = np.cross(np.dot(aux[0:3,0:3],auxRow),(self._H[0:3,3] - aux[0:3,3]),axisa=0,axisb=0,axisc=1) jacobian_matrix[3:6,4] = np.dot(aux[0:3,0:3],auxRow).transpose() # Row 6 aux = aux * self._A_5 jacobian_matrix[0:3,5] = np.cross(np.dot(aux[0:3,0:3],auxRow),(self._H[0:3,3] - aux[0:3,3]),axisa=0,axisb=0,axisc=1) jacobian_matrix[3:6,5] = np.dot(aux[0:3,0:3],auxRow).transpose() return jacobian_matrix def jacobianEndEffectorReference(self,jacobian): fowardKinematics = self._H jacobianTransform = np.eye(6) #jacobianTransform[0:3,0:3] = fowardKinematics[0:3,0:3].transpose() jacobianTransform[3:6,3:6] = fowardKinematics[0:3,0:3].transpose() newJacobian = np.dot(jacobianTransform,jacobian) return newJacobian def jacobianAnalytic(self, q): pose = self.ur5_direct_kinematics(np.squeeze(np.asarray(q.transpose() + self.delta_standard_DH[3,:])),vector = True, rpy = True) jacobian = self.jacobian2(q) jacobian = self.jacobianEndEffectorReference(jacobian) # r = pose[3] # p = pose[4] # #y = pose[5] # B = np.array([[1,0,np.sin(p)],[0,np.cos(r),-np.cos(p)*np.sin(r)],[0,np.sin(r),np.cos(p)*np.cos(r)]]) # invB = inv(B) # auxMat = np.eye(6) # auxMat[3:6,3:6] = invB # jacobianAnalytic = np.dot(auxMat,jacobian) #jacobianAnalytic = self.jacobianEndEffectorReference(jacobianAnalytic) return jacobian ## Esse método realiza a cinemática inversa de uma posição espacial para uma das oito configurações possíveis no espaço utilizando aproximação numérica por Newton-Raphson. # Ele retorna um vetor com as seis juntas que representam a configuração escolhida. # @param self O ponteiro do objeto. # @param cartesian_position Vetor [1x6] da posição a ser transformada. 
# @param chosen_theta Configuração escolhida. Default = 2. # @param theta Um parametro que pode ser usado como posição proxima inicial para aproximação numérica # @param rpy Um parâmetro que especifica se a posição cartesiana dada foi em RV ou RPY. def ur5_inverse_kinematics_newthon_raphson(self, cartesian_position, chosen_theta = 2, theta = np.zeros(6), rpy = False): #t = time.clock() if (rpy == True): cartesian_position[3:6] = tf.rollPitchYaw2RotationVector(cartesian_position[3:6]) # A cinemática inversa analitica é inicialmente calculada if (np.all(theta == 0)): theta = self._analytic_ur5_inverse_kinematics(cartesian_position) joint_analytic_IK = theta[:,chosen_theta] else: joint_analytic_IK = theta NaN_check = np.isnan(joint_analytic_IK) if (np.any(NaN_check)): joint_analytic_IK = self.getJointPosition() print ("[WARNING] Nan position found in analytic IK solution, using Actual Joint Position as start position.") # O vetor de juntas inicial a ser corrigido numéricamente é escolhido #print joint_analytic_IK q_i = np.array([0,0,0,0,0,0], float) q_i += joint_analytic_IK joint_analytic_IK = joint_analytic_IK + self.delta_standard_DH[3,:] joint_analytic_IK = np.squeeze(np.asarray(joint_analytic_IK)) FK = self.ur5_direct_kinematics(joint_analytic_IK, True) # Transformação de RV para RPY é realizada para se iniciar o cálculo. cartesian_position_rpy = cartesian_position erro = tf.computeDifference(cartesian_position_rpy, FK) norm_erro = norm(erro) episilon = 0.0001*0.0001 max_iteractions = 500 iteraction = 1 q_i = np.array([[q_i[0]], [q_i[1]],[q_i[2]], [q_i[3]],[q_i[4]], [q_i[5]]]) erro = np.array([[erro[0]], [erro[1]],[erro[2]], [erro[3]],[erro[4]], [erro[5]]]) delta_theta = np.ones(6)*0.000006 delta_theta = np.array([[delta_theta[0]], [delta_theta[1]],[delta_theta[2]], [delta_theta[3]],[delta_theta[4]], [delta_theta[5]]]) while (norm_erro > episilon): # Calcula j = self.jacobian(q_i, delta_theta) try: jt = pinv(j) except: print("[WARNING] Pseudo Inverse with SVD diverged") jt = np.dot(j.transpose(),inv(np.dot(j,j.transpose()))) q_in = np.array([[0],[0],[0],[0],[0],[0]], float) q_in = q_i + np.dot(jt,erro) delta_theta = q_in - q_i q_i = np.array([[0],[0],[0],[0],[0],[0]], float) q_i += q_in q_i = np.squeeze(np.asarray(q_i.transpose())) FK = self.ur5_direct_kinematics(np.squeeze(np.asarray(q_i + self.delta_standard_DH[3,:])), True) erro = tf.computeDifference(cartesian_position_rpy, FK) norm_erro = norm(erro) erro = np.array([[erro[0]], [erro[1]],[erro[2]], [erro[3]],[erro[4]], [erro[5]]]) q_i = np.array([[q_i[0]], [q_i[1]],[q_i[2]], [q_i[3]],[q_i[4]], [q_i[5]]]) iteraction += 1 if (iteraction > max_iteractions): print ("[ERROR] Maximum interactions reached.") break #t2 = time.clock() #print ("Tempo de convergencia NRa: ", t2 - t) q_i = q_i.transpose() q_aux = np.array([q_i[0,0],q_i[0,1],q_i[0,2],q_i[0,3],q_i[0,4],q_i[0,5]], float) return q_aux ## Esse método realiza a cinemática inversa de uma posição espacial para uma das oito configurações possíveis no espaço utilizando aproximação numérica por Cyclic Coordinate Descent. # Ele retorna um vetor com as seis juntas que representam a configuração escolhida. Obs.: Lento. # @param self O ponteiro do objeto. # @param cartesian_position Vetor [1x6] da posição a ser transformada. # @param chosen_theta Configuração escolhida. Default = 2. 
    def ur5_inverse_kinematics_ccd(self, cartesian_position, chosen_theta = 2):
        # The analytic inverse kinematics is computed first.
        t = time.perf_counter()  # time.clock() was removed in Python 3.8
        theta = self._analytic_ur5_inverse_kinematics(cartesian_position)
        # The initial joint vector to be refined numerically is chosen.
        joint_analytic_IK = theta[:, chosen_theta]
        self._effective_q = joint_analytic_IK + self.delta_standard_DH[3, :]
        Initial_DK = self.ur5_direct_kinematics(np.squeeze(np.asarray(self._effective_q.transpose())), True)
        Initial_DK[3:6] = tf.rotationVector2RollPitchYaw(Initial_DK[3:6])
        # Cyclic Coordinate Descent
        cartesian_position_rpy = np.hstack((cartesian_position[0:3], tf.rotationVector2RollPitchYaw(cartesian_position[3:6])))
        # Constants used below
        epsilon = 0.0001
        quad_epsilon = epsilon * epsilon
        joint_count = 5
        max_interection = 5000
        interection_count = 1
        interection_count_joint = 1
        direction = 1
        min_step = 0.000017
        max_step = 0.1
        alpha_step = max_step
        Radius = np.sqrt(cartesian_position[0:3].transpose() * cartesian_position[0:3])
        joint_interact = np.zeros(6)
        joint_interact += joint_analytic_IK
        # Initial errors
        Error_Position = cartesian_position[0:3] - Initial_DK[0:3]
        Mean_Position = np.mean(np.dot(Error_Position.transpose(), Error_Position))
        Error_Rotation = tf.computeDifference(cartesian_position_rpy[3:6], Initial_DK[3:6], True)
        Linear_Rotation_Error = Radius * Error_Rotation
        Mean_Rotation = np.mean(np.dot(Linear_Rotation_Error, Linear_Rotation_Error.transpose()))
        erro_quad = (Mean_Position + Mean_Rotation) / 2
        erro_quad_aux = erro_quad
        # Numerical refinement.
        while erro_quad > quad_epsilon:
            joint_interact[joint_count] = joint_interact[joint_count] + direction * alpha_step
            self._effective_q = joint_interact + self.delta_standard_DH[3, :]
            DK = self.ur5_direct_kinematics(np.squeeze(np.asarray(self._effective_q.transpose())), True)
            # These two helpers live in the Transformations module; the original
            # called them here without the `tf.` prefix, which raises NameError.
            DK[3:6] = tf.rotationVector2RollPitchYaw(DK[3:6])
            Error_Position = cartesian_position[0:3] - DK[0:3]
            Mean_Position = np.mean(np.dot(Error_Position.transpose(), Error_Position))
            Error_Rotation = tf.computeDifference(cartesian_position_rpy[3:6], DK[3:6], True)
            Linear_Rotation_Error = Radius * Error_Rotation
            Mean_Rotation = np.mean(np.dot(Linear_Rotation_Error, Linear_Rotation_Error.transpose()))
            erro_quad = (Mean_Position + Mean_Rotation) / 2
            if erro_quad > erro_quad_aux:
                if interection_count_joint == 1:
                    direction = -1 * direction
                    joint_interact[joint_count] = joint_interact[joint_count] + direction * alpha_step
                    interection_count_joint = 0
                    error_direction = erro_quad
                else:
                    if alpha_step > min_step:
                        joint_interact[joint_count] = joint_interact[joint_count] - direction * alpha_step
                        alpha_step = alpha_step / 2
                        interection_count_joint = 1
                    else:
                        joint_interact[joint_count] = joint_interact[joint_count] - direction * alpha_step
                        alpha_step = max_step
                        interection_count_joint = 1
                        joint_count -= 1
                        if joint_count < 0:
                            joint_count = 5
                            interection_count += 1
            else:
                alpha_step = alpha_step / 2
                interection_count_joint = 1
                erro_quad_aux = erro_quad
            # Earlier experimental variant of the step logic, kept commented out:
            #if interection_count_joint == 1:
            #if erro_quad < erro_quad_aux:
            #erro_quad_aux = erro_quad
            #interection_count_joint += 1
            #joint_interact[joint_count] = joint_interact[joint_count] - direction*alpha_step
            #alpha_step = alpha_step/2
            #else:
            #direction = -1*direction
            #joint_interact[joint_count] = joint_interact[joint_count] + direction*alpha_step
            #interection_count_joint += 1
            #else:
            #if erro_quad < erro_quad_aux:
            #erro_quad_aux = erro_quad
            #interection_count_joint += 1
            #joint_interact[joint_count] = joint_interact[joint_count] - direction*alpha_step
            #alpha_step = alpha_step/2
            #else:
            #if (alpha_step < 0.000017)
            #joint_interact[joint_count] = joint_interact[joint_count] - direction*alpha_step
            #alpha_step = alpha_step*2
            #joint_interact[joint_count] = joint_interact[joint_count] + direction*alpha_step
            #alpha_step = np.pi
            #interection_count_joint = 1
            #joint_count -=1
            #if joint_count < 0:
            #joint_count = 5
            #interection_count +=1
            #else:
            #joint_interact[joint_count] = joint_interact[joint_count] - direction*alpha_step
            #interection_count_joint = 1
            #joint_count -=1
            #if joint_count < 0:
            #joint_count = 5
            #interection_count +=1
            if interection_count > max_interection:
                print("[ERROR] Maximum iterations reached.")
                break
        t2 = time.perf_counter()
        print("[INFO] CCD Total time: " + str(t2 - t))
        return joint_interact

    def getMeanValueVector(self, vectorArray):
        print("[INFO] Mean Value: Array, Mean, " + str(vectorArray) + ", " + str(np.mean(vectorArray, axis=0, dtype=np.float64)))

    def controlLoopTranspose(self, desiredPose, poseActual = None):
        if poseActual is None:  # `== None` misbehaves on numpy arrays
            poseActual = self.getPosition()
            poseActual[3:6] = tf.rotationVector2RollPitchYaw(poseActual[3:6])
        poseActualFK = tf.pose2Matrix(poseActual)
        desiredPoseFK = tf.pose2Matrix(desiredPose)
        poseError = desiredPose[0:3] - poseActual[0:3]
        rotationError = tf.matrix2Pose(np.dot(poseActualFK[0:3, 0:3].transpose(), desiredPoseFK[0:3, 0:3]), True)
        if np.any(np.isnan(rotationError)):
            np.nan_to_num(rotationError, False)
        error = np.concatenate((poseError, rotationError), axis=0)[np.newaxis]
        self.normErro = norm(poseError)
        self.errorDB.append(error)
        jacob = self.jacobian(self.getJointPosition()[np.newaxis].transpose(), (np.ones(6)*10e-3)[np.newaxis].transpose())
        # Control
        K = 0.5 * np.eye(6, 6)
        jointControl = np.dot(np.dot(jacob.transpose(), K), error.transpose())
        return np.squeeze(np.asarray(jointControl))

    def controlLoopPseudoInverse(self, desiredPose, poseActual = None):
        if poseActual is None:
            poseActual = self.getPosition()
            poseActual[3:6] = tf.rotationVector2RollPitchYaw(poseActual[3:6])
        poseActualFK = tf.pose2Matrix(poseActual)
        desiredPoseFK = tf.pose2Matrix(desiredPose)
        poseError = desiredPose[0:3] - poseActual[0:3]
        rotationError = tf.matrix2Pose(np.dot(poseActualFK[0:3, 0:3].transpose(), desiredPoseFK[0:3, 0:3]), True)
        if np.any(np.isnan(rotationError)):
            np.nan_to_num(rotationError, False)
        error = np.concatenate((poseError, rotationError), axis=0)[np.newaxis]
        self.normErro = norm(poseError)
        self.errorDB.append(error)
        jacob = self.jacobian(self.getJointPosition()[np.newaxis].transpose(), (np.ones(6)*10e-3)[np.newaxis].transpose())
        # Control
        K = 0.5 * np.eye(6, 6)
        jointControl = np.dot(np.dot(pinv(jacob), K), error.transpose())
        return np.squeeze(np.asarray(jointControl))

    def controlLoopInverse(self, desiredPose, poseActual = None):
        if poseActual is None:
            poseActual = self.getPosition()
            poseActual[3:6] = tf.rotationVector2RollPitchYaw(poseActual[3:6])
        # The original unconditionally re-read the pose here, overriding any
        # caller-supplied poseActual; that duplicate read has been dropped.
        poseActualFK = tf.pose2Matrix(poseActual)
        desiredPoseFK = tf.pose2Matrix(desiredPose)
        poseError = desiredPose[0:3] - poseActual[0:3]
        rotationError = tf.matrix2Pose(np.dot(poseActualFK[0:3, 0:3].transpose(), desiredPoseFK[0:3, 0:3]), True)
        if np.any(np.isnan(rotationError)):
            np.nan_to_num(rotationError, False)
        error = np.concatenate((poseError, rotationError), axis=0)[np.newaxis]
        self.normErro = norm(poseError)
        self.errorDB.append(error)
        jacob = self.jacobian(self.getJointPosition()[np.newaxis].transpose(), (np.ones(6)*10e-6)[np.newaxis].transpose())
        # Control
        K = 0.5 * np.eye(6, 6)
        jointControl = 
np.dot(np.dot(inv(jacob),K),error.transpose()) return np.squeeze(np.asarray(jointControl)) def controlLoopDLS(self, desiredPose, poseActual = None, step = 0.008, jointSpeedReference = np.array([0, 0, 0, 0, 0, 0]), cartesianSpeedReference = np.array([0, 0, 0, 0, 0, 0])): if (poseActual == None): poseActual = self.getPosition() poseActual[3:6] = tf.rotationVector2RollPitchYaw(poseActual[3:6]) #print(self.getPosition()) #print(self.getJointPosition()) poseActual = self.getPosition() poseActual[3:6] = tf.rotationVector2RollPitchYaw(poseActual[3:6]) poseActualFK = tf.pose2Matrix(poseActual) desiredPoseFK = tf.pose2Matrix(desiredPose) poseError = desiredPose[0:3] - poseActual[0:3] rotationError = tf.matrix2Pose(np.dot(poseActualFK[0:3,0:3].transpose(),desiredPoseFK[0:3,0:3]), True) if np.any(np.isnan(rotationError)): print('[INFO][ControlLoopDLS] NaN found on control') np.nan_to_num(rotationError, False) # Error Calculation #Kp error = np.hstack((poseError, rotationError)) #Kd error_D = (error - self.errorPrevious)/step self.error_D_DB.append(error_D) self.errorPrevious = error errorFiltered = butter_lowpass_filter(np.asarray(self.error_D_DB, dtype=np.float32), 3, 125, order=2) error_D = errorFiltered[errorFiltered.shape[0]-1] #Ki self.errorSum = self.errorSum + error # for i in range(0,6): # if (self.errorSum[i] > 0.1): # self.errorSum[i] = 0.1 # elif(self.errorSum[i] < -0.1): # self.errorSum[i] = -0.1 # print('Error Sum ' + str(self.errorSum)) # if (len(self.errorDB) > 1000): # self.errorSum = self.errorSum - np.asarray(self.errorDB[len(self.errorDB) - 1000], dtype=np.float32) #DB self.normErro = norm(poseError) self.errorDB.append(error) #jacob = self.jacobian(self.getJointPosition()[np.newaxis].transpose(),(np.ones(6)*10e-6)[np.newaxis].transpose(), rpy = True) #jacob = self.jacobian2(self.getJointPosition()) jacob = self.jacobianAnalytic(self.getJointPosition()) # Control Kp = 5*np.eye(6,6) #10 #5 # Kp[0,0] = 1.5 # Kp[1,1] = 1.5 # Kp[2,2] = 1.5 # Kp[0,3] = 0.2#0.5 # Kp[0,4] = 0.1#0.5 # Kp[0,5] = 0.1#0.5 # Kp[1,3] = 0#0.5 # Kp[1,4] = 0#0.5 # Kp[1,5] = 0#0.5 # Kp[2,3] = 0#0.5 # Kp[2,4] = 0#0.5 # Kp[2,5] = 0#0.5 #Kp[3,3] = 16#0.5 # Kp[3,4] = 0#0.5 # Kp[3,5] = 0#0.5 # Kp[4,3] = 0#0.5 #Kp[4,4] = 16#0.5 # Kp[4,5] = 0#0.5 # Kp[5,3] = 0#0.5 # Kp[5,4] = 0#0.5 #Kp[5,5] = 16#0.5 Kd = 2*np.eye(6,6) # Kd[3,3] = 0.1 # Kd[4,4] = 0.1 # Kd[5,5] = 0.1 Ki = 0.25*np.eye(6,6) # Ki[3,3] = 0.00055 #0.55 # Ki[4,4] = 0.00055 # Ki[5,5] = 0.00055 # WindupUpperLimit = np.array([0.15, 0.15, 0.15, 0.15, 0.15, 0.15]) # WindupLowerLimit = -np.array([0.15, 0.15, 0.15, 0.15, 0.15, 0.15]) k0 = 0.01 w0 = 0.01 KpControl = np.dot(Kp,error.transpose()) KdControl = np.dot(Kd,error_D.transpose()) KiControl = np.dot(Ki,self.errorSum.transpose()) # print(KiControl) # print('\n') # for i in range(0,6): # if (KiControl[i] > 0.02): # KiControl[i] = 0.02 # elif(KiControl[i] < -0.02): # KiControl[i] = -0.02 ControlSum = KpControl + cartesianSpeedReference #+ KiControl t1 = time.perf_counter() w = np.sqrt(np.linalg.det(np.dot(jacob,jacob.transpose()))) if (w < w0): lamb = k0*(np.power((1 - (w/w0)),2)) print('[WARNING] Near Singularity: ' + str(w)) else: lamb = 0 lamb2 = lamb*np.eye(6,6) invJacob = np.dot(jacob.transpose(),inv(np.dot(jacob,jacob.transpose()) + lamb2)) t2 = time.perf_counter() #t1 = time.perf_counter() #invJacob = inv(jacob) #t2 = time.perf_counter() JacobianProcessTime = t2 - t1 self.processTimeList.append(JacobianProcessTime) self.wDB.append(w) #invJacob = jacob.transpose() jointControl = np.dot(invJacob,ControlSum) 
        #np.dot(np.dot(np.dot(jacob.transpose(),inv(np.dot(jacob,jacob.transpose()) + lamb2)),Kp),error.transpose())
        #jointControl = jointControl + jointSpeedReference
        # for i in range(0,6):
        #     if (jointControl[i] > WindupUpperLimit[i]):
        #         self.u[i] = WindupUpperLimit[i]
        #     elif(jointControl[i] < WindupLowerLimit[i]):
        #         self.u[i] = WindupLowerLimit[i]
        #     else:
        #         self.u[i] = jointControl[i]
        # self.errorSaturation = jointControl - self.u
        # print(self.errorSaturation)
        # print('Error Sum windup' + str((np.dot(jacob,jointControl) - KpControl)/Ki[0,0]))
        # for i in range(0,6):
        #     if (jointControl[i] > 0.4):
        #         jointControl[i] = 0.4
        #     elif (jointControl[i] < -0.4):
        #         jointControl[i] = -0.4
        return np.squeeze(np.asarray(jointControl))

    def speedTransform(self, desiredSpeed, q=None, step=0.008):
        # `q == None` on a numpy array is an elementwise comparison with no
        # single truth value; test identity with `is None` instead.
        if q is None:
            q = self.getJointPosition()
        #jacobian = self.jacobian(self.getJointPosition()[np.newaxis].transpose(),(np.ones(6)*10e-6)[np.newaxis].transpose(), rpy = True)
        #jacobian = self.jacobian2(q)
        jacobian = self.jacobianAnalytic(q)
        jointSpeed = np.dot(inv(jacobian), desiredSpeed.transpose())
        return jointSpeed

def butter_lowpass(cutoff, fs, order=5):
    nyq = 0.5 * fs
    normal_cutoff = cutoff / nyq
    b, a = butter(order, normal_cutoff, btype='low', analog=False)
    return b, a

def butter_lowpass_filter(data, cutoff, fs, order=5):
    b, a = butter_lowpass(cutoff, fs, order=order)
    y = lfilter(b, a, data)
    return y
or = True,
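A minimal usage sketch for the class above (not part of the original file): it exercises only methods defined in UR5Class, assumes no live robot connection, and assumes the analytic solver returns finite candidates for the chosen pose; the joint vector is a placeholder.

ur5 = UR5Class()                       # zero calibration matrix by default
q_home = np.zeros(6)                   # placeholder joint configuration
pose = ur5.ur5_direct_kinematics(q_home, vector=True)    # [x, y, z, rx, ry, rz]
candidates = ur5._analytic_ur5_inverse_kinematics(pose)  # 6x8 candidate joint sets
q_solution = ur5.ur5_inverse_kinematics_newthon_raphson(pose, chosen_theta=2)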
factory.go
/* Copyright The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // Code generated by informer-gen. DO NOT EDIT. package externalversions import ( reflect "reflect" sync "sync" time "time" versioned "github.com/vincent-pli/tektonpipeline-listener/pkg/generated/clientset/versioned" internalinterfaces "github.com/vincent-pli/tektonpipeline-listener/pkg/generated/informers/externalversions/internalinterfaces" samplecontroller "github.com/vincent-pli/tektonpipeline-listener/pkg/generated/informers/externalversions/samplecontroller" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" runtime "k8s.io/apimachinery/pkg/runtime" schema "k8s.io/apimachinery/pkg/runtime/schema" cache "k8s.io/client-go/tools/cache" ) // SharedInformerOption defines the functional option type for SharedInformerFactory. type SharedInformerOption func(*sharedInformerFactory) *sharedInformerFactory type sharedInformerFactory struct { client versioned.Interface namespace string tweakListOptions internalinterfaces.TweakListOptionsFunc lock sync.Mutex defaultResync time.Duration customResync map[reflect.Type]time.Duration informers map[reflect.Type]cache.SharedIndexInformer // startedInformers is used for tracking which informers have been started. // This allows Start() to be called multiple times safely. startedInformers map[reflect.Type]bool } // WithCustomResyncConfig sets a custom resync period for the specified informer types. func WithCustomResyncConfig(resyncConfig map[v1.Object]time.Duration) SharedInformerOption
// WithTweakListOptions sets a custom filter on all listers of the configured SharedInformerFactory.
func WithTweakListOptions(tweakListOptions internalinterfaces.TweakListOptionsFunc) SharedInformerOption {
	return func(factory *sharedInformerFactory) *sharedInformerFactory {
		factory.tweakListOptions = tweakListOptions
		return factory
	}
}

// WithNamespace limits the SharedInformerFactory to the specified namespace.
func WithNamespace(namespace string) SharedInformerOption {
	return func(factory *sharedInformerFactory) *sharedInformerFactory {
		factory.namespace = namespace
		return factory
	}
}

// NewSharedInformerFactory constructs a new instance of sharedInformerFactory for all namespaces.
func NewSharedInformerFactory(client versioned.Interface, defaultResync time.Duration) SharedInformerFactory {
	return NewSharedInformerFactoryWithOptions(client, defaultResync)
}

// NewFilteredSharedInformerFactory constructs a new instance of sharedInformerFactory.
// Listers obtained via this SharedInformerFactory will be subject to the same filters
// as specified here.
// Deprecated: Please use NewSharedInformerFactoryWithOptions instead
func NewFilteredSharedInformerFactory(client versioned.Interface, defaultResync time.Duration, namespace string, tweakListOptions internalinterfaces.TweakListOptionsFunc) SharedInformerFactory {
	return NewSharedInformerFactoryWithOptions(client, defaultResync, WithNamespace(namespace), WithTweakListOptions(tweakListOptions))
}

// NewSharedInformerFactoryWithOptions constructs a new instance of a SharedInformerFactory with additional options.
func NewSharedInformerFactoryWithOptions(client versioned.Interface, defaultResync time.Duration, options ...SharedInformerOption) SharedInformerFactory {
	factory := &sharedInformerFactory{
		client:           client,
		namespace:        v1.NamespaceAll,
		defaultResync:    defaultResync,
		informers:        make(map[reflect.Type]cache.SharedIndexInformer),
		startedInformers: make(map[reflect.Type]bool),
		customResync:     make(map[reflect.Type]time.Duration),
	}

	// Apply all options
	for _, opt := range options {
		factory = opt(factory)
	}

	return factory
}

// Start initializes all requested informers.
func (f *sharedInformerFactory) Start(stopCh <-chan struct{}) {
	f.lock.Lock()
	defer f.lock.Unlock()

	for informerType, informer := range f.informers {
		if !f.startedInformers[informerType] {
			go informer.Run(stopCh)
			f.startedInformers[informerType] = true
		}
	}
}

// WaitForCacheSync blocks until the caches of all started informers have synced.
func (f *sharedInformerFactory) WaitForCacheSync(stopCh <-chan struct{}) map[reflect.Type]bool {
	informers := func() map[reflect.Type]cache.SharedIndexInformer {
		f.lock.Lock()
		defer f.lock.Unlock()

		informers := map[reflect.Type]cache.SharedIndexInformer{}
		for informerType, informer := range f.informers {
			if f.startedInformers[informerType] {
				informers[informerType] = informer
			}
		}
		return informers
	}()

	res := map[reflect.Type]bool{}
	for informType, informer := range informers {
		res[informType] = cache.WaitForCacheSync(stopCh, informer.HasSynced)
	}
	return res
}

// InformerFor returns the SharedIndexInformer for obj using an internal
// client.
func (f *sharedInformerFactory) InformerFor(obj runtime.Object, newFunc internalinterfaces.NewInformerFunc) cache.SharedIndexInformer { f.lock.Lock() defer f.lock.Unlock() informerType := reflect.TypeOf(obj) informer, exists := f.informers[informerType] if exists { return informer } resyncPeriod, exists := f.customResync[informerType] if !exists { resyncPeriod = f.defaultResync } informer = newFunc(f.client, resyncPeriod) f.informers[informerType] = informer return informer } // SharedInformerFactory provides shared informers for resources in all known // API group versions. type SharedInformerFactory interface { internalinterfaces.SharedInformerFactory ForResource(resource schema.GroupVersionResource) (GenericInformer, error) WaitForCacheSync(stopCh <-chan struct{}) map[reflect.Type]bool Samplecontroller() samplecontroller.Interface } func (f *sharedInformerFactory) Samplecontroller() samplecontroller.Interface { return samplecontroller.New(f, f.namespace, f.tweakListOptions) }
{ return func(factory *sharedInformerFactory) *sharedInformerFactory { for k, v := range resyncConfig { factory.customResync[reflect.TypeOf(k)] = v } return factory } }
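A minimal usage sketch of the factory API above. The generated-package import paths, the clientset interface, and the 30-second resync below are illustrative assumptions following the usual code-generator layout, not something the source above guarantees:

import (
	"time"

	"k8s.io/sample-controller/pkg/generated/clientset/versioned"
	"k8s.io/sample-controller/pkg/generated/informers/externalversions"
)

func runInformers(client versioned.Interface, stopCh <-chan struct{}) {
	factory := externalversions.NewSharedInformerFactoryWithOptions(
		client,
		30*time.Second, // default resync for informers without a custom resync entry
		externalversions.WithNamespace("default"),
	)
	// Typed informers hang off the group accessor; request the ones you need
	// here so the factory knows what to run.
	_ = factory.Samplecontroller()
	factory.Start(stopCh)            // each requested informer runs in its own goroutine
	factory.WaitForCacheSync(stopCh) // blocks until the started caches are warm
}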
fs.rs
use crate::error::{Error, FileIOAction, FileKind, GleamExpect}; use flate2::{write::GzEncoder, Compression}; use std::{ fmt::Debug, fs::File, io::Write, path::{Path, PathBuf}, }; #[derive(Debug, PartialEq, Clone)] pub struct OutputFile { pub text: String, pub path: PathBuf, } /// A trait used to write files. /// Typically we use an implementation that writes to the file system, /// but in tests and in other places other implementations may be used. pub trait FileSystemWriter: Debug { fn open<'a>(&self, path: &'a Path) -> Result<WrappedWriter<'a>, Error>; } /// A FileWriter implementation that writes to the file system. #[derive(Debug)] pub struct FileSystemAccessor; impl FileSystemAccessor { pub fn new() -> Self { Self } } impl FileSystemWriter for FileSystemAccessor { fn open<'a>(&self, path: &'a Path) -> Result<WrappedWriter<'a>, Error> { tracing::trace!("Writing file {:?}", path); let dir_path = path.parent().ok_or_else(|| Error::FileIO { action: FileIOAction::FindParent, kind: FileKind::Directory, path: path.to_path_buf(), err: None, })?; std::fs::create_dir_all(dir_path).map_err(|e| Error::FileIO { action: FileIOAction::Create, kind: FileKind::Directory, path: dir_path.to_path_buf(), err: Some(e.to_string()), })?; let file = File::create(&path).map_err(|e| Error::FileIO { action: FileIOAction::Create, kind: FileKind::File, path: path.to_path_buf(), err: Some(e.to_string()), })?; Ok(WrappedWriter::new(path, Box::new(file))) } } pub trait Utf8Writer: std::fmt::Write { /// A wrapper around fmt::Write that has Gleam's error handling. fn str_write(&mut self, str: &str) -> Result<(), Error> { let res = self.write_str(str); self.wrap_result(res) } fn wrap_result<T, E: std::error::Error>(&self, result: Result<T, E>) -> crate::Result<()> { self.convert_err(result.map(|_| ())) } fn convert_err<T, E: std::error::Error>(&self, result: Result<T, E>) -> crate::Result<T>; } impl Utf8Writer for String { fn convert_err<T, E: std::error::Error>(&self, result: Result<T, E>) -> crate::Result<T> { result.map_err(|error| Error::FileIO { action: FileIOAction::WriteTo, kind: FileKind::File, path: PathBuf::from("<in memory>"), err: Some(error.to_string()), }) } } pub trait Writer: Write + Utf8Writer { /// A wrapper around io::Write that has Gleam's error handling. fn write(&mut self, bytes: &[u8]) -> Result<(), Error> { let res = std::io::Write::write(self, bytes); self.wrap_result(res) } } // TODO: Remove this when the Rust compiler stops incorrectly suggesting this // could be derived. It can't because Write doesn't implement Debug #[allow(missing_debug_implementations)] /// A wrapper around a Write implementing object that has Gleam's error handling. pub struct
<'a> { path: &'a Path, inner: Box<dyn Write>, } impl Writer for WrappedWriter<'_> {} impl Utf8Writer for WrappedWriter<'_> { fn convert_err<T, E: std::error::Error>(&self, result: Result<T, E>) -> crate::Result<T> { result.map_err(|error| Error::FileIO { action: FileIOAction::WriteTo, kind: FileKind::File, path: self.path.to_path_buf(), err: Some(error.to_string()), }) } } impl<'a> WrappedWriter<'a> { pub fn new(path: &'a Path, inner: Box<dyn Write>) -> Self { Self { path, inner } } pub fn write(&'a mut self, bytes: &[u8]) -> Result<(), Error> { let result = self.inner.write(bytes); self.wrap_result(result) } } impl<'a> Write for WrappedWriter<'a> { fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> { self.inner.write(buf) } fn flush(&mut self) -> std::io::Result<()> { self.inner.flush() } } impl<'a> std::fmt::Write for WrappedWriter<'a> { fn write_str(&mut self, s: &str) -> std::fmt::Result { self.inner .write(s.as_bytes()) .map(|_| ()) .map_err(|_| std::fmt::Error) } } pub fn delete_dir(dir: &PathBuf) -> Result<(), Error> { tracing::trace!("Deleting directory {:?}", dir); if dir.exists() { std::fs::remove_dir_all(&dir).map_err(|e| Error::FileIO { action: FileIOAction::Delete, kind: FileKind::Directory, path: dir.clone(), err: Some(e.to_string()), })?; } else { tracing::trace!("Did not exist for deletion: {:?}", dir); } Ok(()) } // pub fn delete(file: &PathBuf) -> Result<(), Error> { // tracing::trace!("Deleting file {:?}", file); // if file.exists() { // std::fs::remove_file(&file).map_err(|e| Error::FileIO { // action: FileIOAction::Delete, // kind: FileKind::File, // path: file.clone(), // err: Some(e.to_string()), // })?; // } else { // tracing::trace!("Did not exist for deletion: {:?}", file); // } // Ok(()) // } pub fn write_outputs(outputs: &[OutputFile]) -> Result<(), Error> { for file in outputs { write_output(file)?; } Ok(()) } pub fn write_output(file: &OutputFile) -> Result<(), Error> { let OutputFile { path, text } = file; tracing::trace!("Writing file {:?}", path); let dir_path = path.parent().ok_or_else(|| Error::FileIO { action: FileIOAction::FindParent, kind: FileKind::Directory, path: path.clone(), err: None, })?; std::fs::create_dir_all(dir_path).map_err(|e| Error::FileIO { action: FileIOAction::Create, kind: FileKind::Directory, path: dir_path.to_path_buf(), err: Some(e.to_string()), })?; let mut f = File::create(&path).map_err(|e| Error::FileIO { action: FileIOAction::Create, kind: FileKind::File, path: path.clone(), err: Some(e.to_string()), })?; f.write_all(text.as_bytes()).map_err(|e| Error::FileIO { action: FileIOAction::WriteTo, kind: FileKind::File, path: path.clone(), err: Some(e.to_string()), })?; Ok(()) } fn is_gleam_path(path: &PathBuf, dir: impl AsRef<Path>) -> bool { use regex::Regex; lazy_static! 
{ static ref RE: Regex = Regex::new( format!( "^({module}{slash})*{module}\\.gleam$", module = "[a-z][_a-z0-9]*", slash = "(/|\\\\)", ) .as_str() ) .gleam_expect("is_gleam_path() RE regex"); } RE.is_match( path.strip_prefix(dir) .gleam_expect("is_gleam_path(): strip_prefix") .to_str() .gleam_expect("is_gleam_path(): to_str"), ) } #[test] fn is_gleam_path_test() { assert!(is_gleam_path( &PathBuf::from("/some-prefix/a.gleam"), &PathBuf::from("/some-prefix/") )); assert!(is_gleam_path( &PathBuf::from("/some-prefix/one_two/a.gleam"), &PathBuf::from("/some-prefix/") )); assert!(is_gleam_path( &PathBuf::from("/some-prefix/one_two/a123.gleam"), &PathBuf::from("/some-prefix/") )); assert!(is_gleam_path( &PathBuf::from("/some-prefix/one_2/a123.gleam"), &PathBuf::from("/some-prefix/") )); } pub fn gleam_files(dir: &Path) -> impl Iterator<Item = PathBuf> + '_ { walkdir::WalkDir::new(dir) .follow_links(true) .into_iter() .filter_map(Result::ok) .filter(|e| e.file_type().is_file()) .map(|d| d.path().to_path_buf()) .filter(move |d| is_gleam_path(d, dir)) } pub fn gleam_files_excluding_gitignore(dir: &PathBuf) -> impl Iterator<Item = PathBuf> + '_ { ignore::WalkBuilder::new(&dir) .follow_links(true) .require_git(false) .build() .into_iter() .filter_map(Result::ok) .filter(|e| e.file_type().map(|t| t.is_file()).unwrap_or(false)) .map(|d| d.into_path()) .filter(move |d| is_gleam_path(d, dir)) } pub fn create_tar_archive(outputs: Vec<OutputFile>) -> Result<Vec<u8>, Error> { tracing::trace!("Creating tarball archive"); let encoder = GzEncoder::new(vec![], Compression::default()); let mut builder = tar::Builder::new(encoder); for file in outputs { let mut header = tar::Header::new_gnu(); header.set_path(&file.path).map_err(|e| Error::Tar { path: file.path.clone(), err: e.to_string(), })?; header.set_size(file.text.as_bytes().len() as u64); header.set_cksum(); builder .append(&header, file.text.as_bytes()) .map_err(|e| Error::Tar { path: file.path.clone(), err: e.to_string(), })?; } builder .into_inner() .map_err(|e| Error::TarFinish(e.to_string()))? 
.finish() .map_err(|e| Error::Gzip(e.to_string())) } pub fn mkdir(path: impl AsRef<Path> + Debug) -> Result<(), Error> { tracing::trace!("Creating directory {:?}", path); std::fs::create_dir_all(&path).map_err(|err| Error::FileIO { kind: FileKind::Directory, path: PathBuf::from(path.as_ref()), action: FileIOAction::Create, err: Some(err.to_string()), }) } pub fn read_dir(path: impl AsRef<Path> + Debug) -> Result<std::fs::ReadDir, Error> { tracing::trace!("Reading directory {:?}", path); std::fs::read_dir(&path).map_err(|e| Error::FileIO { action: FileIOAction::Read, kind: FileKind::Directory, path: PathBuf::from(path.as_ref()), err: Some(e.to_string()), }) } pub fn read(path: impl AsRef<Path> + Debug) -> Result<String, Error> { tracing::trace!("Reading file {:?}", path); std::fs::read_to_string(&path).map_err(|err| Error::FileIO { action: FileIOAction::Read, kind: FileKind::File, path: PathBuf::from(path.as_ref()), err: Some(err.to_string()), }) } pub fn copy(path: impl AsRef<Path> + Debug, to: impl AsRef<Path> + Debug) -> Result<(), Error> { tracing::trace!("Copying file {:?} to {:?}", path, to); // TODO: include the destination in the error message std::fs::copy(&path, &to) .map_err(|err| Error::FileIO { action: FileIOAction::Copy, kind: FileKind::File, path: PathBuf::from(path.as_ref()), err: Some(err.to_string()), }) .map(|_| ()) } pub fn copy_dir(path: impl AsRef<Path> + Debug, to: impl AsRef<Path> + Debug) -> Result<(), Error> { tracing::trace!("Copying directory {:?} to {:?}", path, to); // TODO: include the destination in the error message fs_extra::dir::copy(&path, &to, &fs_extra::dir::CopyOptions::new()) .map_err(|err| Error::FileIO { action: FileIOAction::Copy, kind: FileKind::Directory, path: PathBuf::from(path.as_ref()), err: Some(err.to_string()), }) .map(|_| ()) } #[cfg(test)] pub mod test { use super::*; use std::{ cell::RefCell, rc::Rc, sync::mpsc::{self, Receiver, Sender}, }; #[derive(Debug, Clone)] pub struct FilesChannel(Sender<(PathBuf, InMemoryFile)>); impl FilesChannel { pub fn new() -> (Self, Receiver<(PathBuf, InMemoryFile)>) { let (sender, receiver) = mpsc::channel(); (Self(sender), receiver) } pub fn recv_utf8_files( receiver: &Receiver<(PathBuf, InMemoryFile)>, ) -> Result<Vec<OutputFile>, ()> { receiver .try_iter() .map(|(path, file)| { Ok(OutputFile { path, text: String::from_utf8(file.into_contents()?).map_err(|_| ())?, }) }) .collect() } } impl FileSystemWriter for FilesChannel { fn open<'a>(&self, path: &'a Path) -> Result<WrappedWriter<'a>, Error> { let file = InMemoryFile::new(); let _ = self.0.send((path.to_path_buf(), file.clone())); Ok(WrappedWriter::new(path, Box::new(file))) } } #[derive(Debug, Default, Clone)] pub struct InMemoryFile { contents: Rc<RefCell<Vec<u8>>>, } impl InMemoryFile { pub fn new() -> Self { Default::default() } pub fn into_contents(self) -> Result<Vec<u8>, ()> { Rc::try_unwrap(self.contents) .map_err(|_| ()) .map(|cell| cell.into_inner()) } } impl Write for InMemoryFile { fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> { self.contents.borrow_mut().write(buf) } fn flush(&mut self) -> std::io::Result<()> { self.contents.borrow_mut().flush() } } impl std::fmt::Write for InMemoryFile { fn write_str(&mut self, s: &str) -> std::fmt::Result { self.contents .borrow_mut() .write(s.as_bytes()) .map(|_| ()) .map_err(|_| std::fmt::Error) } } impl Utf8Writer for InMemoryFile { fn convert_err<T, E: std::error::Error>(&self, result: Result<T, E>) -> crate::Result<T> { result.map_err(|error| Error::FileIO { action: 
FileIOAction::WriteTo, kind: FileKind::File, path: PathBuf::from("<in memory test file>"), err: Some(error.to_string()), }) } } impl Writer for InMemoryFile {} }
WrappedWriter
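A small illustration of what the Utf8Writer abstraction buys: because String implements it (above), code that renders output through the trait can be asserted on entirely in memory, with the same Gleam error handling it gets against the real file system. The render function here is hypothetical:

fn render(writer: &mut impl Utf8Writer) -> crate::Result<()> {
    writer.str_write("pub fn main() {}\n")
}

#[test]
fn render_to_string() {
    let mut buffer = String::new();
    render(&mut buffer).unwrap();
    assert_eq!(buffer, "pub fn main() {}\n");
}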
masked_adlm.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import math import torch import torch.nn.functional as F from fairseq import metrics, utils from fairseq.criterions import FairseqCriterion, register_criterion @register_criterion('masked_adlm') class MaskedAdLmLoss(FairseqCriterion): """ Implementation for the loss used in masked language model (MLM) training. """ def
(self, args, task):
        super(MaskedAdLmLoss, self).__init__(args, task)
        self.vocab = self.task.source_dictionary
        self.register_buffer('margins', torch.zeros((len(self.vocab.count), 1)))
        self.margins.requires_grad = False
        self.margin_lambda = args.margin_lambda
        self.margin_lr = args.margin_lr
        self.margin_norm = args.margin_norm

    @staticmethod
    def add_args(parser):
        """Add criterion-specific arguments to the parser."""
        super(MaskedAdLmLoss, MaskedAdLmLoss).add_args(parser)
        parser.add_argument('--margin_lambda', default=0.5, type=float, metavar='D',
                            help='weight for the adaptive margin loss')
        parser.add_argument('--margin_lr', default=0.0001, type=float, metavar='D',
                            help='learning rate for the adaptive margin updates')
        parser.add_argument('--margin-norm', default='l1', type=str,
                            help='Type of margin norm in the loss')

    def forward(self, model, sample, reduce=True):
        """Compute the loss for the given sample.

        Returns a tuple with three elements:
        1) the loss
        2) the sample size, which is used as the denominator for the gradient
        3) logging outputs to display while training
        """
        # compute MLM loss
        #self.margins.requires_grad = model.training
        masked_tokens = sample['target'].ne(self.padding_idx)
        sample_size = masked_tokens.int().sum().item()

        # (Rare case) When all tokens are masked, the model produces an empty
        # tensor and gives a CUDA error.
        if sample_size == 0:
            masked_tokens = None

        logits = model(**sample['net_input'], masked_tokens=masked_tokens)[0]
        targets = model.get_targets(sample, [logits])

        if sample_size != 0:
            targets = targets[masked_tokens]

        # targets shape: [x]
        # logits.shape: [x, 32769]
        one_hot = F.one_hot(targets, len(self.vocab.count))  # [x, 32769]
        m = F.embedding(targets, self.margins)  # [x, 1]
        #m = self.margins(targets).squeeze(dim=-1)
        margin = m * one_hot  # [x, 32769]
        logits_minus_margin = logits - margin
        log_softmax = F.log_softmax(
            logits_minus_margin.view(-1, logits.size(-1)),
            dim=-1,
            dtype=torch.float32,
        )  # [x, 32769]
        adm_loss = F.nll_loss(
            log_softmax,
            targets.view(-1),
            reduction='sum',
            ignore_index=self.padding_idx,
        )

        # calculate the margin gradient in closed form and update the margins
        with torch.no_grad():
            margin_log_grad = torch.gather(log_softmax.detach(), dim=-1,
                                           index=targets.unsqueeze(-1))  # [x, 1]
            margin_grad_cross = torch.exp(margin_log_grad) - \
                torch.ones_like(margin_log_grad)
            if self.margin_norm == 'l1':
                margin_grad = margin_grad_cross - torch.ones_like(m) * self.margin_lambda
            else:  # l2 norm
                margin_grad = margin_grad_cross - m * self.margin_lambda * 2.0

            margin_update = -1.0 * margin_grad * self.margin_lr
            self.margins.scatter_add_(0, targets.unsqueeze(-1), margin_update.half())

        # quantities below are only used for logging: margin norm and the
        # standard (margin-free) cross-entropy loss
        margin_norm = torch.mean(self.margins) * sample['nsentences']
        normal_loss = F.nll_loss(
            F.log_softmax(
                logits.view(-1, logits.size(-1)),
                dim=-1,
                dtype=torch.float32,
            ),
            targets.view(-1),
            reduction='sum',
            ignore_index=self.padding_idx,
        )

        logging_output = {
            'loss': utils.item(normal_loss.data) if reduce else normal_loss.data,
            'margin_n': utils.item(margin_norm.data) if reduce else margin_norm.data,
            'ntokens': sample['ntokens'],
            'nsentences': sample['nsentences'],
            'sample_size': sample_size,
            'admloss': utils.item(adm_loss.data) if reduce else adm_loss.data,
        }
        return adm_loss, sample_size, logging_output

    @staticmethod
    def reduce_metrics(logging_outputs) -> None:
        """Aggregate logging outputs from data parallel training."""
        loss_sum = sum(log.get('loss', 0) for log in logging_outputs)
        admloss_sum = sum(log.get('admloss', 0) for log in logging_outputs)
        margin_n = sum(log.get('margin_n', 0) for log in logging_outputs)
        sample_size = sum(log.get('sample_size', 0) for log in logging_outputs)
        nsentences = sum(log.get('nsentences', 0) for log in logging_outputs)

        metrics.log_scalar('loss', loss_sum / sample_size / math.log(2), sample_size, round=3)
        metrics.log_scalar('admloss', admloss_sum / sample_size / math.log(2), sample_size, round=3)
        metrics.log_scalar('margin_norm', margin_n / nsentences, 32, round=3)
        metrics.log_derived('ppl', lambda meters: round(2**meters['loss'].avg, 3))

    @staticmethod
    def logging_outputs_can_be_summed() -> bool:
        """
        Whether the logging outputs returned by `forward` can be summed
        across workers prior to calling `reduce_metrics`. Setting this
        to True improves distributed training speed.
        """
        return True
__init__
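A self-contained toy version of the margin arithmetic from forward above, with the vocabulary shrunk to ten entries. margin_lambda = 0.5 and the l1 penalty mirror the defaults, and 1e-4 stands in for --margin_lr; this sketches the update outside fairseq rather than reproducing its API:

import torch
import torch.nn.functional as F

vocab, tokens = 10, 4
logits = torch.randn(tokens, vocab)
targets = torch.randint(0, vocab, (tokens,))
margins = torch.zeros(vocab, 1)

one_hot = F.one_hot(targets, vocab)                  # [tokens, vocab]
m = F.embedding(targets, margins)                    # [tokens, 1], one margin per target
log_p = F.log_softmax(logits - m * one_hot, dim=-1)  # margin subtracted only at the target
adm_loss = F.nll_loss(log_p, targets, reduction='sum')

# Closed-form margin gradient used above: p(target) - 1, plus the l1 penalty.
p_target = log_p.gather(-1, targets.unsqueeze(-1)).exp()           # [tokens, 1]
margin_grad = (p_target - 1.0) - 0.5 * torch.ones_like(m)          # margin_lambda = 0.5
margins.scatter_add_(0, targets.unsqueeze(-1), -1e-4 * margin_grad)  # margin_lr = 1e-4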
greedy_reward_prediction_agent_test.py
# coding=utf-8 # Copyright 2020 The TF-Agents Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tests for greedy_reward_prediction_agent.py.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np import tensorflow as tf # pylint: disable=g-explicit-tensorflow-version-import from tf_agents.bandits.agents import greedy_reward_prediction_agent as greedy_agent from tf_agents.bandits.drivers import driver_utils from tf_agents.bandits.networks import global_and_arm_feature_network from tf_agents.bandits.policies import constraints from tf_agents.bandits.specs import utils as bandit_spec_utils from tf_agents.networks import network from tf_agents.policies import utils as policy_utilities from tf_agents.specs import tensor_spec from tf_agents.trajectories import policy_step from tf_agents.trajectories import time_step as ts from tf_agents.utils import common class DummyNet(network.Network): def __init__(self, unused_observation_spec, action_spec, name=None): super(DummyNet, self).__init__( unused_observation_spec, state_spec=(), name=name) action_spec = tf.nest.flatten(action_spec)[0] num_actions = action_spec.maximum - action_spec.minimum + 1 # Store custom layers that can be serialized through the Checkpointable API. 
self._dummy_layers = [ tf.keras.layers.Dense( num_actions, kernel_initializer=tf.constant_initializer([[1, 1.5, 2], [1, 1.5, 4]]), bias_initializer=tf.constant_initializer([[1], [1], [-10]])) ] def call(self, inputs, step_type=None, network_state=()): del step_type inputs = tf.cast(inputs, tf.float32) for layer in self._dummy_layers: inputs = layer(inputs) return inputs, network_state def _get_initial_and_final_steps(observations, rewards): batch_size = tf.nest.flatten(observations)[0].shape[0] if isinstance(observations, np.ndarray): observations = tf.constant( observations, dtype=tf.float32, name='observation') initial_step = ts.TimeStep( tf.constant( ts.StepType.FIRST, dtype=tf.int32, shape=[batch_size], name='step_type'), tf.constant(0.0, dtype=tf.float32, shape=[batch_size], name='reward'), tf.constant(1.0, dtype=tf.float32, shape=[batch_size], name='discount'), observations) final_step = ts.TimeStep( tf.constant( ts.StepType.LAST, dtype=tf.int32, shape=[batch_size], name='step_type'), tf.constant(rewards, dtype=tf.float32, name='reward'), tf.constant(1.0, dtype=tf.float32, shape=[batch_size], name='discount'), observations) return initial_step, final_step def _get_initial_and_final_steps_nested_rewards(observations, rewards): batch_size = tf.nest.flatten(observations)[0].shape[0] if isinstance(observations, np.ndarray): observations = tf.constant( observations, dtype=tf.float32, name='observation') zero_rewards = { 'reward': tf.constant(0.0, dtype=tf.float32, shape=[batch_size]), 'constraint': tf.constant(0.0, dtype=tf.float32, shape=[batch_size]) } initial_step = ts.TimeStep( tf.constant( ts.StepType.FIRST, dtype=tf.int32, shape=[batch_size], name='step_type'), zero_rewards, tf.constant(1.0, dtype=tf.float32, shape=[batch_size], name='discount'), observations) rewards_nest = tf.nest.map_structure( lambda t: tf.convert_to_tensor(t, dtype=tf.float32), rewards) final_step = ts.TimeStep( tf.constant( ts.StepType.LAST, dtype=tf.int32, shape=[batch_size], name='step_type'), rewards_nest, tf.constant(1.0, dtype=tf.float32, shape=[batch_size], name='discount'), observations) return initial_step, final_step def _get_initial_and_final_steps_with_action_mask(observations, rewards):
def _get_initial_and_final_steps_action_mask_nested_rewards( observations, rewards): batch_size = tf.nest.flatten(observations)[0].shape[0] zero_rewards = { 'reward': tf.constant(0.0, dtype=tf.float32, shape=[batch_size]), 'constraint': tf.constant(0.0, dtype=tf.float32, shape=[batch_size]) } initial_step = ts.TimeStep( tf.constant( ts.StepType.FIRST, dtype=tf.int32, shape=[batch_size], name='step_type'), zero_rewards, tf.constant(1.0, dtype=tf.float32, shape=[batch_size], name='discount'), (observations[0], observations[1])) rewards_nest = tf.nest.map_structure( lambda t: tf.convert_to_tensor(t, dtype=tf.float32), rewards) final_step = ts.TimeStep( tf.constant( ts.StepType.LAST, dtype=tf.int32, shape=[batch_size], name='step_type'), rewards_nest, tf.constant(1.0, dtype=tf.float32, shape=[batch_size], name='discount'), (tf.nest.map_structure( lambda x: x + 100., observations[0]), observations[1])) return initial_step, final_step def _get_action_step(action): return policy_step.PolicyStep( action=tf.convert_to_tensor(action), info=policy_utilities.PolicyInfo()) def _get_experience(initial_step, action_step, final_step): single_experience = driver_utils.trajectory_for_bandit( initial_step, action_step, final_step) # Adds a 'time' dimension. return tf.nest.map_structure( lambda x: tf.expand_dims(tf.convert_to_tensor(x), 1), single_experience) class AgentTest(tf.test.TestCase): def setUp(self): super(AgentTest, self).setUp() tf.compat.v1.enable_resource_variables() self._obs_spec = tensor_spec.TensorSpec([2], tf.float32) self._time_step_spec = ts.time_step_spec(self._obs_spec) self._action_spec = tensor_spec.BoundedTensorSpec( dtype=tf.int32, shape=(), minimum=0, maximum=2) self._observation_spec = self._time_step_spec.observation def testCreateAgent(self): reward_net = DummyNet(self._observation_spec, self._action_spec) agent = greedy_agent.GreedyRewardPredictionAgent( self._time_step_spec, self._action_spec, reward_network=reward_net, optimizer=None) self.assertIsNotNone(agent.policy) def testInitializeAgent(self): reward_net = DummyNet(self._observation_spec, self._action_spec) agent = greedy_agent.GreedyRewardPredictionAgent( self._time_step_spec, self._action_spec, reward_network=reward_net, optimizer=None) init_op = agent.initialize() if not tf.executing_eagerly(): with self.cached_session() as sess: common.initialize_uninitialized_variables(sess) self.assertIsNone(sess.run(init_op)) def testLoss(self): reward_net = DummyNet(self._observation_spec, self._action_spec) observations = np.array([[1, 2], [3, 4]], dtype=np.float32) actions = np.array([0, 1], dtype=np.int32) rewards = np.array([0.5, 3.0], dtype=np.float32) initial_step, final_step = _get_initial_and_final_steps_nested_rewards( observations, rewards) action_step = _get_action_step(actions) experience = _get_experience(initial_step, action_step, final_step) agent = greedy_agent.GreedyRewardPredictionAgent( self._time_step_spec, self._action_spec, reward_network=reward_net, optimizer=None) init_op = agent.initialize() if not tf.executing_eagerly(): with self.cached_session() as sess: common.initialize_uninitialized_variables(sess) self.assertIsNone(sess.run(init_op)) loss, _ = agent._loss(experience) self.evaluate(tf.compat.v1.initialize_all_variables()) self.assertAllClose(self.evaluate(loss), 42.25) def testPolicy(self): reward_net = DummyNet(self._observation_spec, self._action_spec) agent = greedy_agent.GreedyRewardPredictionAgent( self._time_step_spec, self._action_spec, reward_network=reward_net, optimizer=None) observations 
= tf.constant([[1, 2], [3, 4]], dtype=tf.float32) time_steps = ts.restart(observations, batch_size=2) policy = agent.policy action_step = policy.action(time_steps) # Batch size 2. self.assertAllEqual([2], action_step.action.shape) self.evaluate(tf.compat.v1.initialize_all_variables()) actions = self.evaluate(action_step.action) self.assertAllEqual(actions, [1, 2]) def testInitializeRestoreAgent(self): reward_net = DummyNet(self._observation_spec, self._action_spec) agent = greedy_agent.GreedyRewardPredictionAgent( self._time_step_spec, self._action_spec, reward_network=reward_net, optimizer=None) observations = tf.constant([[1, 2], [3, 4]], dtype=tf.float32) time_steps = ts.restart(observations, batch_size=2) policy = agent.policy action_step = policy.action(time_steps) self.evaluate(tf.compat.v1.initialize_all_variables()) checkpoint = tf.train.Checkpoint(agent=agent) latest_checkpoint = tf.train.latest_checkpoint(self.get_temp_dir()) checkpoint_load_status = checkpoint.restore(latest_checkpoint) if tf.executing_eagerly(): self.evaluate(checkpoint_load_status.initialize_or_restore()) self.assertAllEqual(self.evaluate(action_step.action), [1, 2]) else: with self.cached_session() as sess: checkpoint_load_status.initialize_or_restore(sess) self.assertAllEqual(sess.run(action_step.action), [1, 2]) def testTrainAgent(self): reward_net = DummyNet(self._observation_spec, self._action_spec) optimizer = tf.compat.v1.train.GradientDescentOptimizer(learning_rate=0.1) agent = greedy_agent.GreedyRewardPredictionAgent( self._time_step_spec, self._action_spec, reward_network=reward_net, optimizer=optimizer) observations = np.array([[1, 2], [3, 4]], dtype=np.float32) actions = np.array([0, 1], dtype=np.int32) rewards = np.array([0.5, 3.0], dtype=np.float32) initial_step, final_step = _get_initial_and_final_steps( observations, rewards) action_step = _get_action_step(actions) experience = _get_experience(initial_step, action_step, final_step) loss_before, _ = agent.train(experience, None) loss_after, _ = agent.train(experience, None) self.evaluate(tf.compat.v1.initialize_all_variables()) self.assertAllClose(self.evaluate(loss_before), 42.25) self.assertAllClose(self.evaluate(loss_after), 93.46) def testTrainAgentWithConstraint(self): reward_net = DummyNet(self._observation_spec, self._action_spec) optimizer = tf.compat.v1.train.GradientDescentOptimizer(learning_rate=0.1) constraint_net = DummyNet(self._observation_spec, self._action_spec) neural_constraint = constraints.NeuralConstraint( self._time_step_spec, self._action_spec, constraint_network=constraint_net) reward_spec = { 'reward': tensor_spec.TensorSpec( shape=(), dtype=tf.float32, name='reward'), 'constraint': tensor_spec.TensorSpec( shape=(), dtype=tf.float32, name='constraint') } self._time_step_spec = ts.time_step_spec(self._obs_spec, reward_spec) agent = greedy_agent.GreedyRewardPredictionAgent( self._time_step_spec, self._action_spec, reward_network=reward_net, optimizer=optimizer, constraints=[neural_constraint]) observations = np.array([[1, 2], [3, 4]], dtype=np.float32) actions = np.array([0, 1], dtype=np.int32) rewards = { 'reward': np.array([0.5, 3.0], dtype=np.float32), 'constraint': np.array([6.0, 4.0], dtype=np.float32) } initial_step, final_step = _get_initial_and_final_steps_nested_rewards( observations, rewards) action_step = _get_action_step(actions) experience = _get_experience(initial_step, action_step, final_step) loss_before, _ = agent.train(experience, None) self.evaluate(tf.compat.v1.initialize_all_variables()) # The loss is 
the sum of the reward loss and the constraint loss. self.assertAllClose(self.evaluate(loss_before), 42.25 + 30.125) def testTrainAgentWithMask(self): reward_net = DummyNet(self._observation_spec, self._action_spec) optimizer = tf.compat.v1.train.GradientDescentOptimizer(learning_rate=0.1) time_step_spec = ts.time_step_spec((tensor_spec.TensorSpec([2], tf.float32), tensor_spec.TensorSpec([3], tf.int32))) agent = greedy_agent.GreedyRewardPredictionAgent( time_step_spec, self._action_spec, reward_network=reward_net, optimizer=optimizer, observation_and_action_constraint_splitter=lambda x: (x[0], x[1])) observations = (np.array([[1, 2], [3, 4]], dtype=np.float32), np.array([[1, 0, 0], [1, 1, 0]], dtype=np.int32)) actions = np.array([0, 1], dtype=np.int32) rewards = np.array([0.5, 3.0], dtype=np.float32) initial_step, final_step = _get_initial_and_final_steps_with_action_mask( observations, rewards) action_step = _get_action_step(actions) experience = _get_experience(initial_step, action_step, final_step) loss_before, _ = agent.train(experience, None) loss_after, _ = agent.train(experience, None) self.evaluate(tf.compat.v1.initialize_all_variables()) self.assertAllClose(self.evaluate(loss_before), 42.25) self.assertAllClose(self.evaluate(loss_after), 93.46) def testTrainAgentWithMaskAndConstraint(self): reward_net = DummyNet(self._observation_spec, self._action_spec) optimizer = tf.compat.v1.train.GradientDescentOptimizer(learning_rate=0.1) reward_spec = { 'reward': tensor_spec.TensorSpec( shape=(), dtype=tf.float32, name='reward'), 'constraint': tensor_spec.TensorSpec( shape=(), dtype=tf.float32, name='constraint') } observation_and_mask_spec = (tensor_spec.TensorSpec([2], tf.float32), tensor_spec.TensorSpec([3], tf.int32)) time_step_spec = ts.time_step_spec(observation_and_mask_spec, reward_spec) constraint_net = DummyNet(self._observation_spec, self._action_spec) neural_constraint = constraints.NeuralConstraint( self._time_step_spec, self._action_spec, constraint_network=constraint_net) agent = greedy_agent.GreedyRewardPredictionAgent( time_step_spec, self._action_spec, reward_network=reward_net, optimizer=optimizer, observation_and_action_constraint_splitter=lambda x: (x[0], x[1]), constraints=[neural_constraint]) observations = (np.array([[1, 2], [3, 4]], dtype=np.float32), np.array([[1, 0, 0], [1, 1, 0]], dtype=np.int32)) actions = np.array([0, 1], dtype=np.int32) rewards = { 'reward': np.array([0.5, 3.0], dtype=np.float32), 'constraint': np.array([6.0, 4.0], dtype=np.float32) } initial_step, final_step = ( _get_initial_and_final_steps_action_mask_nested_rewards( observations, rewards)) action_step = _get_action_step(actions) experience = _get_experience(initial_step, action_step, final_step) loss_before, _ = agent.train(experience, None) self.evaluate(tf.compat.v1.initialize_all_variables()) # The loss is the sum of the reward loss and the constraint loss. 
self.assertAllClose(self.evaluate(loss_before), 42.25 + 30.125)

  def testTrainAgentWithLaplacianSmoothing(self):
    reward_net = DummyNet(self._observation_spec, self._action_spec)
    optimizer = tf.compat.v1.train.GradientDescentOptimizer(learning_rate=0.1)
    laplacian_matrix = tf.constant([[1.0, -1.0, 0.0], [-1.0, 2.0, -1.0],
                                    [0.0, -1.0, 1.0]])
    agent = greedy_agent.GreedyRewardPredictionAgent(
        self._time_step_spec,
        self._action_spec,
        reward_network=reward_net,
        optimizer=optimizer,
        laplacian_matrix=laplacian_matrix,
        laplacian_smoothing_weight=1.0)
    observations = np.array([[1, 2], [3, 4]], dtype=np.float32)
    actions = np.array([0, 1], dtype=np.int32)
    rewards = np.array([0.5, 3.0], dtype=np.float32)
    initial_step, final_step = _get_initial_and_final_steps(
        observations, rewards)
    action_step = _get_action_step(actions)
    experience = _get_experience(initial_step, action_step, final_step)
    loss_before, _ = agent.train(experience, None)
    self.evaluate(tf.compat.v1.initialize_all_variables())
    # The Laplacian smoothing term ends up adding 22.5 to the loss.
    self.assertAllClose(self.evaluate(loss_before), 42.25 + 22.5)

  def testTrainAgentWithLaplacianSmoothingInvalidMatrix(self):
    if tf.executing_eagerly():
      return

    observations = np.array([[1, 2], [3, 4]], dtype=np.float32)
    actions = np.array([0, 1], dtype=np.int32)
    rewards = np.array([0.5, 3.0], dtype=np.float32)
    initial_step, final_step = _get_initial_and_final_steps(
        observations, rewards)
    action_step = _get_action_step(actions)
    experience = _get_experience(initial_step, action_step, final_step)

    with self.assertRaisesRegexp(tf.errors.InvalidArgumentError, ''):
      reward_net = DummyNet(self._observation_spec, self._action_spec)
      optimizer = tf.compat.v1.train.GradientDescentOptimizer(learning_rate=0.1)
      # Set the Laplacian matrix to be the identity, which is not a valid
      # Laplacian.
laplacian_matrix = tf.eye(3) agent = greedy_agent.GreedyRewardPredictionAgent( self._time_step_spec, self._action_spec, reward_network=reward_net, optimizer=optimizer, laplacian_matrix=laplacian_matrix, laplacian_smoothing_weight=1.0) self.evaluate(tf.compat.v1.initialize_all_variables()) loss_before, _ = agent.train(experience, None) self.evaluate(loss_before) def testTrainPerArmAgent(self): obs_spec = bandit_spec_utils.create_per_arm_observation_spec( 2, 3, 4, add_num_actions_feature=True) time_step_spec = ts.time_step_spec(obs_spec) reward_net = ( global_and_arm_feature_network.create_feed_forward_common_tower_network( obs_spec, (4, 3), (3, 4), (4, 2))) optimizer = tf.compat.v1.train.GradientDescentOptimizer(learning_rate=0.1) agent = greedy_agent.GreedyRewardPredictionAgent( time_step_spec, self._action_spec, reward_network=reward_net, accepts_per_arm_features=True, optimizer=optimizer) observations = { bandit_spec_utils.GLOBAL_FEATURE_KEY: tf.constant([[1, 2], [3, 4]], dtype=tf.float32), bandit_spec_utils.PER_ARM_FEATURE_KEY: tf.cast( tf.reshape(tf.range(24), shape=[2, 4, 3]), dtype=tf.float32), bandit_spec_utils.NUM_ACTIONS_FEATURE_KEY: tf.ones([2], dtype=tf.int32) } actions = np.array([0, 3], dtype=np.int32) rewards = np.array([0.5, 3.0], dtype=np.float32) initial_step, final_step = _get_initial_and_final_steps( observations, rewards) action_step = policy_step.PolicyStep( action=tf.convert_to_tensor(actions), info=policy_utilities.PerArmPolicyInfo( chosen_arm_features=np.array([[1, 2, 3], [3, 2, 1]], dtype=np.float32))) experience = _get_experience(initial_step, action_step, final_step) agent.train(experience, None) self.evaluate(tf.compat.v1.initialize_all_variables()) def testTrainPerArmAgentWithConstraint(self): obs_spec = bandit_spec_utils.create_per_arm_observation_spec(2, 3, 4) reward_spec = { 'reward': tensor_spec.TensorSpec( shape=(), dtype=tf.float32, name='reward'), 'constraint': tensor_spec.TensorSpec( shape=(), dtype=tf.float32, name='constraint') } time_step_spec = ts.time_step_spec(obs_spec, reward_spec) reward_net = ( global_and_arm_feature_network.create_feed_forward_common_tower_network( obs_spec, (4, 3), (3, 4), (4, 2))) optimizer = tf.compat.v1.train.GradientDescentOptimizer(learning_rate=0.1) constraint_net = ( global_and_arm_feature_network.create_feed_forward_common_tower_network( obs_spec, (4, 3), (3, 4), (4, 2))) neural_constraint = constraints.NeuralConstraint( time_step_spec, self._action_spec, constraint_network=constraint_net) agent = greedy_agent.GreedyRewardPredictionAgent( time_step_spec, self._action_spec, reward_network=reward_net, accepts_per_arm_features=True, optimizer=optimizer, constraints=[neural_constraint]) observations = { bandit_spec_utils.GLOBAL_FEATURE_KEY: tf.constant([[1, 2], [3, 4]], dtype=tf.float32), bandit_spec_utils.PER_ARM_FEATURE_KEY: tf.cast( tf.reshape(tf.range(24), shape=[2, 4, 3]), dtype=tf.float32) } actions = np.array([0, 3], dtype=np.int32) rewards = { 'reward': np.array([0.5, 3.0], dtype=np.float32), 'constraint': np.array([6.0, 4.0], dtype=np.float32) } initial_step, final_step = _get_initial_and_final_steps_nested_rewards( observations, rewards) action_step = policy_step.PolicyStep( action=tf.convert_to_tensor(actions), info=policy_utilities.PerArmPolicyInfo( chosen_arm_features=np.array([[1, 2, 3], [3, 2, 1]], dtype=np.float32))) experience = _get_experience(initial_step, action_step, final_step) agent.train(experience, None) self.evaluate(tf.compat.v1.initialize_all_variables()) if __name__ == '__main__': 
tf.test.main()
batch_size = tf.nest.flatten(observations)[0].shape[0] initial_step = ts.TimeStep( tf.constant( ts.StepType.FIRST, dtype=tf.int32, shape=[batch_size], name='step_type'), tf.constant(0.0, dtype=tf.float32, shape=[batch_size], name='reward'), tf.constant(1.0, dtype=tf.float32, shape=[batch_size], name='discount'), (observations[0], observations[1])) final_step = ts.TimeStep( tf.constant( ts.StepType.LAST, dtype=tf.int32, shape=[batch_size], name='step_type'), tf.constant(rewards, dtype=tf.float32, name='reward'), tf.constant(1.0, dtype=tf.float32, shape=[batch_size], name='discount'), (tf.nest.map_structure( lambda x: x + 100., observations[0]), observations[1])) return initial_step, final_step
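For reference, the masked tests above all build agents with the same splitter convention: the observation is a (features, mask) tuple, and observation_and_action_constraint_splitter pulls the two apart before the network sees the features. A tiny illustration:

import numpy as np

splitter = lambda x: (x[0], x[1])  # same lambda passed to the agents above
obs = (np.array([[1., 2.]], dtype=np.float32),  # network features
       np.array([[1, 0, 1]], dtype=np.int32))   # per-action mask: 1 = allowed
features, mask = splitter(obs)
assert mask.shape == (1, 3)  # one row per batch element, one entry per action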
vgg16.py
# -*- coding: utf-8 -*- """VGG16 model for Keras. # Reference - [Very Deep Convolutional Networks for Large-Scale Image Recognition](https://arxiv.org/abs/1409.1556) """ from __future__ import print_function from __future__ import absolute_import import os import warnings from ..models import Model from ..layers import Flatten from ..layers import Dense from ..layers import Input from ..layers import Conv2D from ..layers import MaxPooling2D from ..layers import GlobalAveragePooling2D from ..layers import GlobalMaxPooling2D from ..engine.topology import get_source_inputs from ..utils import layer_utils from ..utils.data_utils import get_file from .. import backend as K from .imagenet_utils import decode_predictions from .imagenet_utils import preprocess_input from .imagenet_utils import _obtain_input_shape WEIGHTS_PATH = 'https://github.com/fchollet/deep-learning-models/releases/download/v0.1/vgg16_weights_tf_dim_ordering_tf_kernels.h5' WEIGHTS_PATH_NO_TOP = 'https://github.com/fchollet/deep-learning-models/releases/download/v0.1/vgg16_weights_tf_dim_ordering_tf_kernels_notop.h5' def VGG16(include_top=True, weights='imagenet', input_tensor=None, input_shape=None, pooling=None, classes=1000): """Instantiates the VGG16 architecture. Optionally loads weights pre-trained on ImageNet. Note that when using TensorFlow, for best performance you should set `image_data_format='channels_last'` in your Keras config at ~/.keras/keras.json. The model and the weights are compatible with both TensorFlow and Theano. The data format convention used by the model is the one specified in your Keras config file. # Arguments include_top: whether to include the 3 fully-connected layers at the top of the network. weights: one of `None` (random initialization), 'imagenet' (pre-training on ImageNet), or the path to the weights file to be loaded. input_tensor: optional Keras tensor (i.e. output of `layers.Input()`) to use as image input for the model. input_shape: optional shape tuple, only to be specified if `include_top` is False (otherwise the input shape has to be `(224, 224, 3)` (with `channels_last` data format) or `(3, 224, 224)` (with `channels_first` data format).
pooling: Optional pooling mode for feature extraction when `include_top` is `False`. - `None` means that the output of the model will be the 4D tensor output of the last convolutional layer. - `avg` means that global average pooling will be applied to the output of the last convolutional layer, and thus the output of the model will be a 2D tensor. - `max` means that global max pooling will be applied. classes: optional number of classes to classify images into, only to be specified if `include_top` is True, and if no `weights` argument is specified. # Returns A Keras model instance. # Raises ValueError: in case of invalid argument for `weights`, or invalid input shape. """ if not (weights in {'imagenet', None} or os.path.exists(weights)): raise ValueError('The `weights` argument should be either ' '`None` (random initialization), `imagenet` ' '(pre-training on ImageNet), ' 'or the path to the weights file to be loaded.') if weights == 'imagenet' and include_top and classes != 1000: raise ValueError('If using `weights` as imagenet with `include_top`' ' as true, `classes` should be 1000') # Determine proper input shape input_shape = _obtain_input_shape(input_shape, default_size=224, min_size=48, data_format=K.image_data_format(), require_flatten=include_top, weights=weights) if input_tensor is None: img_input = Input(shape=input_shape) else: if not K.is_keras_tensor(input_tensor): img_input = Input(tensor=input_tensor, shape=input_shape) else: img_input = input_tensor # Block 1 x = Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv1')(img_input) x = Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv2')(x) x = MaxPooling2D((2, 2), strides=(2, 2), name='block1_pool')(x) # Block 2 x = Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv1')(x) x = Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv2')(x) x = MaxPooling2D((2, 2), strides=(2, 2), name='block2_pool')(x) # Block 3 x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv1')(x) x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv2')(x) x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv3')(x) x = MaxPooling2D((2, 2), strides=(2, 2), name='block3_pool')(x) # Block 4 x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv1')(x) x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv2')(x) x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv3')(x) x = MaxPooling2D((2, 2), strides=(2, 2), name='block4_pool')(x) # Block 5 x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv1')(x) x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv2')(x) x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv3')(x) x = MaxPooling2D((2, 2), strides=(2, 2), name='block5_pool')(x) if include_top: # Classification block x = Flatten(name='flatten')(x) x = Dense(4096, activation='relu', name='fc1')(x) x = Dense(4096, activation='relu', name='fc2')(x) x = Dense(classes, activation='softmax', name='predictions')(x) else: if pooling == 'avg': x = GlobalAveragePooling2D()(x) elif pooling == 'max': x = GlobalMaxPooling2D()(x) # Ensure that the model takes into account # any potential predecessors of `input_tensor`. if input_tensor is not None: inputs = get_source_inputs(input_tensor) else: inputs = img_input # Create model. 
model = Model(inputs, x, name='vgg16') # load weights if weights == 'imagenet': if include_top: weights_path = get_file('vgg16_weights_tf_dim_ordering_tf_kernels.h5', WEIGHTS_PATH, cache_subdir='models', file_hash='64373286793e3c8b2b4e3219cbf3544b') else: weights_path = get_file('vgg16_weights_tf_dim_ordering_tf_kernels_notop.h5', WEIGHTS_PATH_NO_TOP, cache_subdir='models', file_hash='6d6bbae143d832006294945121d1f1fc') model.load_weights(weights_path) if K.backend() == 'theano': layer_utils.convert_all_kernels_in_model(model) if K.image_data_format() == 'channels_first': if include_top: maxpool = model.get_layer(name='block5_pool') shape = maxpool.output_shape[1:] dense = model.get_layer(name='fc1') layer_utils.convert_dense_weights_data_format(dense, shape, 'channels_first') if K.backend() == 'tensorflow': warnings.warn('You are using the TensorFlow backend, yet you ' 'are using the Theano ' 'image data format convention ' '(`image_data_format="channels_first"`). ' 'For best performance, set ' '`image_data_format="channels_last"` in ' 'your Keras config ' 'at ~/.keras/keras.json.') elif weights is not None: model.load_weights(weights) return model
It should have exactly 3 input channels, and width and height should be no smaller than 48. E.g. `(200, 200, 3)` would be one valid value.
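A usage sketch for the constructor documented above, configured as a pooled feature extractor; the 200x200 input size merely satisfies the 48-pixel minimum, and the random batch stands in for real images:

import numpy as np
from keras.applications.vgg16 import VGG16, preprocess_input

model = VGG16(weights='imagenet', include_top=False,
              pooling='avg', input_shape=(200, 200, 3))
images = preprocess_input(np.random.rand(2, 200, 200, 3) * 255.0)
features = model.predict(images)  # shape (2, 512): one pooled vector per image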
p02_slice_operator.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-


def example1():
    """Slice operator.

    seq[::stride]         # [seq[0],   seq[stride], ...,     seq[-1]    ]
    seq[low::stride]      # [seq[low], seq[low+stride], ..., seq[-1]    ]
    seq[:high:stride]     # [seq[0],   seq[stride], ...,     seq[high-1]]
    seq[low:high:stride]  # [seq[low], seq[low+stride], ..., seq[high-1]]
    """
    l = list("01234567")
    assert l[::2] == list("0246")   # start at index 0, take every 2nd element
    assert l[1::2] == list("1357")  # start at index 1, take every 2nd element
    assert l[:4:2] == list("02")    # from the start up to index 4-1, every 2nd element
    assert l[2:6:2] == list("24")   # from index 2 up to index 6-1, every 2nd element


example1()


def example2():
    """Reversed slice operator."""
    l = list("01234567")
    assert l[::-1] == list("76543210")  # start from the last element, reversed order
    assert l[::-2] == list("7531")      # from the last element, every 2nd going backwards
    assert l[-2::-2] == list("6420")    # from index -2, every 2nd going backwards
    assert l[:3:-2] == list("75")       # from the end down to index 3 (exclusive), every 2nd


example2()
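One way to double-check the reversed cases: a stride-k slice selects exactly the indices that range() with the matching arguments produces, so each assertion above can be restated as a comprehension:

l = list("01234567")
assert l[2:6:2] == [l[i] for i in range(2, 6, 2)]
assert l[::-2] == [l[i] for i in range(len(l) - 1, -1, -2)]
assert l[:3:-2] == [l[i] for i in range(len(l) - 1, 3, -2)]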
error.rs
use std::fmt::Display; /// Errors that can occur when loading or streaming an audio file. #[derive(Debug)] #[non_exhaustive] #[cfg_attr( docsrs, doc(cfg(any(feature = "mp3", feature = "ogg", feature = "flac", feature = "wav"))) )] pub enum
{ /// Could not determine the default audio track in the file. NoDefaultTrack, /// Could not determine the sample rate of the audio. UnknownSampleRate, /// The audio uses an unsupported channel configuration. Only /// mono and stereo audio is supported. UnsupportedChannelConfiguration, /// An error occurred while reading the file from the filesystem. IoError(std::io::Error), /// An error occurred when parsing the file. SymphoniaError(symphonia::core::errors::Error), } impl Display for FromFileError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { FromFileError::NoDefaultTrack => { f.write_str("Could not determine the default audio track") } FromFileError::UnknownSampleRate => { f.write_str("Could not detect the sample rate of the audio") } FromFileError::UnsupportedChannelConfiguration => { f.write_str("Only mono and stereo audio is supported") } FromFileError::IoError(error) => error.fmt(f), FromFileError::SymphoniaError(error) => error.fmt(f), } } } impl std::error::Error for FromFileError { fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { match self { FromFileError::IoError(error) => Some(error), FromFileError::SymphoniaError(error) => Some(error), _ => None, } } } impl From<std::io::Error> for FromFileError { fn from(v: std::io::Error) -> Self { Self::IoError(v) } } impl From<symphonia::core::errors::Error> for FromFileError { fn from(v: symphonia::core::errors::Error) -> Self { Self::SymphoniaError(v) } }
FromFileError
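What the two From impls above buy in practice: `?` lifts both io and symphonia errors into FromFileError automatically, so loading code needs no manual match arms. The function below is illustrative, not part of the crate:

use std::{fs::File, path::Path};

fn open_media(path: &Path) -> Result<File, FromFileError> {
    let file = File::open(path)?; // std::io::Error -> FromFileError::IoError
    Ok(file)
}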
sodium_ctx.rs
use std::cell::UnsafeCell; use std::collections::BinaryHeap; use std::collections::HashSet; use std::mem::swap; use std::rc::Rc; use std::rc::Weak; use crate::sodium::gc::Finalize; use crate::sodium::gc::GcCtx; use crate::sodium::gc::Trace; use crate::sodium::impl_::IsLambda0; use crate::sodium::impl_::MemoLazy; use crate::sodium::impl_::Node; pub struct SodiumCtx { pub data: Rc<UnsafeCell<SodiumCtxData>> } pub struct WeakSodiumCtx { pub data: Weak<UnsafeCell<SodiumCtxData>> } pub struct SodiumCtxData { pub gc_ctx: GcCtx, pub next_id: u32, pub transaction_depth: u32, pub callback_depth: u32, pub to_be_updated: BinaryHeap<Node>, pub to_be_updated_set: HashSet<Node>, pub resort_required: bool, pub pre_trans: Vec<Box<FnMut()>>, pub post_trans: Vec<Box<FnMut()>>, pub node_count: u32, pub keep_alive: HashSet<Node>, } impl SodiumCtx { pub fn new() -> SodiumCtx { SodiumCtx { data: Rc::new(UnsafeCell::new(SodiumCtxData { gc_ctx: GcCtx::new(), next_id: 0, transaction_depth: 0, callback_depth: 0, to_be_updated: BinaryHeap::new(), to_be_updated_set: HashSet::new(), resort_required: false, pre_trans: Vec::new(), post_trans: Vec::new(), node_count: 0, keep_alive: HashSet::new(), })) } } pub fn gc_ctx(&self) -> GcCtx { let self_ = unsafe { &*(*self.data).get() }; self_.gc_ctx.clone() } pub fn downgrade(&self) -> WeakSodiumCtx { WeakSodiumCtx { data: Rc::downgrade(&self.data) } } pub fn new_lazy<A: Trace + Finalize + Clone + 'static, THUNK: IsLambda0<A> + 'static>(&self, thunk: THUNK) -> MemoLazy<A> { let mut gc_ctx = self.gc_ctx(); let gc_ctx = &mut gc_ctx; MemoLazy::new(gc_ctx, thunk) } pub fn new_id(&self) -> u32 { let self_ = unsafe { &mut *(*self.data).get() }; let id = self_.next_id; self_.next_id = self_.next_id + 1; id } pub fn add_keep_alive(&self, node: Node) { let self_ = unsafe { &mut *(*self.data).get() }; self_.keep_alive.insert(node); } pub fn remove_keep_alive(&self, node: &Node) { let self_ = unsafe { &mut *(*self.data).get() }; self_.keep_alive.remove(node); } pub fn inc_node_count(&self) { let self_ = unsafe { &mut *(*self.data).get() }; self_.node_count = self_.node_count + 1; } pub fn dec_node_count(&self) { let self_ = unsafe { &mut *(*self.data).get() }; self_.node_count = self_.node_count - 1; } pub fn node_count(&self) -> u32
pub fn inc_callback_depth(&self) {
        let self_ = unsafe { &mut *(*self.data).get() };
        self_.callback_depth = self_.callback_depth + 1;
    }

    pub fn dec_callback_depth(&self) {
        let self_ = unsafe { &mut *(*self.data).get() };
        self_.callback_depth = self_.callback_depth - 1;
    }

    pub fn callback_depth(&self) -> u32 {
        let self_ = unsafe { &*(*self.data).get() };
        self_.callback_depth
    }

    pub fn pre<F: FnMut() + 'static>(&self, f: F) {
        self.transaction(|| {
            let self_ = unsafe { &mut *(*self.data).get() };
            self_.pre_trans.push(Box::new(f));
        });
    }

    pub fn post<F: FnMut() + 'static>(&self, f: F) {
        self.transaction(|| {
            let self_ = unsafe { &mut *(*self.data).get() };
            self_.post_trans.push(Box::new(f));
        });
    }

    pub fn transaction<A, CODE: FnOnce() -> A>(&self, code: CODE) -> A {
        let self_ = unsafe { &mut *(*self.data).get() };
        self_.transaction_depth = self_.transaction_depth + 1;
        let result = code();
        self_.transaction_depth = self_.transaction_depth - 1;
        if self_.transaction_depth == 0 {
            self.propagate();
        }
        result
    }

    pub fn schedule_update_sort(&self) {
        let self_ = unsafe { &mut *(*self.data).get() };
        self_.resort_required = true;
    }

    fn propagate(&self) {
        let self_ = unsafe { &mut *(*self.data).get() };
        if self_.resort_required {
            self_.to_be_updated.clear();
            for node in &self_.to_be_updated_set {
                self_.to_be_updated.push(node.clone());
            }
            self_.resort_required = false;
        }
        loop {
            let mut pre_trans = Vec::new();
            swap(&mut self_.pre_trans, &mut pre_trans);
            for mut f in pre_trans {
                f();
            }
            if self_.pre_trans.is_empty() {
                break;
            }
        }
        self_.transaction_depth = self_.transaction_depth + 1;
        loop {
            let node_op = self_.to_be_updated.pop();
            match node_op {
                Some(node) => {
                    self_.to_be_updated_set.remove(&node);
                    let mark_dependents_dirty = node.update();
                    if mark_dependents_dirty {
                        node.mark_dependents_dirty();
                    }
                }
                None => break
            }
        }
        self_.transaction_depth = self_.transaction_depth - 1;
        loop {
            let mut post_trans = Vec::new();
            swap(&mut self_.post_trans, &mut post_trans);
            for mut f in post_trans {
                f();
            }
            if self_.post_trans.is_empty() {
                break;
            }
        }
    }
}

impl WeakSodiumCtx {
    pub fn upgrade(&self) -> Option<SodiumCtx> {
        self.data.upgrade().map(|data| SodiumCtx { data })
    }
}

impl Clone for SodiumCtx {
    fn clone(&self) -> Self {
        SodiumCtx {
            data: self.data.clone()
        }
    }
}

impl Clone for WeakSodiumCtx {
    fn clone(&self) -> Self {
        WeakSodiumCtx {
            data: self.data.clone()
        }
    }
}
{ let self_ = unsafe { &*(*self.data).get() }; self_.node_count }
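The transaction contract implemented above, spelled out: the depth counter means nested transaction() calls coalesce, and propagation runs exactly once, when the outermost call unwinds. A sketch:

fn single_propagation_pass() {
    let ctx = SodiumCtx::new();
    ctx.transaction(|| {
        ctx.transaction(|| {
            // updates queued here are not propagated yet (depth == 2)
        });
        // still inside the outer transaction (depth == 1)
    }); // depth returns to 0 here, so propagation runs exactly once
}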
table_aws_ec2_application_load_balancer.go
package aws

import (
	"context"
	"strings"

	"github.com/turbot/steampipe-plugin-sdk/grpc/proto"
	"github.com/turbot/steampipe-plugin-sdk/plugin/transform"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/service/elbv2"
	"github.com/turbot/steampipe-plugin-sdk/plugin"
)

//// TABLE DEFINITION

func tableAwsEc2ApplicationLoadBalancer(_ context.Context) *plugin.Table {
	return &plugin.Table{
		Name:        "aws_ec2_application_load_balancer",
		Description: "AWS EC2 Application Load Balancer",
		Get: &plugin.GetConfig{
			KeyColumns:        plugin.SingleColumn("arn"),
			ShouldIgnoreError: isNotFoundError([]string{"LoadBalancerNotFound"}),
			Hydrate:           getEc2ApplicationLoadBalancer,
		},
		List: &plugin.ListConfig{
			Hydrate: listEc2ApplicationLoadBalancers,
		},
		GetMatrixItem: BuildRegionList,
		Columns: awsRegionalColumns([]*plugin.Column{
			{
				Name:        "name",
				Description: "The friendly name of the Load Balancer that was provided during resource creation.",
				Type:        proto.ColumnType_STRING,
				Transform:   transform.FromField("LoadBalancerName"),
			},
			{
				Name:        "arn",
				Description: "The Amazon Resource Name (ARN) of the load balancer.",
				Type:        proto.ColumnType_STRING,
				Transform:   transform.FromField("LoadBalancerArn"),
			},
			{
				Name:        "type",
				Description: "The type of load balancer.",
				Type:        proto.ColumnType_STRING,
			},
			{
				Name:        "scheme",
				Description: "The load balancing scheme of the load balancer.",
				Type:        proto.ColumnType_STRING,
			},
			{
				Name:        "canonical_hosted_zone_id",
				Description: "The ID of the Amazon Route 53 hosted zone associated with the load balancer.",
				Type:        proto.ColumnType_STRING,
			},
			{
				Name:        "vpc_id",
				Description: "The ID of the VPC for the load balancer.",
				Type:        proto.ColumnType_STRING,
			},
			{
				Name:        "created_time",
				Description: "The date and time the load balancer was created.",
				Type:        proto.ColumnType_TIMESTAMP,
			},
			{
				Name:        "customer_owned_ipv4_pool",
				Description: "The ID of the customer-owned address pool.",
				Type:        proto.ColumnType_STRING,
			},
			{
				Name:        "dns_name",
				Description: "The public DNS name of the load balancer.",
				Type:        proto.ColumnType_STRING,
				Transform:   transform.FromField("DNSName"),
			},
			{
				Name:        "ip_address_type",
				Description: "The type of IP addresses used by the subnets for your load balancer.",
				Type:        proto.ColumnType_STRING,
			},
			{
				Name:        "state_code",
				Description: "Current state of the load balancer.",
				Type:        proto.ColumnType_STRING,
				Transform:   transform.FromField("State.Code"),
			},
			{
				Name:        "state_reason",
				Description: "A description of the state.",
				Type:        proto.ColumnType_STRING,
				Transform:   transform.FromField("State.Reason"),
			},
			{
				Name:        "availability_zones",
				Description: "The subnets for the load balancer.",
				Type:        proto.ColumnType_JSON,
			},
			{
				Name:        "security_groups",
				Description: "The IDs of the security groups for the load balancer.",
				Type:        proto.ColumnType_JSON,
			},
			{
				Name:        "load_balancer_attributes",
				Description: "The attributes of the load balancer.",
				Type:        proto.ColumnType_JSON,
				Hydrate:     getAwsEc2ApplicationLoadBalancerAttributes,
				Transform:   transform.FromField("Attributes"),
			},
			{
				Name:        "tags_src",
				Description: "A list of tags attached to the load balancer.",
				Type:        proto.ColumnType_JSON,
				Hydrate:     getAwsEc2ApplicationLoadBalancerTags,
				Transform:   transform.FromValue(),
			},

			// Standard columns
			{
				Name:        "tags",
				Description: resourceInterfaceDescription("tags"),
				Type:        proto.ColumnType_JSON,
				Hydrate:     getAwsEc2ApplicationLoadBalancerTags,
				Transform:   transform.From(getEc2ApplicationLoadBalancerTurbotTags),
			},
			{
				Name:        "title",
				Description: resourceInterfaceDescription("title"),
				Type:        proto.ColumnType_STRING,
				Transform:
transform.FromField("LoadBalancerName"), }, { Name: "akas", Description: resourceInterfaceDescription("akas"), Type: proto.ColumnType_JSON, Transform: transform.FromField("LoadBalancerArn").Transform(arnToAkas), }, }), } } //// LIST FUNCTION func listEc2ApplicationLoadBalancers(ctx context.Context, d *plugin.QueryData, _ *plugin.HydrateData) (interface{}, error) { // TODO put me in helper function var region string matrixRegion := plugin.GetMatrixItem(ctx)[matrixKeyRegion] if matrixRegion != nil { region = matrixRegion.(string) } plugin.Logger(ctx).Trace("listEc2ApplicationLoadBalancers", "AWS_REGION", region) // Create Session svc, err := ELBv2Service(ctx, d, region) if err != nil { return nil, err } // List call err = svc.DescribeLoadBalancersPages( &elbv2.DescribeLoadBalancersInput{}, func(page *elbv2.DescribeLoadBalancersOutput, isLast bool) bool { for _, applicationLoadBalancer := range page.LoadBalancers { // Filtering the response to return only application load balancers if strings.ToLower(*applicationLoadBalancer.Type) == "application" { d.StreamListItem(ctx, applicationLoadBalancer) } } return !isLast }, ) return nil, err } //// HYDRATE FUNCTIONS func getEc2ApplicationLoadBalancer(ctx context.Context, d *plugin.QueryData, _ *plugin.HydrateData) (interface{}, error)
func getAwsEc2ApplicationLoadBalancerAttributes(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { plugin.Logger(ctx).Trace("getAwsEc2ApplicationLoadBalancerAttributes") // TODO put me in helper function var region string matrixRegion := plugin.GetMatrixItem(ctx)[matrixKeyRegion] if matrixRegion != nil { region = matrixRegion.(string) } applicationLoadBalancer := h.Item.(*elbv2.LoadBalancer) // Create service svc, err := ELBv2Service(ctx, d, region) if err != nil { return nil, err } params := &elbv2.DescribeLoadBalancerAttributesInput{ LoadBalancerArn: applicationLoadBalancer.LoadBalancerArn, } loadBalancerData, err := svc.DescribeLoadBalancerAttributes(params) if err != nil { return nil, err } return loadBalancerData, nil } func getAwsEc2ApplicationLoadBalancerTags(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { plugin.Logger(ctx).Trace("getAwsEc2ApplicationLoadBalancerTags") // TODO put me in helper function var region string matrixRegion := plugin.GetMatrixItem(ctx)[matrixKeyRegion] if matrixRegion != nil { region = matrixRegion.(string) } applicationLoadBalancer := h.Item.(*elbv2.LoadBalancer) // Create service svc, err := ELBv2Service(ctx, d, region) if err != nil { return nil, err } params := &elbv2.DescribeTagsInput{ ResourceArns: []*string{aws.String(*applicationLoadBalancer.LoadBalancerArn)}, } loadBalancerData, err := svc.DescribeTags(params) if err != nil { return nil, err } if loadBalancerData.TagDescriptions != nil && len(loadBalancerData.TagDescriptions) > 0 { return loadBalancerData.TagDescriptions[0].Tags, nil } return nil, nil } //// TRANSFORM FUNCTIONS //// func getEc2ApplicationLoadBalancerTurbotTags(_ context.Context, d *transform.TransformData) (interface{}, error) { applicationLoadBalancerTags := d.HydrateItem.([]*elbv2.Tag) if applicationLoadBalancerTags != nil { turbotTagsMap := map[string]string{} for _, i := range applicationLoadBalancerTags { turbotTagsMap[*i.Key] = *i.Value } return turbotTagsMap, nil } return nil, nil }
{
	// TODO put me in helper function
	var region string
	matrixRegion := plugin.GetMatrixItem(ctx)[matrixKeyRegion]
	if matrixRegion != nil {
		region = matrixRegion.(string)
	}

	loadBalancerArn := d.KeyColumnQuals["arn"].GetStringValue()

	// Create service
	svc, err := ELBv2Service(ctx, d, region)
	if err != nil {
		return nil, err
	}

	params := &elbv2.DescribeLoadBalancersInput{
		LoadBalancerArns: []*string{aws.String(loadBalancerArn)},
	}

	op, err := svc.DescribeLoadBalancers(params)
	if err != nil {
		return nil, err
	}

	if len(op.LoadBalancers) > 0 {
		return op.LoadBalancers[0], nil
	}

	return nil, nil
}
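// The repeated "TODO put me in helper function" blocks in this file all
// duplicate the same matrix-region lookup. A minimal sketch of the helper
// they ask for, reusing only the plugin SDK calls already used above; the
// name getRegionFromMatrix is hypothetical and not part of the original
// source.
func getRegionFromMatrix(ctx context.Context) string {
	var region string
	matrixRegion := plugin.GetMatrixItem(ctx)[matrixKeyRegion]
	if matrixRegion != nil {
		region = matrixRegion.(string)
	}
	return region
}

// Each hydrate function could then replace its TODO block with:
//   region := getRegionFromMatrix(ctx)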
unwind-box.rs
// xfail-win32 use std; fn f() { let a = @0; fail; }
fn main() { task::spawn_unlinked(f); }
utils_test.go
package headers_test import ( "testing" "github.com/9seconds/httransform/v2/headers" "github.com/stretchr/testify/suite" ) type ValuesTestSuite struct { suite.Suite } func (suite *ValuesTestSuite) TestNothing() { suite.Empty(headers.Values("")) } func (suite *ValuesTestSuite) TestSingle() { suite.Equal([]string{"value"}, headers.Values("value")) } func (suite *ValuesTestSuite) TestMany() { suite.Equal([]string{"value", "hello"}, headers.Values("value, hello")) } func (suite *ValuesTestSuite) TestManySpace() { suite.Equal([]string{"value ", "hello"}, headers.Values("value , hello")) } func
(t *testing.T) { suite.Run(t, &ValuesTestSuite{}) }
TestValues
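// A minimal sketch of a headers.Values implementation consistent with the
// test expectations above, assuming the function simply splits on the exact
// separator ", "; the real httransform implementation may handle whitespace
// and edge cases differently.
package headers

import "strings"

func Values(value string) []string {
	// TestNothing expects an empty result for an empty input; a bare
	// strings.Split would return [""] here, so guard for it.
	if value == "" {
		return nil
	}
	// Split on ", " exactly. TestManySpace shows that a space before the
	// comma is preserved in the returned element ("value " stays intact).
	return strings.Split(value, ", ")
}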
logger.go
import ( "io" "log" "net/http" "time" ) func NewLogger(w io.Writer, format string) HandlerFunc { logger := log.New(w, format, 0) return HandlerFunc(func(w http.ResponseWriter, r *http.Request, next http.HandlerFunc) { start := time.Now() logger.Printf(" Started %s %s", r.Method, r.URL.Path) rw := &responseWriter{w, 0} next(rw, r) logger.Printf(" Completed %d %s in %v", rw.status, http.StatusText(rw.status), time.Since(start)) }) }
package helm
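// The logger middleware above wraps the http.ResponseWriter in a
// responseWriter that records the status code, but that type is defined
// elsewhere in the package. A minimal sketch of what it presumably looks
// like, matching the positional literal &responseWriter{w, 0} and the
// rw.status field used above; the WriteHeader override is an assumption,
// and net/http is assumed to be imported. Note that if a handler writes a
// body without calling WriteHeader, status stays 0 in this sketch; the real
// implementation may default it to http.StatusOK.
type responseWriter struct {
	http.ResponseWriter
	status int
}

func (rw *responseWriter) WriteHeader(code int) {
	// Record the status code, then delegate to the wrapped writer.
	rw.status = code
	rw.ResponseWriter.WriteHeader(code)
}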
veml6075-uv-display-f3.rs
//! Continuously measure the ultraviolet A and ultraviolet B light sensor data
//! and print it to an SSD1306 OLED display together with the calculated
//! UV index.
//!
//! Introductory blog post with some pictures here:
//! https://blog.eldruin.com/veml6075-uva-uvb-uv-index-light-sensor-driver-in-rust/
//!
//! This example runs on the STM32F3 Discovery board using I2C1.
//!
//! ```
//! F3   <-> VEML6075 <-> Display
//! GND  <-> GND      <-> GND
//! 3.3V <-> VCC      <-> VDD
//! PB7  <-> SDA      <-> SDA
//! PB6  <-> SCL      <-> SCL
//! ```
//!
//! Beware that the VEML6075 runs on 3.3V but PB6 and PB7 run at 5V level,
//! so make sure to put a logic level shifter in between.
//!
//! Run with:
//! `cargo run --example veml6075-uv-display-f3 --target thumbv7em-none-eabihf`.

#![deny(unsafe_code)]
#![no_std]
#![no_main]

use core::convert::TryInto;
use core::fmt::Write;
use cortex_m_rt::entry;
use embedded_graphics::{
    mono_font::{ascii::FONT_6X10, MonoTextStyleBuilder},
    pixelcolor::BinaryColor,
    prelude::*,
    text::{Baseline, Text},
};
use panic_rtt_target as _;
use rtt_target::{rprintln, rtt_init_print};
use ssd1306::{prelude::*, I2CDisplayInterface, Ssd1306};
use stm32f3xx_hal::{self as hal, delay::Delay, pac, prelude::*};
use veml6075::{Calibration, Measurement, Veml6075};

#[entry]
fn main() -> ! {
    rtt_init_print!();
    rprintln!("VEML6075 example");

    let cp = cortex_m::Peripherals::take().unwrap();
    let dp = pac::Peripherals::take().unwrap();

    let mut flash = dp.FLASH.constrain();
    let mut rcc = dp.RCC.constrain();

    let mut gpioe = dp.GPIOE.split(&mut rcc.ahb);

    let clocks = rcc.cfgr.freeze(&mut flash.acr);

    let mut led = gpioe
        .pe9
        .into_push_pull_output(&mut gpioe.moder, &mut gpioe.otyper);
    let mut delay = Delay::new(cp.SYST, clocks);

    let mut gpiob = dp.GPIOB.split(&mut rcc.ahb);
    let mut scl = gpiob
        .pb6
        .into_af4_open_drain(&mut gpiob.moder, &mut gpiob.otyper, &mut gpiob.afrl);
    let mut sda = gpiob
        .pb7
        .into_af4_open_drain(&mut gpiob.moder, &mut gpiob.otyper, &mut gpiob.afrl);
    scl.internal_pull_up(&mut gpiob.pupdr, true);
    sda.internal_pull_up(&mut gpiob.pupdr, true);

    let i2c = hal::i2c::I2c::new(
        dp.I2C1,
        (scl, sda),
        100.kHz().try_into().unwrap(),
        clocks,
        &mut rcc.apb1,
    );

    let manager = shared_bus::BusManager::<cortex_m::interrupt::Mutex<_>, _>::new(i2c);
    let interface = I2CDisplayInterface::new(manager.acquire());
    let mut disp = Ssd1306::new(interface, DisplaySize128x64, DisplayRotation::Rotate0)
        .into_buffered_graphics_mode();
    disp.init().unwrap();
    disp.flush().unwrap();

    let text_style = MonoTextStyleBuilder::new()
        .font(&FONT_6X10)
        .text_color(BinaryColor::On)
        .build();
let mut lines: [heapless::String<32>; 3] = [
        heapless::String::new(),
        heapless::String::new(),
        heapless::String::new(),
    ];
    sensor.enable().unwrap();
    loop {
        // Blink LED 0 to check that everything is actually running.
        // If LED 0 stays off, something went wrong.
        led.set_high().unwrap();
        delay.delay_ms(50_u16);
        led.set_low().unwrap();
        delay.delay_ms(50_u16);

        // If there was an error, 0.00 is printed for all three values.
        let Measurement { uva, uvb, uv_index } = sensor.read().unwrap_or(Measurement {
            uva: 0.0,
            uvb: 0.0,
            uv_index: 0.0,
        });

        lines[0].clear();
        lines[1].clear();
        lines[2].clear();
        write!(lines[0], "UVA: {}", uva).unwrap();
        write!(lines[1], "UVB: {}", uvb).unwrap();
        write!(lines[2], "UV index: {}", uv_index).unwrap();
        disp.clear();
        for (i, line) in lines.iter().enumerate() {
            Text::with_baseline(
                line,
                Point::new(0, i as i32 * 16),
                text_style,
                Baseline::Top,
            )
            .draw(&mut disp)
            .unwrap();
        }
        disp.flush().unwrap();
    }
}
let mut sensor = Veml6075::new(manager.acquire(), Calibration::default());