file_name (string, 3–137 chars) | prefix (string, 0–918k chars) | suffix (string, 0–962k chars) | middle (string, 0–812k chars)
---|---|---|---
doc.go | /*
Package pebble contains the Gobot adaptor and driver for the Pebble smart watch.
Installing:
It requires the 2.x iOS or Android app, and the "watchbot" app (https://gobot.io/x/watchbot)
installed on the Pebble watch. Then install the package by running:
go get github.com/matipan/gobot/platforms/pebble
Example:
Before running the example, make sure the configuration settings match your program. In the example, the API host is your computer's IP address, the robot name is 'pebble', and the robot API port is 8080.
| package main
import (
"fmt"
"github.com/matipan/gobot"
"github.com/matipan/gobot/api"
"github.com/matipan/gobot/platforms/pebble"
)
func main() {
master := gobot.NewMaster()
api.NewAPI(master).Start()
pebbleAdaptor := pebble.NewAdaptor()
watch := pebble.NewDriver(pebbleAdaptor)
work := func() {
watch.SendNotification("Hello Pebble!")
watch.On(watch.Event("button"), func(data interface{}) {
fmt.Println("Button pushed: " + data.(string))
})
watch.On(watch.Event("tap"), func(data interface{}) {
fmt.Println("Tap event detected")
})
}
robot := gobot.NewRobot("pebble",
[]gobot.Connection{pebbleAdaptor},
[]gobot.Device{watch},
work,
)
master.AddRobot(robot)
master.Start()
}
For more information refer to the pebble README:
https://github.com/hybridgroup/gobot/blob/master/platforms/pebble/README.md
*/
package pebble // import "github.com/matipan/gobot/platforms/pebble" | |
receive.py | #!/usr/bin/env python
import sys
import struct
import os
from scapy.all import sniff, sendp, hexdump, get_if_list, get_if_hwaddr
from scapy.all import Packet, IPOption
from scapy.all import ShortField, IntField, LongField, BitField, FieldListField, FieldLenField
from scapy.all import IP, TCP, UDP, Raw
from scapy.layers.inet import _IPOption_HDR
def get_if():
ifs=get_if_list()
iface=None
for i in get_if_list():
if "eth0" in i:
iface=i
break
if not iface:
print "Cannot find eth0 interface"
exit(1)
return iface
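# IP option carrying MRI (Multi-Hop Route Inspection) data: a switch count
# followed by a list of 4-byte switch IDs (hence length = count*4 below).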
class IPOption_MRI(IPOption): | FieldLenField("length", None, fmt="B",
length_of="swids",
adjust=lambda pkt,l:l+4),
ShortField("count", 0),
FieldListField("swids",
[],
IntField("", 0),
length_from=lambda pkt:pkt.count*4) ]
def handle_pkt(pkt):
if TCP in pkt and pkt[TCP].dport == 1234:
print "got a packet"
pkt.show2()
# hexdump(pkt)
sys.stdout.flush()
def main():
ifaces = filter(lambda i: 'eth' in i, os.listdir('/sys/class/net/'))
iface = ifaces[0]
print "sniffing on %s" % iface
sys.stdout.flush()
sniff(iface = iface,
prn = lambda x: handle_pkt(x))
if __name__ == '__main__':
main() | name = "MRI"
option = 31
fields_desc = [ _IPOption_HDR, |
setup_linux.go | package main
import (
"fmt"
"net"
"os"
"strings"
"github.com/songgao/water"
)
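// setup brings up a TUN device and applies the "addr", "peer", "route" and
// "mtu" control strings via the ip(8) command. runOutCmd, runCmd, exclude and
// clears are defined elsewhere in this package.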
func setup(ips []string) *water.Interface | {
addr := strings.Split(ips[0], " ")[1]
ip, _, err := net.ParseCIDR(addr)
if err != nil {
fmt.Println(err)
os.Exit(1)
}
peer := strings.Split(ips[1], " ")[1]
config := water.Config{
DeviceType: water.TUN,
}
iface, err := water.New(config)
if err != nil {
fmt.Println(err)
os.Exit(1)
}
fmt.Printf("interface => %v\n", iface.Name())
runOutCmd(fmt.Sprintf("%s link set dev %s up qlen 100", "ip", iface.Name()))
runOutCmd(fmt.Sprintf("%s addr add dev %s local %s peer %s", "ip", iface.Name(), ip, peer))
for _, val := range ips {
vals := strings.Split(val, " ")
fmt.Printf("control => %s\n", val)
if vals[0] == "route" {
if _, _, err := net.ParseCIDR(vals[1]); err != nil {
fmt.Printf("unknown route => %v\n", vals[1])
} else if strings.Contains(exclude, vals[1]) {
fmt.Printf("exclude route => %v\n", vals[1])
} else {
rk := fmt.Sprintf("%s via %s dev %s", vals[1], ip, iface.Name())
clears[fmt.Sprintf("ip route del %s", rk)] = true
runCmd(fmt.Sprintf("ip route add %s", rk))
}
} else if vals[0] == "mtu" {
runOutCmd(fmt.Sprintf("%s link set dev %s mtu %s", "ip", iface.Name(), vals[1]))
}
}
return iface
} |
|
dateComponent.d.ts | // Type definitions for ag-grid v18.1.1
// Project: http://www.ag-grid.com/ | export interface IDate {
/** Returns the current date represented by this component */
getDate(): Date;
/** Sets the date represented by this component */
setDate(date: Date): void;
}
export interface IDateParams {
/** Method for component to tell ag-Grid that the date has changed. */
onDateChanged: () => void;
filterParams: IDateFilterParams;
}
export interface IDateComp extends IComponent<IDateParams>, IDate {
} | // Definitions by: Niall Crosby <https://github.com/ag-grid/>
import { IComponent } from "../interfaces/iComponent";
import { IDateFilterParams } from "../filter/dateFilter"; |
mod.rs | // Copyright 2014 Johannes Köster.
// Licensed under the MIT license (http://opensource.org/licenses/MIT)
// This file may not be copied, modified, or distributed
// except according to those terms.
//! Module for working with VCF and BCF files.
//!
//! # Performance Remarks
//!
//! Note that the BCF format corresponds to the in-memory representation of BCF/VCF
//! records in Htslib itself. Thus, reading BCF comes without a runtime penalty for
//! parsing, in contrast to reading VCF files.
//! # Example
//! - Obtaining 0-based locus index of the VCF record.
//! - Obtaining alleles of the VCF record.
//! - Calculating alt-allele dosage in a multi-sample VCF / BCF.
//!
//! ```
//! use crate::rust_htslib::bcf::{Reader, Read};
//! use std::convert::TryFrom;
//!
//! let path = &"test/test_string.vcf";
//! let mut bcf = Reader::from_path(path).expect("Error opening file.");
//! // iterate through each row of the vcf body.
//! for (i, record_result) in bcf.records().enumerate() {
//! let mut record = record_result.expect("Failed to read record");
//! let mut s = String::new();
//! for allele in record.alleles() {
//! for c in allele {
//! s.push(char::from(*c))
//! }
//! s.push(' ')
//! }
//! // 0-based position and the list of alleles
//! println!("Locus: {}, Alleles: {}", record.pos(), s);
//! // number of samples in the VCF
//! let sample_count = usize::try_from(record.sample_count()).unwrap();
//!
//! // Counting ref, alt and missing alleles for each sample
//! let mut n_ref = vec![0; sample_count];
//! let mut n_alt = vec![0; sample_count];
//! let mut n_missing = vec![0; sample_count];
//! let gts = record.genotypes().expect("Error reading genotypes");
//! for sample_index in 0..sample_count {
//! // for each sample
//! for gta in gts.get(sample_index).iter() {
//! // for each allele
//! match gta.index() {
//! Some(0) => n_ref[sample_index] += 1, // reference allele
//! Some(_) => n_alt[sample_index] += 1, // alt allele
//! None => n_missing[sample_index] += 1, // missing allele
//! }
//! }
//! }
//! }
//! ```
use std::ffi;
use std::path::Path;
use std::rc::Rc;
use std::str;
use url::Url;
pub mod buffer;
pub mod header;
pub mod record;
use crate::bcf::header::{HeaderView, SampleSubset};
use crate::errors::{Error, Result};
use crate::htslib;
pub use crate::bcf::header::{Header, HeaderRecord};
pub use crate::bcf::record::Record;
/// A trait for a BCF reader with a read method.
pub trait Read: Sized {
/// Read the next record.
///
/// # Arguments
/// * record - an empty record that can be created with `bcf::Reader::empty_record`.
///
/// # Returns
/// `None` if the end of the file was reached, otherwise `Some` containing a
/// `Result` with an error in case of failure.
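///
/// # Example
///
/// A minimal read loop reusing a single record buffer (the path is a
/// placeholder):
///
/// ```no_run
/// use rust_htslib::bcf::{Read, Reader};
///
/// let mut reader = Reader::from_path("in.bcf").expect("Error opening file.");
/// let mut record = reader.empty_record();
/// while let Some(result) = reader.read(&mut record) {
///     result.expect("Failed to read record");
///     // ... use `record` here ...
/// }
/// ```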
fn read(&mut self, record: &mut record::Record) -> Option<Result<()>>;
/// Return an iterator over all records of the VCF/BCF file.
fn records(&mut self) -> Records<'_, Self>;
/// Return the header.
fn header(&self) -> &HeaderView;
/// Return empty record. Can be reused multiple times.
fn empty_record(&self) -> Record;
/// Activate multi-threaded BCF/VCF read support in htslib. This should permit faster
/// reading of large VCF files.
///
/// Setting `n_threads` to `0` does not change the current state. Note that it is not
/// possible to set the number of background threads below `1` once it has been set.
///
/// # Arguments
///
/// * `n_threads` - number of extra background reader threads to use, must be `> 0`.
fn set_threads(&mut self, n_threads: usize) -> Result<()>;
}
/// A VCF/BCF reader.
#[derive(Debug)]
pub struct Reader {
inner: *mut htslib::htsFile,
header: Rc<HeaderView>,
}
unsafe impl Send for Reader {}
/// # Safety
///
/// `hts_file` must be a valid, open htslib file handle. This is the shared
/// implementation for `Reader::set_threads()` and `Writer::set_threads()`.
pub unsafe fn set_threads(hts_file: *mut htslib::htsFile, n_threads: usize) -> Result<()> {
assert!(n_threads > 0, "n_threads must be > 0");
let r = htslib::hts_set_threads(hts_file, n_threads as i32);
if r != 0 {
Err(Error::SetThreads)
} else {
Ok(())
}
}
impl Reader {
/// Create a new reader from a given path.
pub fn from_path<P: AsRef<Path>>(path: P) -> Result<Self> {
match path.as_ref().to_str() {
Some(p) if !path.as_ref().exists() => Err(Error::FileNotFound { path: p.into() }),
Some(p) => Self::new(p.as_bytes()),
_ => Err(Error::NonUnicodePath),
}
}
pub fn copy_header_reader(&mut self, other_reader: &IndexedReader) {
let new_header_view = Rc::clone(&other_reader.header);
self.header = new_header_view;
}
pub fn copy_header_writer(&mut self, writer: &Writer) {
let new_header_view = Rc::clone(&writer.header);
self.header = new_header_view;
}
/// Create a new reader from a given URL.
pub fn from_url(url: &Url) -> Result<Self> {
Self::new(url.as_str().as_bytes())
}
/// Create a new reader from standard input.
pub fn from_stdin() -> Result<Self> {
Self::new(b"-")
}
fn new(path: &[u8]) -> Result<Self> {
let htsfile = bcf_open(path, b"r")?;
let header = unsafe { htslib::bcf_hdr_read(htsfile) };
Ok(Reader {
inner: htsfile,
header: Rc::new(HeaderView::new(header)),
})
}
}
impl Read for Reader {
fn read(&mut self, record: &mut record::Record) -> Option<Result<()>> {
match unsafe { htslib::bcf_read(self.inner, self.header.inner, record.inner) } {
0 => {
unsafe {
// Always unpack record.
htslib::bcf_unpack(record.inner_mut(), htslib::BCF_UN_ALL as i32);
}
record.set_header(Rc::clone(&self.header));
Some(Ok(()))
}
-1 => None,
_ => Some(Err(Error::BcfInvalidRecord)),
}
}
fn records(&mut self) -> Records<'_, Self> {
Records { reader: self }
}
fn set_threads(&mut self, n_threads: usize) -> Result<()> {
unsafe { set_threads(self.inner, n_threads) }
}
fn header(&self) -> &HeaderView {
&self.header
}
/// Return empty record. Can be reused multiple times.
fn empty_record(&self) -> Record {
Record::new(Rc::clone(&self.header))
}
}
impl Drop for Reader {
fn drop(&mut self) {
unsafe {
htslib::hts_close(self.inner);
}
}
}
/// An indexed VCF/BCF reader.
#[derive(Debug)]
pub struct IndexedReader {
/// The synced VCF/BCF reader to use internally.
inner: *mut htslib::bcf_srs_t,
/// The header.
header: Rc<HeaderView>,
/// The position of the previous fetch, if any.
current_region: Option<(u32, u64, u64)>,
}
unsafe impl Send for IndexedReader {}
impl IndexedReader {
/// Create a new `IndexedReader` from path.
///
/// # Arguments
///
/// * `path` - the path to open.
pub fn from_path<P: AsRef<Path>>(path: P) -> Result<Self> {
let path = path.as_ref();
match path.to_str() {
Some(p) if path.exists() => {
Self::new(&ffi::CString::new(p).map_err(|_| Error::NonUnicodePath)?)
}
Some(p) => Err(Error::FileNotFound { path: p.into() }),
None => Err(Error::NonUnicodePath),
}
}
pub fn copy_header_reader(&mut self, other_reader: &IndexedReader) {
let new_header_view = Rc::clone(&other_reader.header);
self.header = new_header_view;
}
pub fn copy_header_writer(&mut self, writer: &Writer) {
let new_header_view = Rc::clone(&writer.header);
self.header = new_header_view;
}
/// Create a new `IndexedReader` from a URL.
pub fn from_url(url: &Url) -> Result<Self> {
Self::new(&ffi::CString::new(url.as_str()).unwrap())
}
/// Create a new `IndexedReader`.
///
/// # Arguments
///
/// * `path` - the path. Use "-" for stdin.
fn new(path: &ffi::CStr) -> Result<Self> {
// Create reader and require existence of index file.
let ser_reader = unsafe { htslib::bcf_sr_init() };
unsafe {
htslib::bcf_sr_set_opt(ser_reader, 0);
} // 0: BCF_SR_REQUIRE_IDX
// Attach a file with the path from the arguments.
if unsafe { htslib::bcf_sr_add_reader(ser_reader, path.as_ptr()) } >= 0 {
let header = Rc::new(HeaderView::new(unsafe {
htslib::bcf_hdr_dup((*(*ser_reader).readers.offset(0)).header)
}));
Ok(IndexedReader {
inner: ser_reader,
header,
current_region: None,
})
} else {
Err(Error::BcfOpen {
target: path.to_str().unwrap().to_owned(),
})
}
}
/// Jump to the given region.
///
/// # Arguments
///
/// * `rid` - numeric ID of the reference to jump to; use `HeaderView::name2rid` for resolving
/// contig name to ID.
/// * `start` - `0`-based start coordinate of region on reference.
/// * `end` - `0`-based end coordinate of region on reference.
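///
/// # Example
///
/// A sketch of fetching a region and iterating over it (file name and
/// coordinates are placeholders):
///
/// ```no_run
/// use rust_htslib::bcf::{IndexedReader, Read};
///
/// let mut reader = IndexedReader::from_path("in.bcf").expect("Error opening file.");
/// let rid = reader.header().name2rid(b"1").expect("Unknown contig");
/// reader.fetch(rid, 10_000, 20_000).expect("Fetching failed");
/// for record in reader.records() {
///     let record = record.expect("Failed to read record");
///     // ... inspect `record` ...
/// }
/// ```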
pub fn fetch(&mut self, rid: u32, start: u64, end: u64) -> Result<()> {
let contig = self.header.rid2name(rid).unwrap();
let contig = ffi::CString::new(contig).unwrap();
if unsafe { htslib::bcf_sr_seek(self.inner, contig.as_ptr(), start as i64) } != 0 {
Err(Error::GenomicSeek {
contig: contig.to_str().unwrap().to_owned(),
start,
})
} else {
self.current_region = Some((rid, start, end));
Ok(())
}
}
}
impl Read for IndexedReader {
fn read(&mut self, record: &mut record::Record) -> Option<Result<()>> {
match unsafe { htslib::bcf_sr_next_line(self.inner) } {
0 => {
if unsafe { (*self.inner).errnum } != 0 {
Some(Err(Error::BcfInvalidRecord))
} else {
None
}
}
i => {
assert!(i > 0, "Must not be negative");
// Note that the sync BCF reader has a different interface than the others
// as it keeps its own buffer already for each record. An alternative here
// would be to replace the `inner` value by an enum that can be a pointer
// into a synced reader or an owning pointer to an allocated record.
unsafe {
htslib::bcf_copy(
record.inner,
*(*(*self.inner).readers.offset(0)).buffer.offset(0),
);
}
unsafe {
// Always unpack record.
htslib::bcf_unpack(record.inner_mut(), htslib::BCF_UN_ALL as i32);
}
record.set_header(Rc::clone(&self.header));
match self.current_region {
Some((rid, _start, end)) => {
if record.rid().is_some()
&& rid == record.rid().unwrap()
&& record.pos() as u64 <= end
{
Some(Ok(()))
} else {
None
}
}
None => Some(Ok(())),
}
}
}
}
fn records(&mut self) -> Records<'_, Self> {
Records { reader: self }
}
fn set_threads(&mut self, n_threads: usize) -> Result<()> {
assert!(n_threads > 0, "n_threads must be > 0");
let r = unsafe { htslib::bcf_sr_set_threads(self.inner, n_threads as i32) };
if r != 0 {
Err(Error::SetThreads)
} else {
Ok(())
}
}
fn header(&self) -> &HeaderView {
&self.header
}
fn empty_record(&self) -> Record {
Record::new(Rc::clone(&self.header))
}
}
impl Drop for IndexedReader {
fn drop(&mut self) {
unsafe { htslib::bcf_sr_destroy(self.inner) };
}
}
/// This module contains the `SyncedReader` struct and related code.
pub mod synced {
use super::*;
/// This module contains bitmask constants for `SyncedReader`.
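///
/// The constants are bitmasks and may be OR-ed together; for example,
/// `pairing::SNPS | pairing::INDELS` is equivalent to `pairing::BOTH`.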
pub mod pairing {
/// Allow different alleles, as long as they all are SNPs.
pub const SNPS: u32 = crate::htslib::BCF_SR_PAIR_SNPS;
/// The same as above, but with indels.
pub const INDELS: u32 = crate::htslib::BCF_SR_PAIR_INDELS;
/// Any combination of alleles can be returned by `bcf_sr_next_line()`.
pub const ANY: u32 = crate::htslib::BCF_SR_PAIR_ANY;
/// At least some of multiallelic ALTs must match. Implied by all the others with the exception of `EXACT`.
pub const SOME: u32 = crate::htslib::BCF_SR_PAIR_SOME;
/// Allow REF-only records with SNPs.
pub const SNP_REF: u32 = crate::htslib::BCF_SR_PAIR_SNP_REF;
/// Allow REF-only records with indels.
pub const INDEL_REF: u32 = crate::htslib::BCF_SR_PAIR_INDEL_REF;
/// Require the exact same set of alleles in all files.
pub const EXACT: u32 = crate::htslib::BCF_SR_PAIR_EXACT;
/// `SNPS | INDELS`.
pub const BOTH: u32 = crate::htslib::BCF_SR_PAIR_BOTH;
/// `SNPS | INDELS | SNP_REF | INDEL_REF`.
pub const BOTH_REF: u32 = crate::htslib::BCF_SR_PAIR_BOTH_REF;
}
/// A wrapper for `bcf_srs_t`; allows joint traversal of multiple VCF and/or BCF files.
#[derive(Debug)]
pub struct SyncedReader {
/// Internal handle for the synced reader.
inner: *mut crate::htslib::bcf_srs_t,
/// RC's of `HeaderView`s of the readers.
headers: Vec<Rc<HeaderView>>,
/// The position of the previous fetch, if any.
current_region: Option<(u32, u64, u64)>,
}
// TODO: add interface for setting threads, ensure that the pool is freed properly
impl SyncedReader {
pub fn new() -> Result<Self> {
let inner = unsafe { crate::htslib::bcf_sr_init() };
if inner.is_null() {
return Err(Error::BcfAllocationError);
}
Ok(SyncedReader {
inner,
headers: Vec::new(),
current_region: None,
})
}
/// Enable or disable requiring an index.
pub fn set_require_index(&mut self, do_require: bool) {
unsafe {
(*self.inner).require_index = if do_require { 1 } else { 0 };
}
}
/// Set the given bitmask of values from the `pairing` module.
pub fn set_pairing(&mut self, bitmask: u32) {
unsafe {
// TODO: 1 actually is BCF_SR_PAIR_LOGIC but is not available here?
crate::htslib::bcf_sr_set_opt(self.inner, 1, bitmask);
}
}
/// Add new reader with the path to the file.
pub fn add_reader<P: AsRef<Path>>(&mut self, path: P) -> Result<()> {
match path.as_ref().to_str() {
Some(p) if path.as_ref().exists() => {
let p_cstring = ffi::CString::new(p).unwrap();
let res =
unsafe { crate::htslib::bcf_sr_add_reader(self.inner, p_cstring.as_ptr()) };
if res == 0 {
return Err(Error::BcfOpen {
target: p.to_owned(),
});
}
let i = (self.reader_count() - 1) as isize;
let header = Rc::new(HeaderView::new(unsafe {
crate::htslib::bcf_hdr_dup((*(*self.inner).readers.offset(i)).header)
}));
self.headers.push(header);
Ok(())
}
_ => Err(Error::NonUnicodePath),
}
}
/// Remove reader with the given index.
pub fn remove_reader(&mut self, idx: u32) {
if idx >= self.reader_count() {
panic!("Invalid reader!");
} else {
unsafe {
crate::htslib::bcf_sr_remove_reader(self.inner, idx as i32);
}
self.headers.remove(idx as usize);
}
}
/// Return number of open files/readers.
pub fn reader_count(&self) -> u32 {
unsafe { (*self.inner).nreaders as u32 }
}
/// Read next line and return number of readers that have the given line (0 if end of all files is reached).
pub fn read_next(&mut self) -> Result<u32> {
let num = unsafe { crate::htslib::bcf_sr_next_line(self.inner) as u32 };
if num == 0 {
if unsafe { (*self.inner).errnum } != 0 {
return Err(Error::BcfInvalidRecord);
}
Ok(0)
} else {
assert!(num > 0, "num returned by htslib must not be negative");
match self.current_region {
Some((rid, _start, end)) => {
for idx in 0..self.reader_count() {
if !self.has_line(idx) {
continue;
}
unsafe {
let record = *(*(*self.inner).readers.offset(idx as isize))
.buffer
.offset(0);
if (*record).rid != (rid as i32) || (*record).pos >= (end as i64) {
return Ok(0);
}
}
}
Ok(num)
}
None => Ok(num),
}
}
}
/// Return whether the given reader has the line.
pub fn has_line(&self, idx: u32) -> bool {
if idx >= self.reader_count() {
panic!("Invalid reader!");
} else {
unsafe { (*(*self.inner).has_line.offset(idx as isize)) != 0 }
}
}
/// Return record from the given reader, if any.
pub fn record(&self, idx: u32) -> Option<Record> {
if self.has_line(idx) {
let record = Record::new(self.headers[idx as usize].clone());
unsafe {
crate::htslib::bcf_copy(
record.inner,
*(*(*self.inner).readers.offset(idx as isize))
.buffer
.offset(0),
);
}
Some(record)
} else {
None
}
}
/// Return header from the given reader.
pub fn header(&self, idx: u32) -> &HeaderView {
// TODO: is the mutability here correct?
if idx >= self.reader_count() {
panic!("Invalid reader!");
} else {
&self.headers[idx as usize]
}
}
/// Jump to the given region.
///
/// # Arguments
///
/// * `rid` - numeric ID of the reference to jump to; use `HeaderView::name2rid` for resolving
/// contig name to ID.
/// * `start` - `0`-based start coordinate of region on reference.
/// * `end` - `0`-based end coordinate of region on reference.
pub fn fetch(&mut self, rid: u32, start: u64, end: u64) -> Result<()> {
let contig = {
let contig = self.header(0).rid2name(rid).unwrap(); //.clone();
ffi::CString::new(contig).unwrap()
};
if unsafe { htslib::bcf_sr_seek(self.inner, contig.as_ptr(), start as i64) } != 0 {
Err(Error::GenomicSeek {
contig: contig.to_str().unwrap().to_owned(),
start,
})
} else {
self.current_region = Some((rid, start, end));
Ok(())
}
}
}
impl Drop for SyncedReader {
fn drop(&mut self) {
unsafe { crate::htslib::bcf_sr_destroy(self.inner) };
}
}
}
#[derive(Clone, Copy, Debug)]
pub enum Format {
VCF,
BCF,
}
/// A VCF/BCF writer.
#[derive(Debug)]
pub struct Writer {
inner: *mut htslib::htsFile,
header: Rc<HeaderView>,
subset: Option<SampleSubset>,
}
unsafe impl Send for Writer {}
impl Writer {
/// Create a new writer that writes to the given path.
///
/// # Arguments
///
/// * `path` - the path
/// * `header` - header definition to use
/// * `uncompressed` - disable compression
/// * `format` - the format to use (`Format::VCF` or `Format::BCF`)
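///
/// # Example
///
/// A sketch of creating an uncompressed VCF writer from an existing reader's
/// header (paths are placeholders):
///
/// ```no_run
/// use rust_htslib::bcf::{Format, Header, Read, Reader, Writer};
///
/// let reader = Reader::from_path("in.bcf").expect("Error opening file.");
/// let header = Header::from_template(reader.header());
/// let mut writer = Writer::from_path("out.vcf", &header, true, Format::VCF)
///     .expect("Error opening file.");
/// // ... translate/subset/write records here ...
/// ```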
pub fn from_path<P: AsRef<Path>>(
path: P,
header: &Header,
uncompressed: bool,
format: Format,
) -> Result<Self> {
if let Some(p) = path.as_ref().to_str() {
Ok(Self::new(p.as_bytes(), header, uncompressed, format)?)
} else {
Err(Error::NonUnicodePath)
}
}
/// Create a new writer from a URL.
///
/// # Arguments
///
/// * `url` - the URL
/// * `header` - header definition to use
/// * `uncompressed` - disable compression
/// * `format` - the format to use (`Format::VCF` or `Format::BCF`)
pub fn from_url(
url: &Url,
header: &Header,
uncompressed: bool,
format: Format,
) -> Result<Self> {
Self::new(url.as_str().as_bytes(), header, uncompressed, format)
}
/// Create a new writer to stdout.
///
/// # Arguments
///
/// * `header` - header definition to use
/// * `uncompressed` - disable compression
/// * `format` - the format to use (`Format::VCF` or `Format::BCF`)
pub fn from_stdout(header: &Header, uncompressed: bool, format: Format) -> Result<Self> {
Self::new(b"-", header, uncompressed, format)
}
fn new(path: &[u8], header: &Header, uncompressed: bool, format: Format) -> Result<Self> {
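// htslib open-mode strings: "w" = plain-text VCF, "wz" = bgzip-compressed VCF,
// "wbu" = uncompressed BCF, "wb" = compressed BCF.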
let mode: &[u8] = match (uncompressed, format) {
(true, Format::VCF) => b"w",
(false, Format::VCF) => b"wz",
(true, Format::BCF) => b"wbu",
(false, Format::BCF) => b"wb",
};
let htsfile = bcf_open(path, mode)?;
unsafe { htslib::bcf_hdr_write(htsfile, header.inner) };
Ok(Writer {
inner: htsfile,
header: Rc::new(HeaderView::new(unsafe {
htslib::bcf_hdr_dup(header.inner)
})),
subset: header.subset.clone(),
})
}
/// Obtain reference to the lightweight `HeaderView` of the BCF header.
pub fn header(&self) -> &HeaderView {
&self.header
}
/// Create empty record for writing to this writer.
///
/// This record can then be reused multiple times.
pub fn empty_record(&self) -> Record {
record::Record::new(Rc::clone(&self.header))
}
/// Translate record to header of this writer.
///
/// # Arguments
///
/// - `record` - The `Record` to translate.
pub fn translate(&mut self, record: &mut record::Record) {
unsafe {
htslib::bcf_translate(self.header.inner, record.header().inner, record.inner);
}
record.set_header(Rc::clone(&self.header));
}
/// Subset samples of record to match header of this writer.
///
/// # Arguments
///
/// - `record` - The `Record` to modify.
pub fn subset(&mut self, record: &mut record::Record) {
if let Some(ref mut subset) = self.subset {
unsafe {
htslib::bcf_subset(
self.header.inner,
record.inner,
subset.len() as i32,
subset.as_mut_ptr(),
);
}
}
}
/// Write `record` to the Writer.
///
/// # Arguments
///
/// - `record` - The `Record` to write.
pub fn write(&mut self, record: &record::Record) -> Result<()> {
if unsafe { htslib::bcf_write(self.inner, self.header.inner, record.inner) } == -1 {
Err(Error::WriteRecord)
} else {
Ok(())
}
}
/// Activate multi-threaded BCF write support in htslib. This should permit faster
/// writing of large BCF files.
///
/// # Arguments
///
/// * `n_threads` - number of extra background writer threads to use, must be `> 0`.
pub fn set_threads(&mut self, n_threads: usize) -> Result<()> {
unsafe { set_threads(self.inner, n_threads) }
}
}
impl Drop for Writer {
fn drop(&mut self) {
unsafe {
htslib::hts_close(self.inner);
}
}
}
#[derive(Debug)]
pub struct Records<'a, R: Read> {
reader: &'a mut R,
}
impl<'a, R: Read> Iterator for Records<'a, R> {
type Item = Result<record::Record>;
fn next(&mut self) -> Option<Result<record::Record>> {
let mut record = self.reader.empty_record();
match self.reader.read(&mut record) {
Some(Err(e)) => Some(Err(e)),
Some(Ok(_)) => Some(Ok(record)),
None => None,
}
}
}
/// Wrapper for opening a BCF file.
fn bcf_open(target: &[u8], mode: &[u8]) -> Result<*mut htslib::htsFile> {
let p = ffi::CString::new(target).unwrap();
let c_str = ffi::CString::new(mode).unwrap();
let ret = unsafe { htslib::hts_open(p.as_ptr(), c_str.as_ptr()) };
if ret.is_null() {
return Err(Error::BcfOpen {
target: str::from_utf8(target).unwrap().to_owned(),
});
}
unsafe {
if !(mode.contains(&b'w')
|| (*ret).format.category == htslib::htsFormatCategory_variant_data)
{
return Err(Error::BcfOpen {
target: str::from_utf8(target).unwrap().to_owned(),
});
}
}
Ok(ret)
}
#[cfg(test)]
mod tests {
use super::record::Buffer;
use super::*;
use crate::bcf::header::Id;
use crate::bcf::record::GenotypeAllele;
use crate::bcf::record::Numeric;
use crate::bcf::Reader;
use std::convert::TryFrom;
use std::fs::File;
use std::io::prelude::Read as IoRead;
use std::path::Path;
use std::str;
fn _test_read<P: AsRef<Path>>(path: &P) {
let mut bcf = Reader::from_path(path).expect("Error opening file.");
assert_eq!(bcf.header.samples(), [b"NA12878.subsample-0.25-0"]);
for (i, rec) in bcf.records().enumerate() {
let record = rec.expect("Error reading record.");
assert_eq!(record.sample_count(), 1);
assert_eq!(record.rid().expect("Error reading rid."), 0);
assert_eq!(record.pos(), 10021 + i as i64);
assert!((record.qual() - 0f32).abs() < std::f32::EPSILON);
let mut buffer = Buffer::new();
assert!(
(record
.info_shared_buffer(b"MQ0F", &mut buffer)
.float()
.expect("Error reading info.")
.expect("Missing tag")[0]
- 1.0)
.abs()
< std::f32::EPSILON
);
if i == 59 {
assert!(
(record
.info_shared_buffer(b"SGB", &mut buffer)
.float()
.expect("Error reading info.")
.expect("Missing tag")[0]
- -0.379885)
.abs()
< std::f32::EPSILON
);
}
// the artificial "not observed" allele is present in each record.
assert_eq!(record.alleles().iter().last().unwrap(), b"<X>");
let mut fmt = record.format(b"PL");
let pl = fmt.integer().expect("Error reading format.");
assert_eq!(pl.len(), 1);
if i == 59 {
assert_eq!(pl[0].len(), 6);
} else {
assert_eq!(pl[0].len(), 3);
}
}
}
#[test]
fn test_read() {
_test_read(&"test/test.bcf");
}
#[test]
fn test_reader_set_threads() {
let path = &"test/test.bcf";
let mut bcf = Reader::from_path(path).expect("Error opening file.");
bcf.set_threads(2).unwrap();
}
#[test]
fn test_writer_set_threads() {
let path = &"test/test.bcf";
let tmp = tempfile::Builder::new()
.prefix("rust-htslib")
.tempdir()
.expect("Cannot create temp dir");
let bcfpath = tmp.path().join("test.bcf");
let bcf = Reader::from_path(path).expect("Error opening file.");
let header = Header::from_template_subset(&bcf.header, &[b"NA12878.subsample-0.25-0"])
.expect("Error subsetting samples.");
let mut writer =
Writer::from_path(&bcfpath, &header, false, Format::BCF).expect("Error opening file.");
writer.set_threads(2).unwrap();
}
#[test]
fn test_fetch() {
let mut bcf = IndexedReader::from_path(&"test/test.bcf").expect("Error opening file.");
bcf.set_threads(2).unwrap();
let rid = bcf
.header()
.name2rid(b"1")
.expect("Translating from contig '1' to ID failed.");
bcf.fetch(rid, 10_033, 10_060).expect("Fetching failed");
assert_eq!(bcf.records().count(), 28);
}
#[test]
fn test_write() {
let mut bcf = Reader::from_path(&"test/test_multi.bcf").expect("Error opening file.");
let tmp = tempfile::Builder::new()
.prefix("rust-htslib")
.tempdir()
.expect("Cannot create temp dir");
let bcfpath = tmp.path().join("test.bcf");
println!("{:?}", bcfpath);
{
let header = Header::from_template_subset(&bcf.header, &[b"NA12878.subsample-0.25-0"])
.expect("Error subsetting samples.");
let mut writer = Writer::from_path(&bcfpath, &header, false, Format::BCF)
.expect("Error opening file.");
for rec in bcf.records() {
let mut record = rec.expect("Error reading record.");
writer.translate(&mut record);
writer.subset(&mut record);
record.trim_alleles().expect("Error trimming alleles.");
writer.write(&record).expect("Error writing record");
}
}
{
_test_read(&bcfpath);
}
tmp.close().expect("Failed to delete temp dir");
}
#[test]
fn test_strings() {
let mut vcf = Reader::from_path(&"test/test_string.vcf").expect("Error opening file.");
let fs1 = [
&b"LongString1"[..],
&b"LongString2"[..],
&b"."[..],
&b"LongString4"[..],
&b"evenlength"[..],
&b"ss6"[..],
];
let mut buffer = Buffer::new();
for (i, rec) in vcf.records().enumerate() {
println!("record {}", i);
let mut record = rec.expect("Error reading record.");
assert_eq!(
record
.info_shared_buffer(b"S1", &mut buffer)
.string()
.expect("Error reading string.")
.expect("Missing tag")[0],
format!("string{}", i + 1).as_bytes()
);
println!(
"{}",
String::from_utf8_lossy(
record
.format(b"FS1")
.string()
.expect("Error reading string.")[0]
)
);
assert_eq!(
record
.format(b"FS1")
.string()
.expect("Error reading string.")[0],
fs1[i]
);
}
}
#[test]
fn test_missing() {
let mut vcf = Reader::from_path(&"test/test_missing.vcf").expect("Error opening file.");
let fn4 = [
&[
i32::missing(),
i32::missing(),
i32::missing(),
i32::missing(),
][..],
&[i32::missing()][..],
];
let f1 = [false, true];
let mut buffer = Buffer::new();
for (i, rec) in vcf.records().enumerate() {
let mut record = rec.expect("Error reading record.");
assert_eq!(
record
.info_shared_buffer(b"F1", &mut buffer)
.float()
.expect("Error reading float.")
.expect("Missing tag")[0]
.is_nan(),
f1[i]
);
assert_eq!(
record
.format(b"FN4")
.integer()
.expect("Error reading integer.")[1],
fn4[i]
);
assert!(
record.format(b"FF4").float().expect("Error reading float.")[1]
.iter()
.all(|&v| v.is_missing())
);
}
}
#[test]
fn test_genotypes() {
let mut vcf = Reader::from_path(&"test/test_string.vcf").expect("Error opening file.");
let expected = ["./1", "1|1", "0/1", "0|1", "1|.", "1/1"];
for (rec, exp_gt) in vcf.records().zip(expected.iter()) {
let mut rec = rec.expect("Error reading record.");
let genotypes = rec.genotypes().expect("Error reading genotypes");
assert_eq!(&format!("{}", genotypes.get(0)), exp_gt);
}
}
#[test]
fn test_header_ids() {
let vcf = Reader::from_path(&"test/test_string.vcf").expect("Error opening file.");
let header = &vcf.header();
use crate::bcf::header::Id;
assert_eq!(header.id_to_name(Id(4)), b"GT");
assert_eq!(header.name_to_id(b"GT").unwrap(), Id(4));
assert!(header.name_to_id(b"XX").is_err());
}
#[test]
fn test_header_samples() {
let vcf = Reader::from_path(&"test/test_string.vcf").expect("Error opening file.");
let header = &vcf.header();
assert_eq!(header.id_to_sample(Id(0)), b"one");
assert_eq!(header.id_to_sample(Id(1)), b"two");
assert_eq!(header.sample_to_id(b"one").unwrap(), Id(0));
assert_eq!(header.sample_to_id(b"two").unwrap(), Id(1));
assert!(header.sample_to_id(b"three").is_err());
}
#[test]
fn test_header_contigs() {
let vcf = Reader::from_path(&"test/test_multi.bcf").expect("Error opening file.");
let header = &vcf.header();
assert_eq!(header.contig_count(), 86);
// test existing contig names and IDs
assert_eq!(header.rid2name(0).unwrap(), b"1");
assert_eq!(header.name2rid(b"1").unwrap(), 0);
assert_eq!(header.rid2name(85).unwrap(), b"hs37d5");
assert_eq!(header.name2rid(b"hs37d5").unwrap(), 85);
// test nonexistent contig names and IDs
assert!(header.name2rid(b"nonexistent_contig").is_err());
assert!(header.rid2name(100).is_err());
}
#[test]
fn test_header_records() {
let vcf = Reader::from_path(&"test/test_string.vcf").expect("Error opening file.");
let records = vcf.header().header_records();
assert_eq!(records.len(), 10);
match records[1] {
HeaderRecord::Filter {
ref key,
ref values,
} => {
assert_eq!(key, "FILTER");
assert_eq!(values["ID"], "PASS");
}
_ => {
panic!("Invalid HeaderRecord");
}
}
}
#[test]
fn test_header_info_types() {
let vcf = Reader::from_path(&"test/test.bcf").unwrap();
let header = vcf.header();
let truth = vec![
(
// INFO=<ID=INDEL,Number=0,Type=Flag>
"INDEL",
header::TagType::Flag,
header::TagLength::Fixed(0),
),
(
// INFO=<ID=DP,Number=1,Type=Integer>
"DP",
header::TagType::Integer,
header::TagLength::Fixed(1),
),
(
// INFO=<ID=QS,Number=R,Type=Float>
"QS",
header::TagType::Float,
header::TagLength::Alleles,
),
(
// INFO=<ID=I16,Number=16,Type=Float>
"I16",
header::TagType::Float,
header::TagLength::Fixed(16),
),
];
for (ref_name, ref_type, ref_length) in truth {
let (tag_type, tag_length) = header.info_type(ref_name.as_bytes()).unwrap();
assert_eq!(tag_type, ref_type);
assert_eq!(tag_length, ref_length);
}
let vcf = Reader::from_path(&"test/test_svlen.vcf").unwrap();
let header = vcf.header();
let truth = vec![
(
// INFO=<ID=IMPRECISE,Number=0,Type=Flag>
"IMPRECISE",
header::TagType::Flag,
header::TagLength::Fixed(0),
),
(
// INFO=<ID=SVTYPE,Number=1,Type=String>
"SVTYPE",
header::TagType::String,
header::TagLength::Fixed(1),
),
(
// INFO=<ID=SVLEN,Number=.,Type=Integer>
"SVLEN",
header::TagType::Integer,
header::TagLength::Variable,
),
(
// INFO<ID=CIGAR,Number=A,Type=String>
"CIGAR",
header::TagType::String,
header::TagLength::AltAlleles,
),
];
for (ref_name, ref_type, ref_length) in truth {
let (tag_type, tag_length) = header.info_type(ref_name.as_bytes()).unwrap();
assert_eq!(tag_type, ref_type);
assert_eq!(tag_length, ref_length);
}
assert!(header.info_type(b"NOT_THERE").is_err());
}
#[test]
fn test_remove_alleles() {
let mut bcf = Reader::from_path(&"test/test_multi.bcf").unwrap();
for res in bcf.records() {
let mut record = res.unwrap();
if record.pos() == 10080 {
record.remove_alleles(&[false, false, true]).unwrap();
assert_eq!(record.alleles(), [b"A", b"C"]);
}
}
}
// Helper function reading full file into string.
fn read_all<P: AsRef<Path>>(path: P) -> String {
let mut file = File::open(path.as_ref())
.unwrap_or_else(|_| panic!("Unable to open the file: {:?}", path.as_ref()));
let mut contents = String::new();
file.read_to_string(&mut contents)
.unwrap_or_else(|_| panic!("Unable to read the file: {:?}", path.as_ref()));
contents
}
// Open `test_various.vcf`, add a record from scratch to it and write it out again.
//
// This exercises the full functionality of updating information in a `record::Record`.
#[test]
fn test_write_various() {
// Open reader, then create writer.
let tmp = tempfile::Builder::new()
.prefix("rust-htslib")
.tempdir()
.expect("Cannot create temp dir");
let out_path = tmp.path().join("test_various.out.vcf");
let vcf = Reader::from_path(&"test/test_various.vcf").expect("Error opening file.");
// The writer goes into its own block so we can ensure that the file is closed and
// all data is written below.
{
let mut writer = Writer::from_path(
&out_path,
&Header::from_template(&vcf.header()),
true,
Format::VCF,
)
.expect("Error opening file.");
let header = writer.header().clone();
// Setup empty record, filled below.
let mut record = writer.empty_record();
record.set_rid(Some(0));
assert_eq!(record.rid().unwrap(), 0);
record.set_pos(12);
assert_eq!(record.pos(), 12);
assert_eq!(str::from_utf8(record.id().as_ref()).unwrap(), ".");
record.set_id(b"to_be_cleared").unwrap();
assert_eq!(
str::from_utf8(record.id().as_ref()).unwrap(),
"to_be_cleared"
);
record.clear_id().unwrap();
assert_eq!(str::from_utf8(record.id().as_ref()).unwrap(), ".");
record.set_id(b"first_id").unwrap();
record.push_id(b"second_id").unwrap();
record.push_id(b"first_id").unwrap();
assert!(record.filters().next().is_none());
record.set_filters(&[header.name_to_id(b"q10").unwrap()]);
record.push_filter(header.name_to_id(b"s50").unwrap());
record.remove_filter(header.name_to_id(b"q10").unwrap(), true);
record.push_filter(header.name_to_id(b"q10").unwrap());
record.set_alleles(&[b"C", b"T", b"G"]).unwrap();
record.set_qual(10.0);
record.push_info_integer(b"N1", &[32]).unwrap();
record.push_info_float(b"F1", &[33.0]).unwrap();
record.push_info_string(b"S1", &[b"fourtytwo"]).unwrap();
record.push_info_flag(b"X1").unwrap();
record
.push_genotypes(&[
GenotypeAllele::Unphased(0),
GenotypeAllele::Unphased(1),
GenotypeAllele::Unphased(1),
GenotypeAllele::Phased(1),
])
.unwrap();
record
.push_format_string(b"FS1", &[&b"yes"[..], &b"no"[..]])
.unwrap();
record.push_format_integer(b"FF1", &[43, 11]).unwrap();
record.push_format_float(b"FN1", &[42.0, 10.0]).unwrap();
record
.push_format_char(b"CH1", &[b"A"[0], b"B"[0]])
.unwrap();
// Finally, write out the record.
writer.write(&record).unwrap();
}
// Now, compare expected and real output.
let expected = read_all("test/test_various.out.vcf");
let actual = read_all(&out_path);
assert_eq!(expected, actual);
}
#[test]
fn test_remove_headers() {
let vcf = Reader::from_path(&"test/test_headers.vcf").expect("Error opening file.");
let tmp = tempfile::Builder::new()
.prefix("rust-htslib")
.tempdir()
.expect("Cannot create temp dir");
let vcfpath = tmp.path().join("test.vcf");
let mut header = Header::from_template(&vcf.header);
header
.remove_contig(b"contig2")
.remove_info(b"INFO2")
.remove_format(b"FORMAT2")
.remove_filter(b"FILTER2")
.remove_structured(b"Foo2")
.remove_generic(b"Bar2");
{
let mut _writer = Writer::from_path(&vcfpath, &header, true, Format::VCF)
.expect("Error opening output file.");
// Note that we don't need to write anything, we are just looking at the header.
}
let expected = read_all("test/test_headers.out.vcf");
let actual = read_all(&vcfpath);
assert_eq!(expected, actual);
}
#[test]
fn test_synced_reader() {
let mut reader = synced::SyncedReader::new().unwrap();
reader.set_require_index(true);
reader.set_pairing(synced::pairing::SNPS);
assert_eq!(reader.reader_count(), 0);
reader.add_reader(&"test/test_left.vcf.gz").unwrap();
reader.add_reader(&"test/test_right.vcf.gz").unwrap();
assert_eq!(reader.reader_count(), 2);
let res1 = reader.read_next();
assert_eq!(res1.unwrap(), 2);
assert!(reader.has_line(0));
assert!(reader.has_line(1));
let res2 = reader.read_next();
assert_eq!(res2.unwrap(), 1);
assert!(reader.has_line(0));
assert!(!reader.has_line(1));
let res3 = reader.read_next();
assert_eq!(res3.unwrap(), 1);
assert!(!reader.has_line(0));
assert!(reader.has_line(1));
let res4 = reader.read_next();
assert_eq!(res4.unwrap(), 0);
}
#[test]
fn test_synced_reader_fetch() {
let mut reader = synced::SyncedReader::new().unwrap();
reader.set_require_index(true);
reader.set_pairing(synced::pairing::SNPS);
assert_eq!(reader.reader_count(), 0);
reader.add_reader(&"test/test_left.vcf.gz").unwrap();
reader.add_reader(&"test/test_right.vcf.gz").unwrap();
assert_eq!(reader.reader_count(), 2);
reader.fetch(0, 0, 1000).unwrap();
let res1 = reader.read_next();
assert_eq!(res1.unwrap(), 2);
assert!(reader.has_line(0));
assert!(reader.has_line(1));
let res2 = reader.read_next();
assert_eq!(res2.unwrap(), 1);
assert!(reader.has_line(0));
assert!(!reader.has_line(1));
let res3 = reader.read_next();
assert_eq!(res3.unwrap(), 1);
assert!(!reader.has_line(0));
assert!(reader.has_line(1));
let res4 = reader.read_next();
assert_eq!(res4.unwrap(), 0);
}
#[test]
fn test_svlen() {
let mut reader = Reader::from_path("test/test_svlen.vcf").unwrap();
let mut record = reader.empty_record();
reader.read(&mut record).unwrap().unwrap();
assert_eq!(
*record.info(b"SVLEN").integer().unwrap().unwrap(),
&[-127][..]
);
}
#[test]
fn test_fails_on_bam() {
let reader = Reader::from_path("test/test.bam");
assert!(reader.is_err());
}
#[test]
fn test_fails_on_non_existent() {
let reader = Reader::from_path("test/no_such_file");
assert!(reader.is_err());
}
#[test]
fn test_multi_string_info_tag() {
let mut reader = Reader::from_path("test/test-info-multi-string.vcf").unwrap();
let mut rec = reader.empty_record();
let _ = reader.read(&mut rec);
assert_eq!(
rec.info_shared_buffer(b"ANN", Buffer::new())
.string()
.unwrap()
.unwrap()
.len(),
14
);
}
#[test]
fn test_multi_string_info_tag_number_a() {
let mut reader = Reader::from_path("test/test-info-multi-string-number=A.vcf").unwrap();
let mut rec = reader.empty_record();
let _ = reader.read(&mut rec);
assert_eq!(
rec.info_shared_buffer(b"X", Buffer::new())
.string()
.unwrap()
.unwrap()
.len(),
2
);
}
#[test]
fn test_genotype_allele_conversion() {
let allele = GenotypeAllele::Unphased(1);
let converted: i32 = allele.into();
let expected = 4;
assert_eq!(converted, expected);
}
#[test]
fn test_genotype_missing_allele_conversion() {
let allele = GenotypeAllele::PhasedMissing;
let converted: i32 = allele.into();
let expected = 1;
assert_eq!(converted, expected);
}
| let mut bcf = Reader::from_path(path).expect("Error opening file.");
let _header = bcf.header();
// FORMAT fields of first record of the vcf should look like:
// GT:FS1:FN1 ./1:LongString1:1 1/1:ss1:2
let mut first_record = bcf.records().next().unwrap().expect("Failed to read record");
let sample_count = usize::try_from(first_record.sample_count()).unwrap();
assert_eq!(sample_count, 2);
let mut n_ref = vec![0; sample_count];
let mut n_alt = vec![0; sample_count];
let mut n_missing = vec![0; sample_count];
let gts = first_record.genotypes().expect("Error reading genotypes");
for sample_index in 0..sample_count {
// for each sample
for gta in gts.get(sample_index).iter() {
// for each allele
match gta.index() {
Some(0) => n_ref[sample_index] += 1, // reference allele
Some(_) => n_alt[sample_index] += 1, // alt allele
None => n_missing[sample_index] += 1, // missing allele
}
}
}
assert_eq!(n_ref, [0, 0]);
assert_eq!(n_alt, [1, 2]);
assert_eq!(n_missing, [1, 0]);
}
#[test]
fn test_obs_cornercase() {
let mut reader = Reader::from_path("test/obs-cornercase.vcf").unwrap();
let first_record = reader
.records()
.next()
.unwrap()
.expect("Fail to read record");
assert_eq!(
*first_record.info(b"EVENT").string().unwrap().unwrap(),
[b"gridss33fb_1085"]
);
assert_eq!(
*first_record.info(b"MATEID").string().unwrap().unwrap(),
[b"gridss33fb_1085h"]
);
}
// #[test]
// fn test_buffer_lifetime() {
// let mut reader = Reader::from_path("test/obs-cornercase.vcf").unwrap();
// let first_record = reader
// .records()
// .next()
// .unwrap()
// .expect("Fail to read record");
// fn get_value<'a, 'b>(record: &'a Record) -> &'b [u8] {
// // FIXME: this should not be possible, because the slice outlives the buffer.
// let buffer: BufferBacked<'b, _, _> = record.info(b"EVENT").string().unwrap().unwrap();
// let value: &'b [u8] = buffer[0];
// value
// }
// let buffered = first_record.info(b"EVENT").string().unwrap().unwrap();
// assert_eq!(get_value(&first_record), buffered[0]);
// }
} | #[test]
fn test_alt_allele_dosage() {
let path = &"test/test_string.vcf"; |
hstier.rs | #[doc = "Writer for register HSTIER"]
pub type W = crate::W<u32, super::HSTIER>;
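// svd2rust-generated write proxies: each register field below gets a `FIELD_W`
// struct whose `set_bit`/`clear_bit`/`bit` methods mask and shift the value
// into the raw register bits, returning `&'a mut W` so writes can be chained.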
#[doc = "Write proxy for field `DCONNIES`"]
pub struct DCONNIES_W<'a> { w: &'a mut W }
impl<'a> DCONNIES_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W { self.bit(true) }
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
self.w
}
}
#[doc = "Write proxy for field `DDISCIES`"]
pub struct DDISCIES_W<'a> { w: &'a mut W }
impl<'a> DDISCIES_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn | (self) -> &'a mut W { self.bit(true) }
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
self.w
}
}
#[doc = "Write proxy for field `RSTIES`"]
pub struct RSTIES_W<'a> { w: &'a mut W }
impl<'a> RSTIES_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W { self.bit(true) }
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
self.w
}
}
#[doc = "Write proxy for field `RSMEDIES`"]
pub struct RSMEDIES_W<'a> { w: &'a mut W }
impl<'a> RSMEDIES_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W { self.bit(true) }
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
self.w
}
}
#[doc = "Write proxy for field `RXRSMIES`"]
pub struct RXRSMIES_W<'a> { w: &'a mut W }
impl<'a> RXRSMIES_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W { self.bit(true) }
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
self.w
}
}
#[doc = "Write proxy for field `HSOFIES`"]
pub struct HSOFIES_W<'a> { w: &'a mut W }
impl<'a> HSOFIES_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W { self.bit(true) }
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
self.w
}
}
#[doc = "Write proxy for field `HWUPIES`"]
pub struct HWUPIES_W<'a> { w: &'a mut W }
impl<'a> HWUPIES_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W { self.bit(true) }
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);
self.w
}
}
#[doc = "Write proxy for field `PEP_0`"]
pub struct PEP_0_W<'a> { w: &'a mut W }
impl<'a> PEP_0_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W { self.bit(true) }
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);
self.w
}
}
#[doc = "Write proxy for field `PEP_1`"]
pub struct PEP_1_W<'a> { w: &'a mut W }
impl<'a> PEP_1_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W { self.bit(true) }
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);
self.w
}
}
#[doc = "Write proxy for field `PEP_2`"]
pub struct PEP_2_W<'a> { w: &'a mut W }
impl<'a> PEP_2_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W { self.bit(true) }
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);
self.w
}
}
#[doc = "Write proxy for field `PEP_3`"]
pub struct PEP_3_W<'a> { w: &'a mut W }
impl<'a> PEP_3_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W { self.bit(true) }
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);
self.w
}
}
#[doc = "Write proxy for field `PEP_4`"]
pub struct PEP_4_W<'a> { w: &'a mut W }
impl<'a> PEP_4_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W { self.bit(true) }
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);
self.w
}
}
#[doc = "Write proxy for field `PEP_5`"]
pub struct PEP_5_W<'a> { w: &'a mut W }
impl<'a> PEP_5_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W { self.bit(true) }
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13);
self.w
}
}
#[doc = "Write proxy for field `PEP_6`"]
pub struct PEP_6_W<'a> { w: &'a mut W }
impl<'a> PEP_6_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W { self.bit(true) }
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 14)) | (((value as u32) & 0x01) << 14);
self.w
}
}
#[doc = "Write proxy for field `PEP_7`"]
pub struct PEP_7_W<'a> { w: &'a mut W }
impl<'a> PEP_7_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W { self.bit(true) }
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 15)) | (((value as u32) & 0x01) << 15);
self.w
}
}
#[doc = "Write proxy for field `PEP_8`"]
pub struct PEP_8_W<'a> { w: &'a mut W }
impl<'a> PEP_8_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W { self.bit(true) }
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);
self.w
}
}
#[doc = "Write proxy for field `PEP_9`"]
pub struct PEP_9_W<'a> { w: &'a mut W }
impl<'a> PEP_9_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W { self.bit(true) }
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);
self.w
}
}
#[doc = "Write proxy for field `DMA_1`"]
pub struct DMA_1_W<'a> { w: &'a mut W }
impl<'a> DMA_1_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W { self.bit(true) }
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 25)) | (((value as u32) & 0x01) << 25);
self.w
}
}
#[doc = "Write proxy for field `DMA_2`"]
pub struct DMA_2_W<'a> { w: &'a mut W }
impl<'a> DMA_2_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W { self.bit(true) }
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 26)) | (((value as u32) & 0x01) << 26);
self.w
}
}
#[doc = "Write proxy for field `DMA_3`"]
pub struct DMA_3_W<'a> { w: &'a mut W }
impl<'a> DMA_3_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W { self.bit(true) }
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 27)) | (((value as u32) & 0x01) << 27);
self.w
}
}
#[doc = "Write proxy for field `DMA_4`"]
pub struct DMA_4_W<'a> { w: &'a mut W }
impl<'a> DMA_4_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W { self.bit(true) }
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 28)) | (((value as u32) & 0x01) << 28);
self.w
}
}
#[doc = "Write proxy for field `DMA_5`"]
pub struct DMA_5_W<'a> { w: &'a mut W }
impl<'a> DMA_5_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W { self.bit(true) }
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 29)) | (((value as u32) & 0x01) << 29);
self.w
}
}
#[doc = "Write proxy for field `DMA_6`"]
pub struct DMA_6_W<'a> { w: &'a mut W }
impl<'a> DMA_6_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W { self.bit(true) }
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 30)) | (((value as u32) & 0x01) << 30);
self.w
}
}
impl W {
#[doc = "Bit 0 - Device Connection Interrupt Enable"]
#[inline(always)]
pub fn dconnies(&mut self) -> DCONNIES_W { DCONNIES_W { w: self } }
#[doc = "Bit 1 - Device Disconnection Interrupt Enable"]
#[inline(always)]
pub fn ddiscies(&mut self) -> DDISCIES_W { DDISCIES_W { w: self } }
#[doc = "Bit 2 - USB Reset Sent Interrupt Enable"]
#[inline(always)]
pub fn rsties(&mut self) -> RSTIES_W { RSTIES_W { w: self } }
#[doc = "Bit 3 - Downstream Resume Sent Interrupt Enable"]
#[inline(always)]
pub fn rsmedies(&mut self) -> RSMEDIES_W { RSMEDIES_W { w: self } }
#[doc = "Bit 4 - Upstream Resume Received Interrupt Enable"]
#[inline(always)]
pub fn rxrsmies(&mut self) -> RXRSMIES_W { RXRSMIES_W { w: self } }
#[doc = "Bit 5 - Host Start of Frame Interrupt Enable"]
#[inline(always)]
pub fn hsofies(&mut self) -> HSOFIES_W { HSOFIES_W { w: self } }
#[doc = "Bit 6 - Host Wake-Up Interrupt Enable"]
#[inline(always)]
pub fn hwupies(&mut self) -> HWUPIES_W { HWUPIES_W { w: self } }
#[doc = "Bit 8 - Pipe 0 Interrupt Enable"]
#[inline(always)]
pub fn pep_0(&mut self) -> PEP_0_W { PEP_0_W { w: self } }
#[doc = "Bit 9 - Pipe 1 Interrupt Enable"]
#[inline(always)]
pub fn pep_1(&mut self) -> PEP_1_W { PEP_1_W { w: self } }
#[doc = "Bit 10 - Pipe 2 Interrupt Enable"]
#[inline(always)]
pub fn pep_2(&mut self) -> PEP_2_W { PEP_2_W { w: self } }
#[doc = "Bit 11 - Pipe 3 Interrupt Enable"]
#[inline(always)]
pub fn pep_3(&mut self) -> PEP_3_W { PEP_3_W { w: self } }
#[doc = "Bit 12 - Pipe 4 Interrupt Enable"]
#[inline(always)]
pub fn pep_4(&mut self) -> PEP_4_W { PEP_4_W { w: self } }
#[doc = "Bit 13 - Pipe 5 Interrupt Enable"]
#[inline(always)]
pub fn pep_5(&mut self) -> PEP_5_W { PEP_5_W { w: self } }
#[doc = "Bit 14 - Pipe 6 Interrupt Enable"]
#[inline(always)]
pub fn pep_6(&mut self) -> PEP_6_W { PEP_6_W { w: self } }
#[doc = "Bit 15 - Pipe 7 Interrupt Enable"]
#[inline(always)]
pub fn pep_7(&mut self) -> PEP_7_W { PEP_7_W { w: self } }
#[doc = "Bit 16 - Pipe 8 Interrupt Enable"]
#[inline(always)]
pub fn pep_8(&mut self) -> PEP_8_W { PEP_8_W { w: self } }
#[doc = "Bit 17 - Pipe 9 Interrupt Enable"]
#[inline(always)]
pub fn pep_9(&mut self) -> PEP_9_W { PEP_9_W { w: self } }
#[doc = "Bit 25 - DMA Channel 1 Interrupt Enable"]
#[inline(always)]
pub fn dma_1(&mut self) -> DMA_1_W { DMA_1_W { w: self } }
#[doc = "Bit 26 - DMA Channel 2 Interrupt Enable"]
#[inline(always)]
pub fn dma_2(&mut self) -> DMA_2_W { DMA_2_W { w: self } }
#[doc = "Bit 27 - DMA Channel 3 Interrupt Enable"]
#[inline(always)]
pub fn dma_3(&mut self) -> DMA_3_W { DMA_3_W { w: self } }
#[doc = "Bit 28 - DMA Channel 4 Interrupt Enable"]
#[inline(always)]
pub fn dma_4(&mut self) -> DMA_4_W { DMA_4_W { w: self } }
#[doc = "Bit 29 - DMA Channel 5 Interrupt Enable"]
#[inline(always)]
pub fn dma_5(&mut self) -> DMA_5_W { DMA_5_W { w: self } }
#[doc = "Bit 30 - DMA Channel 6 Interrupt Enable"]
#[inline(always)]
pub fn dma_6(&mut self) -> DMA_6_W { DMA_6_W { w: self } }
} | set_bit |
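// Usage sketch for the write proxies above (a sketch only -- the owning peripheral
// and register names are assumptions about the enclosing generated PAC; the
// closure-based `write` pattern itself is standard svd2rust):
// periph.reg.write(|w| w.dconnies().set_bit().pep_0().set_bit().dma_1().set_bit());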
interactive_map.py | import pygame
import math
import glob
import os
tilesize = 128 # pixels per tile
def tiletosurface(tile): | pass
def maptosurface(sx, sy, ex, ey, oholmap):
    # TODO: compose the tiles covering (sx, sy)-(ex, ey) into a single surface
    pass
def main(windowsize,tilepipe,OHOLMap):
wt = math.floor(windowsize/tilesize)
cx,cy,first = 0,0,True
    # Centre the view on the first stored tile coordinate, if any.
    if OHOLMap.data != {}:
        for x in OHOLMap.data:
            for y in OHOLMap.data[x]:
                cx, cy = x, y
                first = False
                break
            if not first:
                break
print("Loading sprites")
sprites = glob.glob("./OneLifeData/sprites/*.tga")
loadedsprites = {}
print("Found {} sprites, loading...".format(len(sprites)))
for sprite in sprites:
spriteid = os.path.basename(sprite).split(".")[0]
loadedsprites[spriteid] = pygame.image.load(sprite)
# do other loading things...
tilepipe.send("READY")
# main loop goes here | |
build_data.rs | //! Handles build script specific information
use std::{
io::BufReader,
path::PathBuf,
process::{Command, Stdio},
sync::Arc,
};
use anyhow::Result;
use cargo_metadata::camino::Utf8Path;
use cargo_metadata::{BuildScript, Message};
use itertools::Itertools;
use paths::{AbsPath, AbsPathBuf};
use rustc_hash::FxHashMap;
use stdx::JodChild;
use crate::{cfg_flag::CfgFlag, CargoConfig};
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub(crate) struct BuildData {
/// List of config flags defined by this package's build script
pub(crate) cfgs: Vec<CfgFlag>,
/// List of cargo-related environment variables with their value
///
/// If the package has a build script which defines environment variables,
/// they can also be found here.
pub(crate) envs: Vec<(String, String)>,
/// Directory where a build script might place its output
pub(crate) out_dir: Option<AbsPathBuf>,
/// Path to the proc-macro library file if this package exposes proc-macros
pub(crate) proc_macro_dylib_path: Option<AbsPathBuf>,
}
#[derive(Clone, Debug)]
pub(crate) struct BuildDataConfig {
cargo_toml: AbsPathBuf,
cargo_features: CargoConfig,
packages: Arc<Vec<cargo_metadata::Package>>,
}
impl PartialEq for BuildDataConfig {
fn eq(&self, other: &Self) -> bool {
Arc::ptr_eq(&self.packages, &other.packages)
}
}
impl Eq for BuildDataConfig {}
#[derive(Debug, Default)]
pub struct BuildDataCollector {
configs: FxHashMap<AbsPathBuf, BuildDataConfig>,
}
#[derive(Debug, Default, PartialEq, Eq)]
pub struct BuildDataResult {
data: FxHashMap<AbsPathBuf, BuildDataMap>,
}
pub(crate) type BuildDataMap = FxHashMap<String, BuildData>;
impl BuildDataCollector {
pub(crate) fn add_config(&mut self, workspace_root: &AbsPath, config: BuildDataConfig) {
self.configs.insert(workspace_root.to_path_buf(), config);
}
pub fn collect(&mut self, progress: &dyn Fn(String)) -> Result<BuildDataResult> {
let mut res = BuildDataResult::default();
for (path, config) in self.configs.iter() {
res.data.insert(
path.clone(),
collect_from_workspace(
&config.cargo_toml,
&config.cargo_features,
&config.packages,
progress,
)?,
);
}
Ok(res)
}
}
impl BuildDataResult {
pub(crate) fn get(&self, root: &AbsPath) -> Option<&BuildDataMap> {
self.data.get(&root.to_path_buf())
}
}
impl BuildDataConfig {
pub(crate) fn | (
cargo_toml: AbsPathBuf,
cargo_features: CargoConfig,
packages: Arc<Vec<cargo_metadata::Package>>,
) -> Self {
Self { cargo_toml, cargo_features, packages }
}
}
fn collect_from_workspace(
cargo_toml: &AbsPath,
cargo_features: &CargoConfig,
packages: &Vec<cargo_metadata::Package>,
progress: &dyn Fn(String),
) -> Result<BuildDataMap> {
let mut cmd = Command::new(toolchain::cargo());
cmd.args(&["check", "--workspace", "--message-format=json", "--manifest-path"])
.arg(cargo_toml.as_ref());
// --all-targets includes tests, benches and examples in addition to the
// default lib and bins. This is an independent concept from the --targets
// flag below.
cmd.arg("--all-targets");
if let Some(target) = &cargo_features.target {
cmd.args(&["--target", target]);
}
if cargo_features.all_features {
cmd.arg("--all-features");
} else {
if cargo_features.no_default_features {
// FIXME: `NoDefaultFeatures` is mutual exclusive with `SomeFeatures`
// https://github.com/oli-obk/cargo_metadata/issues/79
cmd.arg("--no-default-features");
}
if !cargo_features.features.is_empty() {
cmd.arg("--features");
cmd.arg(cargo_features.features.join(" "));
}
}
cmd.stdout(Stdio::piped()).stderr(Stdio::null()).stdin(Stdio::null());
let mut child = cmd.spawn().map(JodChild)?;
let child_stdout = child.stdout.take().unwrap();
let stdout = BufReader::new(child_stdout);
let mut res = BuildDataMap::default();
for message in cargo_metadata::Message::parse_stream(stdout).flatten() {
match message {
Message::BuildScriptExecuted(BuildScript {
package_id, out_dir, cfgs, env, ..
}) => {
let cfgs = {
let mut acc = Vec::new();
for cfg in cfgs {
match cfg.parse::<CfgFlag>() {
Ok(it) => acc.push(it),
Err(err) => {
anyhow::bail!("invalid cfg from cargo-metadata: {}", err)
}
};
}
acc
};
let res = res.entry(package_id.repr.clone()).or_default();
// cargo_metadata crate returns default (empty) path for
// older cargos, which is not absolute, so work around that.
if !out_dir.as_str().is_empty() {
let out_dir = AbsPathBuf::assert(PathBuf::from(out_dir.into_os_string()));
res.out_dir = Some(out_dir);
res.cfgs = cfgs;
}
res.envs = env;
}
Message::CompilerArtifact(message) => {
progress(format!("metadata {}", message.target.name));
if message.target.kind.contains(&"proc-macro".to_string()) {
let package_id = message.package_id;
// Skip rmeta file
if let Some(filename) = message.filenames.iter().find(|name| is_dylib(name)) {
let filename = AbsPathBuf::assert(PathBuf::from(&filename));
let res = res.entry(package_id.repr.clone()).or_default();
res.proc_macro_dylib_path = Some(filename);
}
}
}
Message::CompilerMessage(message) => {
progress(message.target.name.clone());
}
Message::BuildFinished(_) => {}
Message::TextLine(_) => {}
_ => {}
}
}
for package in packages {
let build_data = res.entry(package.id.repr.clone()).or_default();
inject_cargo_env(package, build_data);
if let Some(out_dir) = &build_data.out_dir {
// NOTE: cargo and rustc seem to hide non-UTF-8 strings from env! and option_env!()
if let Some(out_dir) = out_dir.to_str().map(|s| s.to_owned()) {
build_data.envs.push(("OUT_DIR".to_string(), out_dir));
}
}
}
Ok(res)
}
// FIXME: File a better way to know if it is a dylib
fn is_dylib(path: &Utf8Path) -> bool {
match path.extension().map(|e| e.to_string().to_lowercase()) {
None => false,
Some(ext) => matches!(ext.as_str(), "dll" | "dylib" | "so"),
}
}
/// Recreates the compile-time environment variables that Cargo sets.
///
/// Should be synced with <https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-crates>
fn inject_cargo_env(package: &cargo_metadata::Package, build_data: &mut BuildData) {
let env = &mut build_data.envs;
// FIXME: Missing variables:
// CARGO_PKG_HOMEPAGE, CARGO_CRATE_NAME, CARGO_BIN_NAME, CARGO_BIN_EXE_<name>
let mut manifest_dir = package.manifest_path.clone();
manifest_dir.pop();
env.push(("CARGO_MANIFEST_DIR".into(), manifest_dir.into_string()));
// Not always right, but works for common cases.
env.push(("CARGO".into(), "cargo".into()));
env.push(("CARGO_PKG_VERSION".into(), package.version.to_string()));
env.push(("CARGO_PKG_VERSION_MAJOR".into(), package.version.major.to_string()));
env.push(("CARGO_PKG_VERSION_MINOR".into(), package.version.minor.to_string()));
env.push(("CARGO_PKG_VERSION_PATCH".into(), package.version.patch.to_string()));
let pre = package.version.pre.iter().map(|id| id.to_string()).format(".");
env.push(("CARGO_PKG_VERSION_PRE".into(), pre.to_string()));
let authors = package.authors.join(";");
env.push(("CARGO_PKG_AUTHORS".into(), authors));
env.push(("CARGO_PKG_NAME".into(), package.name.clone()));
env.push(("CARGO_PKG_DESCRIPTION".into(), package.description.clone().unwrap_or_default()));
//env.push(("CARGO_PKG_HOMEPAGE".into(), package.homepage.clone().unwrap_or_default()));
env.push(("CARGO_PKG_REPOSITORY".into(), package.repository.clone().unwrap_or_default()));
env.push(("CARGO_PKG_LICENSE".into(), package.license.clone().unwrap_or_default()));
let license_file = package.license_file.as_ref().map(|buf| buf.to_string()).unwrap_or_default();
env.push(("CARGO_PKG_LICENSE_FILE".into(), license_file));
}
| new |
tnt-Spot-SuccessfulAuth.js | sap.ui.define(function () { 'use strict';
var spotSvg = `<svg width="128" height="128" viewBox="0 0 128 128" id="tnt-Spot-SuccessfulAuth">
<path fill="var(--sapIllus_PatternShadow)" d="M109.9558,33.5663 L108.9998,33.5663 L108.9998,41.5163 L108.9998,44.5593 L108.9998,95.5663 C108.9998,97.2223 107.6568,98.5663 105.9998,98.5663 L23.0438,98.5663 L23.0448,100.5653 C23.0448,102.2223 24.3878,103.5663 26.0438,103.5663 L109.9998,103.5663 C111.6568,103.5663 112.9998,102.2223 112.9998,100.5663 L112.9998,36.5663 C112.9998,34.9093 111.6568,33.5663 109.9998,33.5663 L109.9558,33.5663 Z" class="sapIllus_PatternShadow"/>
<path fill="var(--sapIllus_ObjectFillColor)" d="M105.999,99 L22,99 C20.343,99 19,97.657 19,96 L19,32.001 C19,30.344 20.343,29 22,29 L105.999,29 C107.656,29 109,30.344 109,32.001 L109,96 C109,97.657 107.656,99 105.999,99" class="sapIllus_ObjectFillColor"/> | <path fill="var(--sapIllus_BrandColorSecondary)" d="M109,42 L19,42 L19,31.969 C19,30.329 20.329,29 21.968,29 L106.005,29 C107.659,29 109,30.342 109,31.996 L109,42 Z" class="sapIllus_BrandColorSecondary"/>
<path fill="var(--sapIllus_StrokeDetailColor)" d="M106,98 L22,98 C20.896,98 20,97.105 20,96 L20,42 L21.821,42 L108,42 L108,96 C108,97.105 107.104,98 106,98 M108,32 L108,41 L20,41 L20,32 C20,30.896 20.896,30 22,30 L106,30 C107.104,30 108,30.896 108,32 M106,29 L105.526,29 L21.59,29 L21.544,29 C19.817,29 19,30.692 19,31.897 L19,42 L19,96 C19,97.657 20.343,99 22,99 L106,99 C107.657,99 109,97.657 109,96 L109,44.994 L109,41.951 L109,32 C109,30.344 107.657,29 106,29" class="sapIllus_StrokeDetailColor"/>
<path fill="var(--sapIllus_ObjectFillColor)" d="M28.5332 35.5C28.5332 36.574 27.6622 37.445 26.5882 37.445 25.5142 37.445 24.6432 36.574 24.6432 35.5 24.6432 34.427 25.5142 33.556 26.5882 33.556 27.6622 33.556 28.5332 34.427 28.5332 35.5M34.5142 35.5C34.5142 36.574 33.6432 37.445 32.5692 37.445 31.4952 37.445 30.6242 36.574 30.6242 35.5 30.6242 34.427 31.4952 33.556 32.5692 33.556 33.6432 33.556 34.5142 34.427 34.5142 35.5M40.4956 35.5C40.4956 36.574 39.6246 37.445 38.5506 37.445 37.4766 37.445 36.6056 36.574 36.6056 35.5 36.6056 34.427 37.4766 33.556 38.5506 33.556 39.6246 33.556 40.4956 34.427 40.4956 35.5" class="sapIllus_ObjectFillColor"/>
<path fill="var(--sapIllus_AccentColor)" d="M49.9136,72.7959 C47.2576,72.7959 45.1036,70.6439 45.1036,67.9869 C45.1036,65.3309 47.2576,63.1779 49.9136,63.1779 C52.5696,63.1779 54.7236,65.3309 54.7236,67.9869 C54.7236,70.6439 52.5696,72.7959 49.9136,72.7959 M86.0136,65.6869 L59.6786,65.6869 C58.6076,61.2389 54.6136,57.9309 49.8376,57.9309 C44.2416,57.9309 39.7056,62.4679 39.7056,68.0639 C39.7056,73.6589 44.2416,78.1949 49.8376,78.1949 C54.6826,78.1949 58.7266,74.7929 59.7266,70.2489 L74.1896,70.2489 L74.1896,75.3119 C74.1896,75.5249 74.3626,75.6979 74.5766,75.6979 L77.5036,75.6979 C77.7196,75.6979 77.8926,75.5229 77.8906,75.3079 L77.8876,74.0089 C77.8846,73.7939 78.0586,73.6179 78.2736,73.6179 L81.1726,73.6179 C81.3866,73.6179 81.5596,73.7909 81.5596,74.0049 L81.5596,75.3119 C81.5596,75.5249 81.7326,75.6979 81.9456,75.6979 L84.8026,75.6979 C85.0166,75.6979 85.1896,75.5249 85.1896,75.3119 L85.1896,70.2489 L86.0136,70.2489 C87.2736,70.2489 88.2946,69.2279 88.2946,67.9679 C88.2946,66.7079 87.2736,65.6869 86.0136,65.6869" class="sapIllus_AccentColor"/>
<path fill="var(--sapIllus_BrandColorSecondary)" d="M16.5438 20.0267C13.6508 18.8867 12.2478 16.1177 11.1618 13.2957 11.0968 13.1207 10.9308 13.0037 10.7438 13.0007L10.7388 13.0007C10.5538 13.0017 10.3878 13.1137 10.3188 13.2857 8.8718 16.7937 6.7188 18.5877 4.2268 20.6637L4.1688 20.7127C4.0318 20.8267 3.9718 21.0087 4.0128 21.1827 4.0528 21.3557 4.1888 21.4907 4.3618 21.5287 6.5608 22.0057 9.9628 26.2997 9.9628 28.5977 9.9608 28.8277 10.1268 29.0237 10.3538 29.0597 10.3758 29.0627 10.3968 29.0637 10.4188 29.0637 10.6228 29.0627 10.8008 28.9257 10.8558 28.7297 11.5858 26.1857 13.7028 22.0027 16.5428 20.8977 16.7838 20.7997 16.8998 20.5257 16.8018 20.2857 16.7548 20.1687 16.6618 20.0747 16.5438 20.0267zM123.3153 100.8734C121.7803 100.2464 121.0283 98.7004 120.4423 97.1234 120.3823 96.9514 120.2223 96.8344 120.0393 96.8284L120.0343 96.8284C119.8543 96.8314 119.6933 96.9444 119.6293 97.1134 118.9133 98.7694 117.7673 100.2034 116.3113 101.2694L116.2793 101.2984C116.1473 101.4154 116.0893 101.5964 116.1293 101.7694 116.1663 101.9384 116.2963 102.0734 116.4653 102.1144 117.5973 102.3684 119.4183 104.7514 119.4183 105.9774 119.4133 106.2024 119.5733 106.3974 119.7943 106.4374 119.8153 106.4404 119.8363 106.4424 119.8573 106.4424 120.0573 106.4384 120.2293 106.3024 120.2783 106.1084 120.6723 104.6864 121.8053 102.3524 123.3143 101.7434 123.5553 101.6374 123.6633 101.3554 123.5563 101.1144 123.5083 101.0084 123.4233 100.9224 123.3163 100.8744L123.3153 100.8734z" class="sapIllus_BrandColorSecondary"/>
</svg>`;
return spotSvg;
}); | |
807.js | const function807 = function (t, e, i) {
"use strict";
var n = this && this.__extends || function () {
var t = Object.setPrototypeOf || { __proto__: [] } instanceof Array && function (t, e) {
t.__proto__ = e
} || function (t, e) {
for (var i in e) e.hasOwnProperty(i) && (t[i] = e[i])
};
return function (e, i) {
function n() {
this.constructor = e
}
t(e, i), e.prototype = null === i ? Object.create(i) : (n.prototype = i.prototype, new n)
}
}();
Object.defineProperty(e, "__esModule", { value: !0 });
var o = i(5), r = i(19), s = i(8), a = i(40), _ = i(12), u = i(6), l = i(3), c = i(338), h = i(1),
p = function (t) {
function e(e, i) {
var n = t.call(this, e) || this;
return n._mainView = i, n._telopBG = new PIXI.Sprite, n._telopLineTop = new PIXI.Sprite, n._telopLineBottom = new PIXI.Sprite, n._telopText = new PIXI.Sprite, n._particles = new d, n
}
return n(e, t), e.prototype._03_link = function () {
this._04_showTelop()
}, e.prototype._04_showTelop = function () {
var t = this;
this._telopBG.texture = l.REMODEL_ANIMATION.getTexture(2), this._telopBG.alpha = .35, this._telopBG.anchor.set(.5), this._telopBG.position.set(o.default.width / 2, o.default.height / 2), this._telopBG.scale.y = 0, this.addChild(this._telopBG);
var e = l.REMODEL_ANIMATION.getTexture(3);
this._telopLineTop.texture = e, this._telopLineTop.anchor.set(.5), this._telopLineTop.position.set(o.default.width / 2, o.default.height / 2), this.addChild(this._telopLineTop), this._telopLineBottom.texture = e, this._telopLineBottom.anchor.set(.5), this._telopLineBottom.position.set(o.default.width / 2, o.default.height / 2), this.addChild(this._telopLineBottom);
var i, n = new r.TweenTask;
i = createjs.Tween.get(this._telopBG.scale).to({ y: 1 }, 300), n.addTween(i), i = createjs.Tween.get(this._telopLineTop).to({ y: o.default.height / 2 - 155 }, 300), n.addTween(i), i = createjs.Tween.get(this._telopLineBottom).to({ y: o.default.height / 2 + 155 }, 300), n.addTween(i), n.start(function () {
t._telopText.texture = l.REMODEL_ANIMATION.getTexture(9), t._telopText.x = o.default.width, t._telopText.y = Math.round(o.default.height / 2 - t._telopText.height / 2), t.addChild(t._telopText), createjs.Tween.get(t._telopText).to({ x: 178 }, 400).call(function () {
u.SE.play("226"), t.addChild(t._particles), t._particles.play()
}).to({ x: 63 }, 1700).call(function () {
t._05_waitClick()
})
})
}, e.prototype._05_waitClick = function () {
var t = this, e = new a.GearBtnHome;
e.initialize(), e.activate(), e.position.set(1140, 660), this.addChild(e);
var i = new s.AreaBox(0);
i.buttonMode = !0, this.addChild(i), i.once(h.EventType.CLICK, function () {
t.removeChild(i), t._06_whiteInOut(e)
})
}, e.prototype._06_whiteInOut = function (t) {
var e = this;
this._white.alpha = 0, this.addChild(this._white), createjs.Tween.get(this._white).to({ alpha: 1 }, 500).call(function () {
e.removeChild(e._background), e.removeChild(e._ship), e.removeChild(e._telopBG), e.removeChild(e._telopLineTop), e.removeChild(e._telopLineBottom), e.removeChild(e._telopText), e.removeChild(e._particles), e.removeChild(t), t.dispose(), e._mainView.visible = !0
}).to({ alpha: 0 }, 500).call(function () {
e.removeChild(e._white), null != e._cb_onComplete && e._cb_onComplete()
})
}, e.prototype.dispose = function () {
this.removeChildren(), t.prototype.dispose.call(this), this._telopBG = null, this._telopLineTop = null, this._telopLineBottom = null, this._telopText = null, this._particles.dispose(), this._particles = null
}, e
}(c.PowerUpAnimation);
e.PowerUpAnimationSuccess = p;
var d = function (t) {
function e() { | var e = t.call(this) || this;
e._particles = [];
for (var i = [[-459, -59], [-392, -74], [-332, 51], [-237, -89], [-158, 66], [-96, -54], [-8, 60], [39, -65], [141, -35], [239, 63], [308, -54], [420, 45]], n = l.REMODEL_ANIMATION.getTexture(8), o = 0; o < 12; o++) {
var r = new _.Sprite(n);
r.anchor.set(.5), r.scale.set(0), r.x = i[o][0] + 600, r.y = i[o][1] + 360, e.addChild(r), e._particles.push(r)
}
return e
}
return n(e, t), e.prototype.play = function () {
this._removeTweens(), this._tweens = [];
for (var t = 0; t < this._particles.length; t++) {
var e = this._particles[t];
e.scale.set(0);
var i = createjs.Tween.get(e).wait(100 * t).to({ scaleX: 1.5, scaleY: 1.5 }, 100).to({
scaleX: 0,
scaleY: 0
}, 100);
this._tweens.push(i)
}
}, e.prototype.dispose = function () {
this._removeTweens(), this.removeChildren(), this._particles = null
}, e.prototype._removeTweens = function () {
if (null != this._tweens) {
for (var t = 0, e = this._tweens; t < e.length; t++) {
e[t].setPaused(!0)
}
this._tweens = null
}
}, e
}(PIXI.Container)
} | |
rotate_useragent.py | # -*-coding:utf-8-*-
import logging
"""避免被ban策略之一:使用useragent池。
使用注意:需在settings.py中进行相应的设置。
"""
import random
from scrapy.downloadermiddlewares.useragent import UserAgentMiddleware
class RotateUserAgentMiddleware(UserAgentMiddleware):
def __init__(self, user_agent=''):
self.user_agent = user_agent
def process_request(self, request, spider):
ua = random.choice(self.user_agent_list)
if ua:
| ra,netscape
    # for more user agent strings, see http://www.useragentstring.com/pages/useragentstring.php
user_agent_list = [
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 ",
"(KHTML, like Gecko) Chrome/22.0.1207.1 Safari/537.1",
"Mozilla/5.0 (X11; CrOS i686 2268.111.0) AppleWebKit/536.11 ",
"(KHTML, like Gecko) Chrome/20.0.1132.57 Safari/536.11",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 ",
"(KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6",
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.6 ",
"(KHTML, like Gecko) Chrome/20.0.1090.0 Safari/536.6",
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.1",
"(KHTML, like Gecko) Chrome/19.77.34.5 Safari/537.1",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/536.5",
"(KHTML, like Gecko) Chrome/19.0.1084.9 Safari/536.5",
"Mozilla/5.0 (Windows NT 6.0) AppleWebKit/536.5",
"(KHTML, like Gecko) Chrome/19.0.1084.36 Safari/536.5",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3",
"(KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/536.3",
"(KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/536.3",
"(KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3",
"(KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3",
"(KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3",
"(KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3",
"(KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/536.3",
"(KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3",
"(KHTML, like Gecko) Chrome/19.0.1061.0 Safari/536.3",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.24",
"(KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24",
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.24",
"(KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24",
]
| #显示当前使用的useragent
#print "********Current UserAgent:%s************" %ua
#记录
logging.log(msg='Current UserAgent: ' + ua, level=logging.DEBUG)
request.headers.setdefault('User-Agent', ua)
#the default user_agent_list composes chrome,I E,firefox,Mozilla,ope |
m1_run_this_on_robot.py | """
Capstone Project. Code to run on the EV3 robot (NOT on a laptop).
Author: Your professors (for the framework)
and Zhicheng Kai.
Winter term, 2018-2019.
"""
| import mqtt_remote_method_calls as com
import time
import shared_gui_delegate_on_robot
def main():
"""
This code, which must run on the EV3 ROBOT:
    1. Makes the EV3 robot do various things.
2. Communicates via MQTT with the GUI code that runs on the LAPTOP.
"""
real_thing()
def real_thing():
robot = rosebot.RoseBot()
delegate = shared_gui_delegate_on_robot.Handler(robot)
mqtt_receiver = com.MqttClient(delegate)
robot.drive_system.mqtt_sender = mqtt_receiver
mqtt_receiver.connect_to_pc()
while True:
time.sleep(0.01)
if delegate.need_to_stop:
print('quit')
break
# -----------------------------------------------------------------------------
# Calls main to start the ball rolling.
# -----------------------------------------------------------------------------
main() | import rosebot |
astar-path-finding.rs | extern crate tcod;
use tcod::AStarPath;
fn create_path() -> AStarPath<'static> {
let chess_board: [[i32; 8]; 8] = [
[1, 0, 1, 0, 1, 0, 1, 0],
[0, 1, 0, 1, 0, 1, 0, 1],
[1, 0, 1, 0, 1, 0, 1, 0],
[0, 1, 0, 1, 0, 1, 0, 1],
[1, 0, 1, 0, 1, 0, 1, 0],
[0, 1, 0, 1, 0, 1, 0, 1],
[1, 0, 1, 0, 1, 0, 1, 0],
[0, 1, 0, 1, 0, 1, 0, 1],
];
// Movement like in Checkers: you can only move to the square of the same colour
let can_move = move |from: (i32, i32), to: (i32, i32)| -> f32 {
let (fx, fy) = from;
let (tx, ty) = to;
if chess_board[fy as usize][fx as usize] == chess_board[ty as usize][tx as usize] {
1.0
} else {
0.0 | };
AStarPath::new_from_callback(8, 8, can_move, 1.0)
}
fn main() {
let mut path = create_path();
assert_eq!(path.find((0, 0), (1, 1)), true);
assert_eq!(path.len(), 1);
assert_eq!(path.is_empty(), false);
assert_eq!(path.find((0, 0), (0, 1)), false);
assert_eq!(path.len(), 0);
assert_eq!(path.is_empty(), true);
assert_eq!(path.find((0, 0), (0, 6)), true);
assert_eq!(path.len(), 6);
assert_eq!(path.origin(), (0, 0));
assert_eq!(path.destination(), (0, 6));
// Explore the path:
assert_eq!(path.get(0), Some((1, 1)));
assert_eq!(path.get(1), Some((0, 2)));
assert_eq!(path.get(2), Some((1, 3)));
assert_eq!(path.get(3), Some((0, 4)));
assert_eq!(path.get(4), Some((1, 5)));
assert_eq!(path.get(5), Some((0, 6)));
// Make sure we don't segfault on invalid index
assert_eq!(path.get(-1), None);
assert_eq!(path.get(6), None);
assert_eq!(path.get(7), None);
// Walk the path (consuming it):
for pos in path.walk() {
println!("Walking to {:?}", pos);
}
assert_eq!(path.len(), 0);
assert_eq!(path.is_empty(), true);
// Note: origin has moved to the destination:
assert_eq!(path.origin(), (0, 6));
assert_eq!(path.destination(), (0, 6));
assert_eq!(path.get(0), None);
} | } |
operations.rs | #![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use crate::models::*;
pub mod query {
use crate::models::*;
pub async fn get(
operation_config: &crate::OperationConfig,
workspace_id: &str,
query: &str,
timespan: Option<&str>,
) -> std::result::Result<QueryResults, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/workspaces/{}/query", operation_config.base_path(), workspace_id);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("query", query);
if let Some(timespan) = timespan {
url.query_pairs_mut().append_pair("timespan", timespan);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: QueryResults =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn | (
operation_config: &crate::OperationConfig,
workspace_id: &str,
body: &QueryBody,
) -> std::result::Result<QueryResults, execute::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/workspaces/{}/query", operation_config.base_path(), workspace_id);
let mut url = url::Url::parse(url_str).map_err(execute::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(execute::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(execute::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(execute::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(execute::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: QueryResults =
serde_json::from_slice(rsp_body).map_err(|source| execute::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| execute::Error::DeserializeError(source, rsp_body.clone()))?;
Err(execute::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod execute {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod metadata {
use crate::models::*;
pub async fn get(operation_config: &crate::OperationConfig, workspace_id: &str) -> std::result::Result<MetadataResults, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/workspaces/{}/metadata", operation_config.base_path(), workspace_id);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: MetadataResults =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn post(operation_config: &crate::OperationConfig, workspace_id: &str) -> std::result::Result<MetadataResults, post::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/workspaces/{}/metadata", operation_config.base_path(), workspace_id);
let mut url = url::Url::parse(url_str).map_err(post::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(post::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(post::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(post::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: MetadataResults =
serde_json::from_slice(rsp_body).map_err(|source| post::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| post::Error::DeserializeError(source, rsp_body.clone()))?;
Err(post::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod post {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
| execute |
tipobrote.repository.ts | import { EntityRepository, Repository } from 'typeorm';
import { Evento } from '../entities/evento.entity';
import { TipoBrote } from '../entities/tipobrote.entity'; | @EntityRepository(TipoBrote)
export class TipoBroteRepository extends Repository<TipoBrote> {} |
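// Usage sketch (TypeORM pre-0.3 custom-repository API; an established
// connection is assumed):
// const repo = getCustomRepository(TipoBroteRepository);
// const brotes = await repo.find();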
|
application_pc.py | # -*- coding: utf-8 -*-
"""
Created on Wed Dec 1 18:09:27 2021
@author: lukas
"""
import cv2
import dlib
import numpy as np
import timeit
import utils
import queue
import multiprocessing
import pathlib
import argparse
import time
def _create_parser():
parser = argparse.ArgumentParser()
parser.add_argument('--descriptor_file', type=str,
default='./saved_descriptors', help='path to descriptor file')
parser.add_argument('--threshold', type=float, default=0.6,
                        help='Threshold of Euclidean distance to distinguish persons.')
parser.add_argument('--max_angle', type=float, default=4.0, help='maximum rotation angle of the face.')
parser.add_argument('--max_fps', type=float, default=5.0, help='maximum frame rate of the application.')
parser.add_argument(
'--cam_url', type=str,
default="http://10.0.0.241/zm/cgi-bin/nph-zms?mode=jpeg&monitor=2&maxfps=5&scale=100&user=admin&pass=admin",
help="IP camera url including username and password") | parser.add_argument('--faceDescriptor', type=str,
default="../dlib_models/dlib_face_recognition_resnet_model_v1.dat",
help="Path to dlibs face recognition model: dlib_face_recognition_resnet_model_v1.dat")
return parser.parse_args()
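# Example invocation (a sketch; the URL and paths are placeholders, and the
# defaults above are used for anything omitted):
#   python application_pc.py --threshold 0.6 --max_fps 5.0 \
#       --cam_url "http://<camera-host>/video" \
#       --descriptor_file ./saved_descriptors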
class StateMachine():
def __init__(self, url, predictorPath, facerecPath, descriptorFilePath,
threshold=0.6, maxFps: float = 5.0, imgSize: int = 384,
maxAngle: float = 4.0, MaxMovement=50.0, showLandmarks: bool = False) -> None:
predictor = dlib.shape_predictor(predictorPath)
detector = dlib.get_frontal_face_detector()
self.__decriptorHandler = utils.Descriptor_FileHandler(descriptorFilePath, threshold)
self.__minLatency = 1000.0/maxFps
self.__imgSize = imgSize
self.__maxAngle = maxAngle
self.__MaxMovement = MaxMovement
self.__showLandmarks = showLandmarks
self.__state = self.___waitForFace
self.__imgPos = np.zeros((4), dtype=np.int32)
self.__name = "processing.."
self.__faceDetected = False
self.__score = 0.0
self.__maxMissDetection = 2
self.__missDetections = 0
self.__ReqHeadRot = 8.0
self.__imgQueue = queue.Queue(maxsize=3)
self.__detectQueue = queue.Queue(maxsize=3)
self.__faceRecQueueIn = multiprocessing.Queue(maxsize=3)
self.__faceRecQueueOut = multiprocessing.Queue(maxsize=3)
self.__ImageWorker = utils.Image_loader(self.__imgQueue, url, imgSize,
maxFps)
self.__DetectionWorker = utils.Detector(self.__imgQueue, self.__detectQueue,
detector, predictor)
self.__FaceRecWorker = utils.FaceDecriptorProcess(self.__faceRecQueueIn, self.__faceRecQueueOut,
facerecPath)
self.__ImageWorker()
self.__DetectionWorker()
self.__FaceRecWorker()
def __del__(self) -> None:
return self.__FaceRecWorker.kill()
def __call__(self, key) -> np.ndarray:
# execute current state
return self.__state(key)
def __discardCurrentDescriptor(self) -> None:
# If Output Queue is empty -> Face descriptor computations are not done yet --> kill the process and restart it
        self.__name = "processing.."
self.__faceDetected = False
self.__score = 0.0
if self.__faceRecQueueOut.empty():
self.__FaceRecWorker.kill()
self.__FaceRecWorker()
return
# If Output Queue is not empty -> Face descriptor computations are done --> discard the face descriptor in the Queue
_ = self.__faceRecQueueOut.get()
return
def ___waitForFace(self, key) -> np.ndarray:
rects, shapes, img = self.__detectQueue.get()
# Multiple persons
if len(rects) > 1:
outtext = "Error: Multiple faces detected."
utils.write_text_bottom(img, outtext, (0, 0, 255))
return img
# No Person
if len(rects) < 1:
outtext = "Error: No faces detected."
utils.write_text_bottom(img, outtext, (0, 0, 255))
return img
shape_np = utils.shape_to_np(shapes[0])
# Check face alignment
rot_angle = utils.get_angle(shape_np[27, :]-shape_np[30, :],
shape_np[27, :]-shape_np[33, :])
tilt_angle = utils.get_angle(shape_np[45, :]-shape_np[36, :],
np.array([1, 0]))
if np.abs(rot_angle) > self.__maxAngle or np.abs(tilt_angle) > self.__maxAngle:
outtext = "Look straight into the camera. Current rot angle: " + \
str(rot_angle) + " tilt angle: " + str(tilt_angle)
utils.write_text_bottom(img, outtext, (0, 127, 255))
return img
# start Computation of face descriptor
self.__faceRecQueueIn.put((shapes, img))
# draw rectangle
rect_np = utils.rect_to_np(rects[0], dtpye=np.int32) # convert dlib rectangle to numpy
img = utils.drawBoxAndName(img, rect_np, self.__name, self.__score)
# draw landmarks
if self.__showLandmarks:
for (px, py) in shape_np:
cv2.circle(img, (int(px), int(py)), 1, (255, 0, 0), -1)
# store face position for tracking
self.__imgPos = rect_np
# next state --> Do life check look left
self.__state = self.__lifeCheckLookLeft
return img
def __lifeCheckLookLeft(self, key) -> np.ndarray:
return self.__lifeCheckLookLeftRight(False, self.__lifeCheckLookRight)
def __lifeCheckLookRight(self, key) -> np.ndarray:
return self.__lifeCheckLookLeftRight(True, self.__tracking)
def __lifeCheckLookLeftRight(self, nLeftRight, nextState) -> np.ndarray:
rects, shapes, img = self.__detectQueue.get()
# Multiple persons
if len(rects) > 1:
outtext = "Error: Multiple faces detected."
utils.write_text_bottom(img, outtext, (0, 0, 255))
self.__missDetections += 1
if self.__missDetections > self.__maxMissDetection:
self.__discardCurrentDescriptor()
self.__state = self.___waitForFace
return img
# No Person
if len(rects) < 1:
outtext = "Error: No faces detected."
utils.write_text_bottom(img, outtext, (0, 0, 255))
self.__missDetections += 1
if self.__missDetections > self.__maxMissDetection:
self.__discardCurrentDescriptor()
self.__state = self.___waitForFace
return img
# Check for feasible movement -> If face jumps around most properly it is no real person
rect_np = utils.rect_to_np(rects[0], dtpye=np.int32) # convert dlib rectangle to numpy
movement = np.linalg.norm(rect_np-self.__imgPos)
if movement > self.__MaxMovement:
self.__discardCurrentDescriptor()
self.__state = self.___waitForFace
return img
shape_np = utils.shape_to_np(shapes[0])
# Check face alignment
rot_angle = utils.get_angle(shape_np[27, :]-shape_np[30, :],
shape_np[27, :]-shape_np[33, :])
if not nLeftRight and (rot_angle < self.__ReqHeadRot):
outtext = "Rotate your head LEFT. Current rot angle: " + str(rot_angle)
utils.write_text_bottom(img, outtext, (100, 255, 255))
cv2.arrowedLine(img, (30, int(self.__imgSize/2)), (5, int(self.__imgSize/2)),
(100, 255, 255), 3)
elif nLeftRight and (rot_angle > (-self.__ReqHeadRot)):
outtext = "Rotate your head RIGHT. Current rot angle: " + str(rot_angle)
utils.write_text_bottom(img, outtext, (255, 255, 100))
cv2.arrowedLine(img, (self.__imgSize-30, int(self.__imgSize/2)-5),
(self.__imgSize, int(self.__imgSize/2)),
(255, 255, 100), 3)
else:
outtext = "Good"
utils.write_text_bottom(img, outtext, (0, 255, 0))
# next state --> Do life check look left
self.__state = nextState
img = utils.drawBoxAndName(img, rect_np, self.__name, self.__score)
if self.__showLandmarks:
for (px, py) in shape_np:
cv2.circle(img, (int(px), int(py)), 1, (255, 0, 0), -1)
self.__imgPos = rect_np
return img
def __tracking(self, key):
rects, shapes, img = self.__detectQueue.get()
# Multiple persons
if len(rects) > 1:
outtext = "Error: Multiple faces detected."
utils.write_text_bottom(img, outtext, (0, 0, 255))
self.__missDetections += 1
if self.__missDetections > self.__maxMissDetection:
self.__discardCurrentDescriptor()
self.__state = self.___waitForFace
return img
# No Person
if len(rects) < 1:
outtext = "Error: No faces detected."
utils.write_text_bottom(img, outtext, (0, 0, 255))
self.__missDetections += 1
if self.__missDetections > self.__maxMissDetection:
self.__discardCurrentDescriptor()
self.__state = self.___waitForFace
return img
# Check for feasible movement -> If face jumps around most properly it is no real person
rect_np = utils.rect_to_np(rects[0], dtpye=np.int32) # convert dlib rectangle to numpy
movement = np.linalg.norm(rect_np-self.__imgPos)
if movement > self.__MaxMovement:
self.__discardCurrentDescriptor()
self.__state = self.___waitForFace
return img
shape_np = utils.shape_to_np(shapes[0])
# Check face alignment
rot_angle = utils.get_angle(shape_np[27, :]-shape_np[30, :],
shape_np[27, :]-shape_np[33, :])
tilt_angle = utils.get_angle(shape_np[45, :]-shape_np[36, :],
np.array([1, 0]))
if self.__faceDetected:
            outtext = self.__name + " detected with {}% confidence.".format(self.__score)
utils.write_text_bottom(img, outtext, (0, 255, 0))
else:
if self.__faceRecQueueOut.empty():
outtext = self.__name
utils.write_text_bottom(img, outtext, (255, 0, 0))
else:
faceDescriptor = self.__faceRecQueueOut.get()
self.__faceDetected, self.__name = self.__decriptorHandler.exists(faceDescriptor)
self.__score = 99.38 # dlib face recognition accuracy
img = utils.drawBoxAndName(img, rect_np, self.__name, self.__score)
if self.__showLandmarks:
for (px, py) in shape_np:
cv2.circle(img, (int(px), int(py)), 1, (255, 0, 0), -1)
self.__imgPos = rect_np
return img
def main(opt):
stm = StateMachine(opt.cam_url, opt.landmarkPredictor, opt.faceDescriptor,
opt.descriptor_file, opt.threshold,
maxFps=opt.max_fps, imgSize=384, showLandmarks=True)
key = 0
while(True):
img = stm(key)
cv2.imshow("Face Detector", img)
key = cv2.waitKey(1) & 0xFF
if key == ord('q'):
break
# Destroy all the windows
cv2.destroyAllWindows()
print("Leave Face Detection")
if __name__ == '__main__':
opt = _create_parser()
print(opt)
main(opt) | parser.add_argument('--landmarkPredictor', type=str,
default="../dlib_models/shape_predictor_68_face_landmarks.dat",
help="Path to dlib 68 face landmark predictor: shape_predictor_68_face_landmarks.dat") |
resource_cluster_unit_test.go | package instaclustr
import (
"fmt"
"github.com/hashicorp/terraform/helper/schema"
"reflect"
"strings" | func TestCreateBundleUserUpdateRequest(t *testing.T) {
testUsername := "hail"
testPassword := "reallySecure123"
testBundleRequest := createBundleUserUpdateRequest(testUsername, testPassword)
expectedOutput := []byte(fmt.Sprintf("{\"username\":\"%s\",\"password\":\"%s\"}", testUsername, testPassword))
if !reflect.DeepEqual(testBundleRequest, expectedOutput) {
t.Fatalf("Incorrect request returned.\nExpected:%s\nActual:%s", expectedOutput, testBundleRequest)
}
}
func TestGetBundleConfig(t *testing.T) {
var testBundles []Bundle
var testBundleConfig BundleConfig
testBundles = append(testBundles, Bundle{Bundle: "KAFKA"})
testBundleConfig = getBundleConfig(testBundles)
expectedOutput := BundleConfig{
IsKafkaCluster: true,
HasRestProxy: false,
HasSchemaRegistry: false}
if testBundleConfig != expectedOutput {
t.Fatalf("Incorrect Bundle Config returned.\nExpected: %+v\nActual: %+v", expectedOutput, testBundleConfig)
}
testBundles = append(testBundles, Bundle{Bundle: "KAFKA_REST_PROXY"})
testBundleConfig = getBundleConfig(testBundles)
expectedOutput = BundleConfig{
IsKafkaCluster: true,
HasRestProxy: true,
HasSchemaRegistry: false}
if testBundleConfig != expectedOutput {
t.Fatalf("Incorrect Bundle Config returned.\nExpected: %+v\nActual: %+v", expectedOutput, testBundleConfig)
}
testBundles = append(testBundles, Bundle{Bundle: "KAFKA_SCHEMA_REGISTRY"})
testBundleConfig = getBundleConfig(testBundles)
expectedOutput = BundleConfig{
IsKafkaCluster: true,
HasRestProxy: true,
HasSchemaRegistry: true}
if testBundleConfig != expectedOutput {
t.Fatalf("Incorrect Bundle Config returned.\nExpected: %+v\nActual: %+v", expectedOutput, testBundleConfig)
}
}
func TestAppendIfMissing(t *testing.T) {
var testSlice []string
testSlice = append(testSlice, "1", "2")
testString := "test"
expectedSlice := append(testSlice, testString)
appendedSlice := appendIfMissing(testSlice, testString)
if !reflect.DeepEqual(expectedSlice, appendedSlice) {
t.Fatalf("Value appened incorrectly to the slice.\nExpected: %s\nActual: %s", expectedSlice, appendedSlice)
}
appendedSlice = appendIfMissing(testSlice, testString)
if !reflect.DeepEqual(expectedSlice, appendedSlice) {
t.Fatalf("Value appened incorrectly to the slice.\nExpected: %s\nActual: %s", expectedSlice, appendedSlice)
}
}
func TestFormatCreateErrMsg(t *testing.T) {
testError := fmt.Errorf("test error")
formattedError := formatCreateErrMsg(testError)
expectedError := fmt.Sprintf("[Error] Error creating cluster: %s", testError.Error())
if formattedError.Error() != expectedError {
t.Fatalf("Incorrectly formatted error.\nExpected: %s\nActual: %s", expectedError, formattedError)
}
}
func TestCheckIfBundleRequiresRackAllocation(t *testing.T) {
bundles := []Bundle{{Bundle: "REDIS"}}
isRackAllocationRequired := checkIfBundleRequiresRackAllocation(bundles)
if isRackAllocationRequired == true {
t.Fatalf("Incorrect check performed for REDIS bundle.\nExpected: %v\nActual: %v\n", false, true)
}
bundles = []Bundle{{Bundle: "APACHE_ZOOKEEPER"}}
isRackAllocationRequired = checkIfBundleRequiresRackAllocation(bundles)
if isRackAllocationRequired == true {
t.Fatalf("Incorrect check performed for APACHE_ZOOKEEPER bundle.\nExpected: %v\nActual: %v\n", false, true)
}
bundles = []Bundle{{Bundle: "POSTGRESQL"}}
isRackAllocationRequired = checkIfBundleRequiresRackAllocation(bundles)
if isRackAllocationRequired == true {
t.Fatalf("Incorrect check performed for POSTGRESQL bundle.\nExpected: %v\nActual: %v\n", false, true)
}
bundles = []Bundle{{Bundle: "APACHE_CASSANDRA"}}
isRackAllocationRequired = checkIfBundleRequiresRackAllocation(bundles)
if isRackAllocationRequired == false {
t.Fatalf("Incorrect check performed for APACHE_CASSANDRA.\nExpected: %v\nActual: %v\n", true, false)
}
}
func TestIsElasticsearchSizeAllChange(t *testing.T) {
helper := func(kibanaSize, masterSize, dataSize, expectedNewSize string, kibana, dedicatedMaster, expectedIsAll bool) {
newSize, isAllChange := isElasticsearchSizeAllChange(kibanaSize, masterSize, dataSize, kibana, dedicatedMaster)
if isAllChange != expectedIsAll {
t.Fatalf("changeAll should be %t when using kibanaSize: %s, masterSize: %s, dataSize: %s, kibana: %t, dedicatedMaster: %t", expectedIsAll, kibanaSize, masterSize, dataSize, kibana, dedicatedMaster)
}
if isAllChange && newSize != expectedNewSize {
t.Fatalf("newSize should be %s when using kibanaSize: %s, masterSize: %s, dataSize: %s, kibana: %t, dedicatedMaster:%t", expectedNewSize, kibanaSize, masterSize, dataSize, kibana, dedicatedMaster)
}
}
helper("t3.small-v2", "t3.small-v2", "t3.small-v2", "t3.small-v2", true, true, true)
helper("t3.small-v2", "t3.small-v2", "", "t3.small-v2", true, false, true)
helper("", "t3.small-v2", "t3.small-v2", "t3.small-v2", false, true, true)
helper("", "t3.small-v2", "", "t3.small-v2", false, false, true)
helper("t3.small-v2", "m5l-400-v2", "t3.small-v2", "t3.small-v2", true, true, false)
helper("t3.small-v2", "t3.small-v2", "m5l-400-v2", "t3.small-v2", true, true, false)
helper("m5l-400-v2", "t3.small-v2", "t3.small-v2", "t3.small-v2", true, true, false)
helper("", "t3.small-v2", "", "t3.small-v2", false, true, false)
helper("", "t3.small-v2", "t3.small-v2", "t3.small-v2", true, false, false)
helper("t3.small-v2", "", "t3.small-v2", "t3.small-v2", false, false, false)
}
func TestIsKafkaSizeAllChange(t *testing.T) {
helper := func(brokerSize, zookeeperSize, expectedNewSize string, dedicatedZookeeper, expectedIsAll bool) {
newSize, isAllChange := isKafkaSizeAllChange(brokerSize, zookeeperSize, dedicatedZookeeper)
if isAllChange != expectedIsAll {
t.Fatalf("changeAll should be %t when using brokerSize: %s, zookeeperSize: %s, dedicatedZookeeper: %t", expectedIsAll, brokerSize, zookeeperSize, dedicatedZookeeper)
}
if isAllChange && newSize != expectedNewSize {
t.Fatalf("newSize should be %s when using brokerSize: %s, zookeeperSize: %s, dedicatedZookeeper: %t c", expectedNewSize, brokerSize, zookeeperSize, dedicatedZookeeper)
}
}
helper("t3.small-v2", "t3.small-v2", "t3.small-v2", true, true)
helper("t3.small-v2", "t3.small-v2", "t3.small-v2", false, true)
helper("t3.small-v2", "", "t3.small-v2", true, false)
helper("t3.small-v2", "m5l-400-v2", "t3.small-v2", true, false)
}
func TestGetSingleChangedElasticsearchSizeAndPurpose(t *testing.T) {
helper := func(kibanaSize, masterSize, dataSize, expectedNewSize string, kibana, dedicatedMaster, expectErr bool, expectedNodePurpose NodePurpose) {
newSize, nodePurpose, err := getSingleChangedElasticsearchSizeAndPurpose(kibanaSize, masterSize, dataSize, kibana, dedicatedMaster)
if expectErr {
if err == nil {
t.Fatalf("expect error when using kibanaSize: %s, masterSize: %s, dataSize: %s, kibana: %t, dedicatedMaster: %t", kibanaSize, masterSize, dataSize, kibana, dedicatedMaster)
} else {
return
}
}
if err != nil {
t.Fatalf("got unexpected error: %s when using kibanaSize: %s, masterSize: %s, dataSize: %s, kibana: %t, dedicatedMaster: %t", err.Error(), kibanaSize, masterSize, dataSize, kibana, dedicatedMaster)
}
if newSize != expectedNewSize {
t.Fatalf("newSize should be %s when using kibanaSize: %s, masterSize: %s, dataSize: %s, kibana: %t, dedicatedMaster:%t", expectedNewSize, kibanaSize, masterSize, dataSize, kibana, dedicatedMaster)
}
if nodePurpose.String() != expectedNodePurpose.String() {
t.Fatalf("nodePurpose should be %s when using kibanaSize: %s, masterSize: %s, dataSize: %s, kibana: %t, dedicatedMaster:%t", expectedNodePurpose, kibanaSize, masterSize, dataSize, kibana, dedicatedMaster)
}
}
helper("t3.small-v2", "t3.small-v2", "t3.small-v2", "t3.small-v2", true, true, true, ELASTICSEARCH_KIBANA)
helper("", "", "t3.small-v2", "t3.small-v2", true, false, true, ELASTICSEARCH_KIBANA)
helper("t3.small-v2", "", "", "t3.small-v2", false, false, true, ELASTICSEARCH_KIBANA)
helper("t3.small-v2", "", "t3.small-v2", "t3.small-v2", false, false, true, ELASTICSEARCH_KIBANA)
helper("t3.small-v2", "", "t3.small-v2", "t3.small-v2", false, false, true, ELASTICSEARCH_KIBANA)
helper("t3.small-v2", "", "", "t3.small-v2", true, false, false, ELASTICSEARCH_KIBANA)
helper("t3.small-v2", "", "", "t3.small-v2", true, true, false, ELASTICSEARCH_KIBANA)
helper("", "t3.small-v2", "", "t3.small-v2", true, true, false, ELASTICSEARCH_MASTER)
helper("", "t3.small-v2", "", "t3.small-v2", true, false, false, ELASTICSEARCH_MASTER)
helper("", "", "t3.small-v2", "t3.small-v2", true, true, false, ELASTICSEARCH_DATA_AND_INGEST)
}
func TestGetSingleChangedKafkaSizeAndPurpose(t *testing.T) {
helper := func(brokerSize, zookeeperSize, expectedNewSize string, dedicatedZookeeper, expectErr bool, expectedNodePurpose NodePurpose) {
newSize, nodePurpose, err := getSingleChangedKafkaSizeAndPurpose(brokerSize, zookeeperSize, dedicatedZookeeper)
if expectErr {
if err == nil {
t.Fatalf("expect error when using brokerSize: %s, zookeeperSize: %s, dedicatedZookeeper: %t", brokerSize, zookeeperSize, dedicatedZookeeper)
} else {
return
}
}
if err != nil {
t.Fatalf("got unexpected error: %s when using brokerSize: %s, zookeeperSize: %s, dedicatedZookeeper: %t", err.Error(), brokerSize, zookeeperSize, dedicatedZookeeper)
}
if newSize != expectedNewSize {
t.Fatalf("newSize should be %s when using brokerSize: %s, zookeeperSize: %s, dedicatedZookeeper: %t", expectedNewSize, brokerSize, zookeeperSize, dedicatedZookeeper)
}
if nodePurpose.String() != expectedNodePurpose.String() {
t.Fatalf("nodePurpose should be %s when using brokerSize: %s, zookeeperSize: %s, dedicatedZookeeper: %t", expectedNodePurpose, brokerSize, zookeeperSize, dedicatedZookeeper)
}
}
helper("t3.small-v2", "t3.small-v2", "t3.small-v2", false, true, KAFKA_BROKER)
helper("t3.small-v2", "t3.small-v2", "t3.small-v2", true, true, KAFKA_BROKER)
helper("t3.small-v2", "", "t3.small-v2", false, false, KAFKA_BROKER)
helper("t3.small-v2", "", "t3.small-v2", true, false, KAFKA_BROKER)
helper("", "t3.small-v2", "t3.small-v2", true, false, KAFKA_DEDICATED_ZOOKEEPER)
}
func TestGetBundleOptionKey(t *testing.T) {
helper := func(bundleIndex int, option, expect string) {
if getBundleOptionKey(bundleIndex, option) != expect {
t.Fatalf("With parameter %d, %s, should return %s", bundleIndex, option, expect)
}
}
helper(0, "test", "bundle.0.options.test")
helper(2, "kibana_node_size", "bundle.2.options.kibana_node_size")
helper(-1, "kibana_node_size", "bundle.-1.options.kibana_node_size")
}
func TestGetNodeSize(t *testing.T) {
helper := func(data resourceDataInterface, bundles []Bundle, expectedErrMsg, expectedSize string) {
size, err := getNodeSize(data, bundles)
if len(expectedErrMsg) > 0 {
if err == nil || err.Error() != expectedErrMsg {
t.Fatalf("Expect error %s but got %s", expectedErrMsg, err)
}
} else {
if err != nil {
t.Fatalf("Expect error to be nil but got %s", err)
}
if size != expectedSize {
t.Fatalf("Expect size %s but got %s", expectedSize, size)
}
}
}
data := MockResourceData{
map[string]MockChange{},
}
bundles := []Bundle{
{
Bundle: "ELASTICSEARCH",
Options: &BundleOptions{
DedicatedMasterNodes: nil,
MasterNodeSize: "",
KibanaNodeSize: "",
DataNodeSize: "",
},
},
}
helper(data, bundles, "[ERROR] 'master_node_size' is required in the bundle option.", "")
bundles = []Bundle{
{
Bundle: "CASSANDRA",
Options: &BundleOptions{
DedicatedMasterNodes: nil,
MasterNodeSize: "",
KibanaNodeSize: "",
DataNodeSize: "",
},
},
}
data.changes["node_size"] = MockChange{
before: "",
after: "t3.small",
}
helper(&data, bundles, "", "t3.small")
bundles = []Bundle{
{
Bundle: "Kafka",
Options: &BundleOptions{
DedicatedMasterNodes: nil,
MasterNodeSize: "",
KibanaNodeSize: "",
DataNodeSize: "",
},
},
}
data.changes["node_size"] = MockChange{
before: "",
after: "t3.small",
}
helper(&data, bundles, "", "t3.small")
dedicatedMaster := true
bundles = []Bundle{
{
Bundle: "ELASTICSEARCH",
Options: &BundleOptions{
DedicatedMasterNodes: &dedicatedMaster,
MasterNodeSize: "t3.small",
KibanaNodeSize: "",
DataNodeSize: "",
},
},
}
helper(&data, bundles, "[ERROR] Elasticsearch dedicated master is enabled, 'data_node_size' is required in the bundle option.", "")
bundles = []Bundle{
{
Bundle: "ELASTICSEARCH",
Options: &BundleOptions{
DedicatedMasterNodes: &dedicatedMaster,
MasterNodeSize: "t3.small",
KibanaNodeSize: "",
DataNodeSize: "t3.small-v2",
},
},
}
helper(&data, bundles, "", "t3.small-v2")
dedicatedMaster = false
bundles = []Bundle{
{
Bundle: "ELASTICSEARCH",
Options: &BundleOptions{
DedicatedMasterNodes: &dedicatedMaster,
MasterNodeSize: "t3.small",
KibanaNodeSize: "",
DataNodeSize: "t3.small-v2",
},
},
}
helper(&data, bundles, "[ERROR] When 'dedicated_master_nodes' is not true , data_node_size can be either null or equal to master_node_size.", "")
bundles = []Bundle{
{
Bundle: "ELASTICSEARCH",
Options: &BundleOptions{
DedicatedMasterNodes: &dedicatedMaster,
MasterNodeSize: "t3.small",
KibanaNodeSize: "",
DataNodeSize: "t3.small",
},
},
}
helper(&data, bundles, "", "t3.small")
}
func TestGetBundleIndex(t *testing.T) {
if index, err := getBundleIndex("ELASTICSEARCH", []Bundle{
{Bundle: "LOG_SHIPPER"},
{Bundle: "ELASTICSEARCH"},
}); err != nil || index != 1 {
t.Fatalf("Expect no error and 1, got %v and %v", err, index)
}
if index, err := getBundleIndex("ELASTICSEARCH", []Bundle{
{Bundle: "ELASTICSEARCH"},
}); err != nil || index != 0 {
t.Fatalf("Expect no error and 0, got %v and %v", err, index)
}
}
func TestGetNewSizeOrEmpty(t *testing.T) {
data := schema.ResourceData{}
if size := getNewSizeOrEmpty(&data, "node_size"); size != "" {
t.Fatalf("Expect empty string but got %v", size)
}
}
func TestHasElasticsearchSizeChanges(t *testing.T) {
data := schema.ResourceData{}
if hasChange := hasElasticsearchSizeChanges(0, &data); hasChange {
t.Fatalf("Expect false but got %v", hasChange)
}
}
func TestHasKafkaSizeChanges(t *testing.T) {
data := schema.ResourceData{}
if hasChange := hasKafkaSizeChanges(0, &data); hasChange {
t.Fatalf("Expect false but got %v", hasChange)
}
}
func TestHasCassandraSizeChanges(t *testing.T) {
data := schema.ResourceData{}
if hasChange := hasCassandraSizeChanges(&data); hasChange {
t.Fatalf("Expect false but got %v", hasChange)
}
}
func TestDoClusterResizeDefault(t *testing.T) {
err := doClusterResize(MockApiClient{
cluster: Cluster{
ID: "REDIS",
BundleType: "REDIS",
},
}, "mock", MockResourceData{}, []Bundle{
{Bundle: "REDIS"},
})
if err == nil || !strings.Contains(err.Error(), "CDC resize does not support:") {
t.Fatalf("Expect err with 'CDC resize does not support:' but got %v", err)
}
}
func TestDoClusterResizeES(t *testing.T) {
client := MockApiClient{
cluster: Cluster{
ID: "mock",
BundleType: "ELASTICSEARCH",
BundleOption: &BundleOptions{},
DataCentres: []DataCentre{
{ID: "test"},
},
},
}
data := MockResourceData{
changes: map[string]MockChange{"bundle.0.options.master_node_size": {before: "t3.small", after: "t3.small-v2"}},
}
bundles := []Bundle{
{Bundle: "ELASTICSEARCH"},
}
err := doClusterResize(client, "mock", data, bundles)
if err != nil {
t.Fatalf("Expect nil err but got %v", err)
}
delete(data.changes, "bundle.0.options.master_node_size")
err = doClusterResize(client, "mock", data, bundles)
if err != nil {
t.Fatalf("Expect nil err but got %v", err)
}
}
func TestDoClusterResizeKA(t *testing.T) {
client := MockApiClient{
cluster: Cluster{
ID: "mock",
BundleType: "KAFKA",
BundleOption: &BundleOptions{},
DataCentres: []DataCentre{
{ID: "test"},
},
},
}
data := MockResourceData{
changes: map[string]MockChange{"node_size": {before: "t3.small", after: "t3.small-v2"}},
}
bundles := []Bundle{
{Bundle: "KAFKA"},
}
err := doClusterResize(client, "mock", data, bundles)
if err != nil {
t.Fatalf("Expect nil err but got %v", err)
}
delete(data.changes, "node_size")
err = doClusterResize(client, "mock", data, bundles)
if err != nil {
t.Fatalf("Expect nil err but got %v", err)
}
}
func TestDoClusterResizeCA(t *testing.T) {
client := MockApiClient{
cluster: Cluster{
ID: "mock",
BundleType: "APACHE_CASSANDRA",
BundleOption: &BundleOptions{},
DataCentres: []DataCentre{
{ID: "test"},
},
},
}
data := MockResourceData{
changes: map[string]MockChange{"node_size": {before: "t3.small", after: "t3.small-v2"}},
}
bundles := []Bundle{
{Bundle: "APACHE_CASSANDRA"},
}
err := doClusterResize(client, "mock", data, bundles)
if err == nil || err.Error() != "[Error] Cannot resize nodes from t3.small to t3.small-v2" {
t.Fatalf("Expect err to be '[Error] Cannot resize nodes from t3.small to t3.small-v2' but got %v", err)
}
delete(data.changes, "node_size")
err = doClusterResize(client, "mock", data, bundles)
if err != nil {
t.Fatalf("Expect nil err but got %v", err)
}
}
func TestCreateVpcPeeringRequest(t *testing.T) {
resourceSchema := map[string]*schema.Schema{
"peer_vpc_id": {
Type: schema.TypeString,
},
"peer_account_id": {
Type: schema.TypeString,
},
"peer_subnets": {
Type: schema.TypeSet,
Elem: &schema.Schema{
Type: schema.TypeString,
},
},
"peer_region": {
Type: schema.TypeString,
},
}
peerSubnets := schema.NewSet(schema.HashString, []interface{}{"10.20.0.0/16", "10.21.0.0/16"})
resourceDataMap := map[string]interface{}{
"peer_vpc_id": "vpc-12345678",
"peer_account_id": "494111121110",
"peer_subnets": peerSubnets.List(),
"peer_region": "",
}
resourceLocalData := schema.TestResourceDataRaw(t, resourceSchema, resourceDataMap)
if _, err := createVpcPeeringRequest(resourceLocalData); err != nil {
t.Fatalf("Expected nil error but got %v", err)
}
}
func TestCreateVpcPeeringRequestLegacy(t *testing.T) {
resourceSchema := map[string]*schema.Schema{
"peer_vpc_id": {
Type: schema.TypeString,
},
"peer_account_id": {
Type: schema.TypeString,
},
"peer_subnet": {
Type: schema.TypeString,
},
"peer_region": {
Type: schema.TypeString,
},
}
resourceDataMap := map[string]interface{}{
"peer_vpc_id": "vpc-12345678",
"peer_account_id": "494111121110",
"peer_subnet": "10.20.0.0/16",
"peer_region": "",
}
resourceLocalData := schema.TestResourceDataRaw(t, resourceSchema, resourceDataMap)
if _, err := createVpcPeeringRequest(resourceLocalData); err != nil {
t.Fatalf("Expected nil error but got %v", err)
}
}
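// MockApiClient is a lightweight test double for the cluster API client; it
// returns whatever cluster and error are configured on the struct.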
type MockApiClient struct {
cluster Cluster
err error
}
func (m MockApiClient) ReadCluster(clusterID string) (*Cluster, error) {
return &m.cluster, m.err
}
func (m MockApiClient) ResizeCluster(clusterID string, cdcID string, newNodeSize string, nodePurpose *NodePurpose) error {
return m.err
}
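// MockChange and MockResourceData fake the subset of Terraform's
// schema.ResourceData change-tracking API that the resize helpers consume.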
type MockChange struct {
before interface{}
after interface{}
}
type MockResourceData struct {
changes map[string]MockChange
}
func (m MockResourceData) HasChange(key string) bool {
_, ok := m.changes[key]
return ok
}
func (m MockResourceData) GetChange(key string) (interface{}, interface{}) {
return m.changes[key].before, m.changes[key].after
}
func (m MockResourceData) GetOk(key string) (interface{}, bool) {
change, ok := m.changes[key]
if ok {
return change.after, ok
} else {
return nil, ok
}
}
func (m MockResourceData) Get(key string) interface{} {
change, ok := m.changes[key]
if ok {
return change.after
} else {
return nil
}
} | "testing"
)
|
ignore.py | from __future__ import unicode_literals
import os
from dulwich.ignore import match_pattern, read_ignore_patterns
from dvc.utils import relpath
from dvc.utils.compat import cast_bytes
from dvc.utils.fs import get_parent_dirs_up_to
class DvcIgnoreFileHandler(object):
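"""Reads .dvcignore files through the given tree and splits their patterns."""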
def __init__(self, tree):
self.tree = tree
def read_patterns(self, path):
with self.tree.open(path, binary=True) as stream:
return self._read_patterns(stream)
def get_repo_root(self):
return self.tree.tree_root
def _read_patterns(self, binary_stream):
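"""Split an ignore-file stream into negate ("!") patterns and plain patterns."""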
negate_patterns = []
patterns = []
for pattern in read_ignore_patterns(binary_stream):
if pattern.lstrip().startswith(b"!"):
negate_patterns.append(pattern)
else:
patterns.append(pattern)
return negate_patterns, patterns
class DvcIgnore(object):
DVCIGNORE_FILE = ".dvcignore"
def __call__(self, root, dirs, files):
raise NotImplementedError
class DvcIgnoreFromFile(DvcIgnore):
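"""Ignore rules loaded from a single .dvcignore file, matched relative to its directory."""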
def __init__(self, ignore_file_path, ignore_handler):
self.ignore_file_path = ignore_file_path
self.dirname = os.path.normpath(os.path.dirname(ignore_file_path))
self.patterns = []
self.negate_patterns = []
self.negate_patterns, self.patterns = ignore_handler.read_patterns(
ignore_file_path
)
def __call__(self, root, dirs, files):
files = [f for f in files if not self.matches(root, f)]
dirs = [d for d in dirs if not self.matches(root, d)]
return dirs, files
def get_match(self, abs_path):
relative_path = relpath(abs_path, self.dirname)
if os.name == "nt":
relative_path = relative_path.replace("\\", "/")
relative_path = cast_bytes(relative_path, "utf-8")
for pattern in self.patterns:
if match_pattern(
relative_path, pattern
) and self._no_negate_pattern_matches(relative_path):
return (abs_path, pattern, self.ignore_file_path)
return None
def matches(self, dirname, basename):
if self.get_match(os.path.join(dirname, basename)):
return True
return False
def _no_negate_pattern_matches(self, path):
return all(not match_pattern(path, p) for p in self.negate_patterns)
def __hash__(self):
return hash(self.ignore_file_path)
class DvcIgnoreConstant(DvcIgnore):
def __init__(self, basename):
self.basename = basename
class DvcIgnoreDir(DvcIgnoreConstant):
def __call__(self, root, dirs, files):
dirs = [d for d in dirs if not d == self.basename]
return dirs, files
class DvcIgnoreFile(DvcIgnoreConstant):
def | (self, root, dirs, files):
files = [f for f in files if not f == self.basename]
return dirs, files
class DvcIgnoreFilter(object):
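"""Combines the built-in ignores (.git, .hg, .dvc, .dvcignore) with any
.dvcignore files discovered between the repo root and the walked directory."""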
def __init__(self, wdir, ignore_file_handler=None):
self.ignores = [
DvcIgnoreDir(".git"),
DvcIgnoreDir(".hg"),
DvcIgnoreDir(".dvc"),
DvcIgnoreFile(".dvcignore"),
]
self.ignore_file_handler = ignore_file_handler
self._process_ignores_in_parent_dirs(wdir)
def _process_ignores_in_parent_dirs(self, wdir):
if self.ignore_file_handler:
wdir = os.path.normpath(os.path.abspath(wdir))
ignore_search_end_dir = self.ignore_file_handler.get_repo_root()
parent_dirs = get_parent_dirs_up_to(wdir, ignore_search_end_dir)
for d in parent_dirs:
self.update(d)
def update(self, wdir):
ignore_file_path = os.path.join(wdir, DvcIgnore.DVCIGNORE_FILE)
if os.path.exists(ignore_file_path):
file_ignore = DvcIgnoreFromFile(
ignore_file_path, ignore_handler=self.ignore_file_handler
)
self.ignores.append(file_ignore)
def __call__(self, root, dirs, files):
if self.ignore_file_handler:
self.update(root)
for ignore in self.ignores:
dirs, files = ignore(root, dirs, files)
return dirs, files
| __call__ |
eth_test.py | #!/usr/bin/python
import sys
import socket
import struct
import time
import logging
import sys,getopt
import os
import random
import numpy
from matplotlib import pyplot as plt
IPADDR = os.environ.get('IP_ADDR')
if IPADDR is None: IPADDR = 'rflab1.lbl.gov' # 128.3.128.122
PORTNUM = 3000
global plot_ena, slow_ena
plot_ena=0
slow_ena=0
def three_bytes(ad):
|
def mem_gate_write_prep(alist, dlist):
" write register through mem_gateway "
p = struct.pack('!I',random.getrandbits(32))
p += struct.pack('!I',random.getrandbits(32))
for ix,ad in enumerate(alist):
# write commands carry a 0x00 opcode, a three-byte address and four data bytes
# print dlist[ix]
p += '\x00' + three_bytes(ad) + struct.pack('!I',dlist[ix])
#print p.encode('hex')
return p
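# Example (hypothetical address/value pair):
#   pkt = mem_gate_write_prep([0x3800], [1])
#   mem_gate_write(s, pkt)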
def mem_gate_write(s,p):
s.send(p)
r, addr = s.recvfrom(1024) # buffer size is 1024 bytes
#print r.encode('hex')
if (r[0:8] != p[0:8]):
print "header mismatch"
sys.exit(2)
#res=[] # build up result list here
#for ix in range(0, len(alist)):
# rh = (r[12+8*ix:16+8*ix])
# res.append(struct.unpack('!I',rh)[0])
## print "%6.6x: %s"%(alist[ix], rh.encode('hex'))
#return res
def mem_gate_read(s, alist):
" read config_romx "
p = struct.pack('!I',random.getrandbits(32))
p += struct.pack('!I',random.getrandbits(32))
for ad in alist:
# read commands include space for result
p += '\x10' + three_bytes(ad) + 4*' '
s.send(p)
r, addr = s.recvfrom(1024) # buffer size is 1024 bytes
if (r[0:8] != p[0:8]):
print "header mismatch"
sys.exit(2)
ra = r[ 8:12]
if (alist[0] + 0x10000000 != int(ra.encode('hex'),16)):
print 'echo first address %x %x'%(alist[0],int(ra.encode('hex'),16))
res=[] # build up result list here
for ix in range(0, len(alist)):
rv = r[12+8*ix:16+8*ix]
int_value=int(rv.encode('hex'),16)
res.append(int_value)
return res
def decode_lbnl_rom(dat):
" decode content of config_romx "
d = numpy.bitwise_and(dat, 0xff)
if (d[0] == 85):
user_l={1:"ldoolitt",2:"cswanson",3:"kasemir",4:"hengjie",5:"crofford",6:"meddeler",7:"baptiste",8:"llrf_oper",9:"hyaver",10:"dim",11:"begcbp",12:"ghuang",13:"luser",14:"kstefan",15:"cserrano",16:"asalom",17:"du",18:"yangjin",19:"lilima",20:"ernesto"}
user = user_l[d[9]] if d[9] in user_l else "unknown"
board_l={1:"mebt",2:"interim",3:"fcm",4:"avnet",5:"uxo",6:"llrf4",7:"av5t",8:"sp601",9:"sp605",10:"ml505",11:"ml506",12:"fllrf",13:"spec",14:"lx150t",15:"cute_wr",17:"ac701",18:"ml605",19:"kc705",99:"test"}
board = board_l[d[10]] if d[10] in board_l else "unknown"
print "DSP flavor: %d"%d[1]
print "build date: %4.4d-%2.2d-%2.2d"%(d[2]+2000,d[3],d[4])
print "build time: %2.2d:%2.2d UTC"%(d[5],d[6])
print "tool rev: %d.%d"%(d[8]/16,d[8]%16)
print "user: %d (%s)"%(d[9],user)
print "board type: %d (%s)"%(d[10],board)
gs=""
for ix in range(0,20):
gs+=chr(d[12+ix])
print "git commit: %s"%(gs.encode('hex'))
if d[32] == 170:
print "circle_aw: %d"%d[33]
print "mode_count: %d"%d[34]
print "mode_shift: %d"%d[35]
print "n_mech_modes: %d"%d[36]
print "df_scale: %d"%d[37]
print "simple_demo: %d"%d[38]
else:
print "no magic found %d"%d[0]
# circle_count, circle_stat, adc min/max (6 words), tag_now, tag_old, timestamp
def slow_decode(aux):
a = [aux[2*ix]*256+aux[2*ix+1] for ix in range(0,2)] # circle_buf.v
b = [aux[2*ix]*256+aux[2*ix+1] for ix in range(2,8)]
b = [bb if bb<32767 else bb-65536 for bb in b] # 3 x ADC min/max
tag_now = aux[16]
tag_old = aux[17]
c = aux[26:17:-1] # timestamp.v
t = 0
for cc in c: t=t*256+cc
t = t/32 # five false bits at bottom; this converts to actual clock ticks
#if not plot_ena:
if 1:
print a, b, tag_now, tag_old, t
# New!
def acknowledge_buffer(s):
mem_gate_write(s,mem_gate_write_prep([0,0,0,0,0,0x3800,0],[0,0,0,0,0,1,0]));
def read_mem_buf(s):
res=[]
while (not mem_gate_read(s,range(0,32))[0]>>8&1):
print 'circular buffer not ready yet'
time.sleep(0.02)
aux = mem_gate_read(s,range(0x2011,0x2031)) # "slow" readout registers
if slow_ena:
slow_decode(aux)
for index in range(0x4000,0x6000,0x40):
res.extend(mem_gate_read(s,range(index,index+0x40)))
# assume 8 bits selected in ch_keep
acknowledge_buffer(s)
return [res,aux]
def setup_sock():
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, 0)
s.connect((IPADDR, PORTNUM))
# set up for address decoder in cryomodule.v, not larger.v
rom = mem_gate_read(s, range(0x10000,0x10000+48))
decode_lbnl_rom(rom)
return s
# Main procedure
def main(argv):
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
s = setup_sock()
if argv[0] == "config":
exit(0)
if plot_ena:
fig=plt.figure(1)
fig.show()
# send the address/value pairs created by param.py to the hardware
with open('larger_in.dat', 'r') as f:
addr_value=f.read().split('\n')
addr=[]
value=[]
for line in addr_value:
#if line:
# (ad, vd) = map(int, line.split())
# addr.append(ad)
# value.append(vd)
aa=line.split()
if aa:
addr.append(int(aa[0]))
v=int(aa[1])
if (v<0): v += 2**32
value.append(v)
mem_gate_write(s,mem_gate_write_prep(addr,value));
fcnt=0;
while (fcnt < 10 or plot_ena):
#mem_gate_write(s,mem_gate_write_prep(addr,value));
[res,aux]=read_mem_buf(s)
varray=numpy.array([x-65536 if x>32767 else x for x in res]).reshape([1024,8])
# numpy.savetxt("live%d.dat"%fcnt,res,'%6.0f')
fcnt += 1
if plot_ena:
plt.plot(varray)
fig.canvas.draw()
fig.clf()
else:
print "not a plot",fcnt
s.close()
if __name__ == "__main__":
argv = sys.argv[1:]
plot_ena = 'plot' in argv
slow_ena = 'slow' in argv
if plot_ena: from matplotlib import pyplot as plt
main(argv)
| " encode an integer as three bytes "
adx = struct.pack('!i',ad)
return adx[1:4] |
samplecontroller_client.go | /*
Copyright The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by client-gen. DO NOT EDIT.
package v1alpha1
import (
"net/http"
v1alpha1 "sample-controller/pkg/apis/samplecontroller/v1alpha1"
"sample-controller/pkg/generated/clientset/versioned/scheme"
rest "k8s.io/client-go/rest"
)
type SamplecontrollerV1alpha1Interface interface {
RESTClient() rest.Interface
StarsGetter
}
// SamplecontrollerV1alpha1Client is used to interact with features provided by the samplecontroller.k8s.io group.
type SamplecontrollerV1alpha1Client struct {
restClient rest.Interface
}
func (c *SamplecontrollerV1alpha1Client) Stars(namespace string) StarInterface {
return newStars(c, namespace)
}
// NewForConfig creates a new SamplecontrollerV1alpha1Client for the given config.
// NewForConfig is equivalent to NewForConfigAndClient(c, httpClient),
// where httpClient was generated with rest.HTTPClientFor(c).
func NewForConfig(c *rest.Config) (*SamplecontrollerV1alpha1Client, error) {
config := *c
if err := setConfigDefaults(&config); err != nil {
return nil, err
}
httpClient, err := rest.HTTPClientFor(&config)
if err != nil {
return nil, err
}
return NewForConfigAndClient(&config, httpClient)
}
// NewForConfigAndClient creates a new SamplecontrollerV1alpha1Client for the given config and http client.
// Note the http client provided takes precedence over the configured transport values.
func NewForConfigAndClient(c *rest.Config, h *http.Client) (*SamplecontrollerV1alpha1Client, error) {
config := *c
if err := setConfigDefaults(&config); err != nil |
client, err := rest.RESTClientForConfigAndClient(&config, h)
if err != nil {
return nil, err
}
return &SamplecontrollerV1alpha1Client{client}, nil
}
// NewForConfigOrDie creates a new SamplecontrollerV1alpha1Client for the given config and
// panics if there is an error in the config.
func NewForConfigOrDie(c *rest.Config) *SamplecontrollerV1alpha1Client {
client, err := NewForConfig(c)
if err != nil {
panic(err)
}
return client
}
// New creates a new SamplecontrollerV1alpha1Client for the given RESTClient.
func New(c rest.Interface) *SamplecontrollerV1alpha1Client {
return &SamplecontrollerV1alpha1Client{c}
}
func setConfigDefaults(config *rest.Config) error {
gv := v1alpha1.SchemeGroupVersion
config.GroupVersion = &gv
config.APIPath = "/apis"
config.NegotiatedSerializer = scheme.Codecs.WithoutConversion()
if config.UserAgent == "" {
config.UserAgent = rest.DefaultKubernetesUserAgent()
}
return nil
}
// RESTClient returns a RESTClient that is used to communicate
// with API server by this client implementation.
func (c *SamplecontrollerV1alpha1Client) RESTClient() rest.Interface {
if c == nil {
return nil
}
return c.restClient
}
| {
return nil, err
} |
crd.go | /*
Copyright 2018 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package source
import (
"fmt"
"os"
"strings"
"time"
log "github.com/sirupsen/logrus"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/runtime/schema"
"k8s.io/apimachinery/pkg/runtime/serializer"
"k8s.io/client-go/kubernetes"
"k8s.io/client-go/rest"
"k8s.io/client-go/tools/clientcmd"
"sigs.k8s.io/external-dns/endpoint"
)
// crdSource is an implementation of Source that provides endpoints by listing
// specified CRD and fetching Endpoints embedded in Spec.
type crdSource struct {
crdClient rest.Interface
namespace string
crdResource string
codec runtime.ParameterCodec
}
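// Typical wiring (sketch; assumes a configured kubernetes.Interface named client):
//   crdClient, scheme, _ := NewCRDClientForAPIVersionKind(client, "", "", "externaldns.k8s.io/v1alpha1", "DNSEndpoint")
//   src, _ := NewCRDSource(crdClient, "default", "DNSEndpoint", scheme)
//   eps, _ := src.Endpoints()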
func addKnownTypes(scheme *runtime.Scheme, groupVersion schema.GroupVersion) error {
scheme.AddKnownTypes(groupVersion,
&endpoint.DNSEndpoint{},
&endpoint.DNSEndpointList{},
)
metav1.AddToGroupVersion(scheme, groupVersion)
return nil
}
// NewCRDClientForAPIVersionKind returns a REST client for the given apiVersion and kind of the CRD
func NewCRDClientForAPIVersionKind(client kubernetes.Interface, kubeConfig, kubeMaster, apiVersion, kind string) (*rest.RESTClient, *runtime.Scheme, error) {
if kubeConfig == "" {
if _, err := os.Stat(clientcmd.RecommendedHomeFile); err == nil {
kubeConfig = clientcmd.RecommendedHomeFile
}
}
config, err := clientcmd.BuildConfigFromFlags(kubeMaster, kubeConfig)
if err != nil {
return nil, nil, err
}
groupVersion, err := schema.ParseGroupVersion(apiVersion)
if err != nil {
return nil, nil, err
}
apiResourceList, err := client.Discovery().ServerResourcesForGroupVersion(groupVersion.String())
if err != nil {
return nil, nil, fmt.Errorf("error listing resources in GroupVersion %q: %s", groupVersion.String(), err)
}
var crdAPIResource *metav1.APIResource
for _, apiResource := range apiResourceList.APIResources {
if apiResource.Kind == kind {
crdAPIResource = &apiResource
break
}
}
if crdAPIResource == nil {
return nil, nil, fmt.Errorf("unable to find Resource Kind %q in GroupVersion %q", kind, apiVersion)
}
scheme := runtime.NewScheme()
if err := addKnownTypes(scheme, groupVersion); err != nil {
return nil, nil, err
}
config.ContentConfig.GroupVersion = &groupVersion
config.APIPath = "/apis"
config.NegotiatedSerializer = serializer.DirectCodecFactory{CodecFactory: serializer.NewCodecFactory(scheme)}
crdClient, err := rest.UnversionedRESTClientFor(config)
if err != nil {
return nil, nil, err
}
return crdClient, scheme, nil
}
// NewCRDSource creates a new crdSource with the given config.
func NewCRDSource(crdClient rest.Interface, namespace, kind string, scheme *runtime.Scheme) (Source, error) {
return &crdSource{
crdResource: strings.ToLower(kind) + "s",
namespace: namespace,
crdClient: crdClient,
codec: runtime.NewParameterCodec(scheme),
}, nil
}
func (cs *crdSource) AddEventHandler(handler func() error, stopChan <-chan struct{}, minInterval time.Duration) {
}
// Endpoints returns endpoint objects.
func (cs *crdSource) Endpoints() ([]*endpoint.Endpoint, error) {
endpoints := []*endpoint.Endpoint{}
result, err := cs.List(&metav1.ListOptions{})
if err != nil {
return nil, err
}
for _, dnsEndpoint := range result.Items {
// Make sure that all A, AAAA and CNAME endpoints have at least one target
crdEndpoints := []*endpoint.Endpoint{}
for _, ep := range dnsEndpoint.Spec.Endpoints {
if (ep.RecordType == "CNAME" || ep.RecordType == "A" || ep.RecordType == "AAAA") && len(ep.Targets) < 1 {
log.Warnf("Endpoint %s with DNSName %s has an empty list of targets", dnsEndpoint.ObjectMeta.Name, ep.DNSName)
continue
}
illegalTarget := false
for _, target := range ep.Targets {
if strings.HasSuffix(target, ".") {
illegalTarget = true
break
}
}
if illegalTarget {
log.Warnf("Endpoint %s with DNSName %s has an illegal target. The subdomain must consist of lower case alphanumeric characters, '-' or '.', and must start and end with an alphanumeric character (e.g. 'example.com')", dnsEndpoint.ObjectMeta.Name, ep.DNSName)
continue
}
if ep.Labels == nil {
ep.Labels = endpoint.NewLabels()
}
crdEndpoints = append(crdEndpoints, ep)
}
cs.setResourceLabel(&dnsEndpoint, crdEndpoints)
endpoints = append(endpoints, crdEndpoints...)
if dnsEndpoint.Status.ObservedGeneration == dnsEndpoint.Generation {
continue
}
// Update the ObservedGeneration
dnsEndpoint.Status.ObservedGeneration = dnsEndpoint.Generation
_, err = cs.UpdateStatus(&dnsEndpoint)
if err != nil {
log.Warnf("Could not update ObservedGeneration of the CRD: %v", err)
}
}
return endpoints, nil
}
func (cs *crdSource) setResourceLabel(crd *endpoint.DNSEndpoint, endpoints []*endpoint.Endpoint) {
for _, ep := range endpoints {
ep.Labels[endpoint.ResourceLabelKey] = fmt.Sprintf("crd/%s/%s", crd.ObjectMeta.Namespace, crd.ObjectMeta.Name)
}
}
func (cs *crdSource) List(opts *metav1.ListOptions) (result *endpoint.DNSEndpointList, err error) {
result = &endpoint.DNSEndpointList{}
err = cs.crdClient.Get().
Namespace(cs.namespace).
Resource(cs.crdResource).
VersionedParams(opts, cs.codec).
Do().
Into(result)
return
}
func (cs *crdSource) UpdateStatus(dnsEndpoint *endpoint.DNSEndpoint) (result *endpoint.DNSEndpoint, err error) {
result = &endpoint.DNSEndpoint{}
err = cs.crdClient.Put().
Namespace(dnsEndpoint.Namespace).
Resource(cs.crdResource).
Name(dnsEndpoint.Name).
SubResource("status").
Body(dnsEndpoint).
Do().
Into(result)
return | } |
|
test_total_tree_length_integration.py | from mock import patch, call
from pathlib import Path
from textwrap import dedent
from phykit.phykit import Phykit
here = Path(__file__)
@pytest.mark.integration
class TestTotalTreeLength(object):
@patch("builtins.print")
def test_total_tree_length0(self, mocked_print):
expected_result = 277.2772
testargs = [
"phykit",
"total_tree_length",
f"{here.parent.parent.parent}/sample_files/tree_simple.tre",
]
with patch.object(sys, "argv", testargs):
Phykit()
assert mocked_print.mock_calls == [call(expected_result)]
@patch("builtins.print")
def test_total_tree_length1(self, mocked_print):
expected_result = 0.0675
testargs = [
"phykit",
"tree_len",
f"{here.parent.parent.parent}/sample_files/small_Aspergillus_tree.tre",
]
with patch.object(sys, "argv", testargs):
Phykit()
assert mocked_print.mock_calls == [call(expected_result)]
@patch("builtins.print")
def test_total_tree_length_alias(self, mocked_print):
expected_result = 277.2772
testargs = [
"phykit",
"tree_len",
f"{here.parent.parent.parent}/sample_files/tree_simple.tre",
]
with patch.object(sys, "argv", testargs):
Phykit()
assert mocked_print.mock_calls == [call(expected_result)]
@patch("builtins.print")
def test_total_tree_length_incorrect_file_path(self, mocked_print):
testargs = [
"phykit",
"total_tree_length",
f"{here.parent.parent.parent}/sample_files/tree_simple.tr",
]
with patch.object(sys, "argv", testargs):
with pytest.raises(SystemExit) as pytest_wrapped_e:
Phykit()
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == 2 | import pytest
import sys
from math import isclose |
|
runner.rs | use super::filter::*;
use super::handler::tid_2_contig;
use super::handler::Nascent;
use rust_htslib::bam;
use rust_htslib::prelude::*;
use std::str;
/// Iterates through bam records and annotates each read with the T>>C conversion count.
///
/// This this is the core of the tcq executable. It will write a bam to the output
/// file specified by `ob`.
///
/// 1. ib: path for input bam with unannotated reads
/// 1. ob: output bam with annotated reads
/// 1. tag: preferred aux tag in which to store conversion count for each read
/// 1. p: threads to use for reading and writing
/// 1. blk: path to optional indexed vcf/bcf blacklist of individual positions to exclude
/// 1. mq: minimum read mapq
///
/// # Example (compiled, not run)
/// ```rust,no_run
/// use tcq::runner;
/// runner::run_through_bam("in.bam", "out.bam", "XZ", 4, Some("filt.bcf"), 30);
/// ```
pub fn run_through_bam(
ib: &str,
ob: &str,
tag: &str,
p: usize,
blk: Option<&str>,
mq: u8,
) {
info!("beginning run...");
// Create a working filter when a blacklist was given, otherwise none.
let filt: Option<ConvFilter> = match blk {
Some(b) => {
info!("creating filter...");
Some(ConvFilter::from_vcf_path(b, p).unwrap())
}
None => None,
};
info!("opening bams...");
// Make bam reader
let mut bam = bam::Reader::from_path(ib).unwrap();
// Make bam header for writing output and seqname/target id conversion.
let hdr = bam::header::Header::from_template(bam.header());
let hdrv = bam.header().to_owned();
// Initialize bam writer.
let mut obam = bam::Writer::from_path(ob, &hdr).unwrap();
// Create lookup hash table for converting TID to human readable chrom name.
let tid_lookup = tid_2_contig(&hdrv);
info!("setting thread usage...");
// Set thread usage for reading/writing bam.
if p >= 2 {
let p2 = p / 2; // integer halving; any odd remainder goes to the writer below
bam.set_threads(p2).unwrap();
obam.set_threads(p - p2).unwrap();
} else {
bam.set_threads(1).unwrap();
obam.set_threads(1).unwrap();
}
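// e.g. p = 4 gives the reader and writer two threads each; p = 5 gives the writer the extra one.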
// Begin iterator chain that processes each read individually. | // 1-unwrap read
// 2-make sure it is mapped
// 3-confirm mapq > cutoff
// 4-annotate with conversion count (see tcq::handler)
// 5-write read to bam
info!("annotating reads with t>>c conversions...");
let mut r: bam::Record = bam::Record::new();
while let Ok(_r) = bam.read(&mut r) {
if r.is_unmapped() || r.mapq() < mq {
continue;
}
r.push_tc_conv_aux(tag.as_bytes(), &filt, &tid_lookup).unwrap();
obam.write(&r).unwrap()
}
} | |
codec.go | package types
import (
"github.com/cosmos/cosmos-sdk/codec"
cdctypes "github.com/cosmos/cosmos-sdk/codec/types"
sdk "github.com/cosmos/cosmos-sdk/types"
"github.com/cosmos/cosmos-sdk/types/msgservice"
)
func RegisterCodec(cdc *codec.LegacyAmino) {
cdc.RegisterConcrete(&MsgCreateDso{}, "pm/CreateDso", nil)
cdc.RegisterConcrete(&MsgUpdateDso{}, "pm/UpdateDso", nil)
cdc.RegisterConcrete(&MsgDeleteDso{}, "pm/DeleteDso", nil)
cdc.RegisterConcrete(&MsgCreateAggregator{}, "pm/CreateAggregator", nil)
cdc.RegisterConcrete(&MsgUpdateAggregator{}, "pm/UpdateAggregator", nil)
cdc.RegisterConcrete(&MsgDeleteAggregator{}, "pm/DeleteAggregator", nil)
cdc.RegisterConcrete(&MsgCreatePlayer{}, "pm/CreatePlayer", nil)
cdc.RegisterConcrete(&MsgUpdatePlayer{}, "pm/UpdatePlayer", nil)
cdc.RegisterConcrete(&MsgDeletePlayer{}, "pm/DeletePlayer", nil)
cdc.RegisterConcrete(&MsgCreateLem{}, "pm/CreateLem", nil)
cdc.RegisterConcrete(&MsgUpdateLem{}, "pm/UpdateLem", nil)
cdc.RegisterConcrete(&MsgDeleteLem{}, "pm/DeleteLem", nil)
cdc.RegisterConcrete(&MsgCreateLemMeasure{}, "pm/CreateLemMeasure", nil)
cdc.RegisterConcrete(&MsgUpdateLemMeasure{}, "pm/UpdateLemMeasure", nil)
cdc.RegisterConcrete(&MsgDeleteLemMeasure{}, "pm/DeleteLemMeasure", nil)
cdc.RegisterConcrete(&MsgCreateSla{}, "pm/CreateSla", nil)
cdc.RegisterConcrete(&MsgUpdateSla{}, "pm/UpdateSla", nil) | cdc.RegisterConcrete(&MsgUpdateKpi{}, "pm/UpdateKpi", nil)
cdc.RegisterConcrete(&MsgDeleteKpi{}, "pm/DeleteKpi", nil)
cdc.RegisterConcrete(&MsgCreateKpiMeasure{}, "pm/CreateKpiMeasure", nil)
cdc.RegisterConcrete(&MsgUpdateKpiMeasure{}, "pm/UpdateKpiMeasure", nil)
cdc.RegisterConcrete(&MsgDeleteKpiMeasure{}, "pm/DeleteKpiMeasure", nil)
cdc.RegisterConcrete(&MsgCreateLemDataset{}, "pm/CreateLemDataset", nil)
cdc.RegisterConcrete(&MsgUpdateLemDataset{}, "pm/UpdateLemDataset", nil)
cdc.RegisterConcrete(&MsgDeleteLemDataset{}, "pm/DeleteLemDataset", nil)
cdc.RegisterConcrete(&MsgCreateDefaultLemPars{}, "pm/CreateDefaultLemPars", nil)
cdc.RegisterConcrete(&MsgUpdateDefaultLemPars{}, "pm/UpdateDefaultLemPars", nil)
cdc.RegisterConcrete(&MsgDeleteDefaultLemPars{}, "pm/DeleteDefaultLemPars", nil)
cdc.RegisterConcrete(&MsgCreateMarketOperator{}, "pm/CreateMarketOperator", nil)
cdc.RegisterConcrete(&MsgUpdateMarketOperator{}, "pm/UpdateMarketOperator", nil)
cdc.RegisterConcrete(&MsgDeleteMarketOperator{}, "pm/DeleteMarketOperator", nil)
cdc.RegisterConcrete(&MsgCreateGridState{}, "pm/CreateGridState", nil)
cdc.RegisterConcrete(&MsgUpdateGridState{}, "pm/UpdateGridState", nil)
cdc.RegisterConcrete(&MsgDeleteGridState{}, "pm/DeleteGridState", nil)
cdc.RegisterConcrete(&MsgCreateKpiFeatures{}, "pm/CreateKpiFeatures", nil)
cdc.RegisterConcrete(&MsgUpdateKpiFeatures{}, "pm/UpdateKpiFeatures", nil)
cdc.RegisterConcrete(&MsgDeleteKpiFeatures{}, "pm/DeleteKpiFeatures", nil)
cdc.RegisterConcrete(&MsgCreateForecast{}, "pm/CreateForecast", nil)
cdc.RegisterConcrete(&MsgUpdateForecast{}, "pm/UpdateForecast", nil)
cdc.RegisterConcrete(&MsgDeleteForecast{}, "pm/DeleteForecast", nil)
// this line is used by starport scaffolding # 2
}
func RegisterInterfaces(registry cdctypes.InterfaceRegistry) {
registry.RegisterImplementations((*sdk.Msg)(nil),
&MsgCreateDso{},
&MsgUpdateDso{},
&MsgDeleteDso{},
)
registry.RegisterImplementations((*sdk.Msg)(nil),
&MsgCreateAggregator{},
&MsgUpdateAggregator{},
&MsgDeleteAggregator{},
)
registry.RegisterImplementations((*sdk.Msg)(nil),
&MsgCreatePlayer{},
&MsgUpdatePlayer{},
&MsgDeletePlayer{},
)
registry.RegisterImplementations((*sdk.Msg)(nil),
&MsgCreateLem{},
&MsgUpdateLem{},
&MsgDeleteLem{},
)
registry.RegisterImplementations((*sdk.Msg)(nil),
&MsgCreateLemMeasure{},
&MsgUpdateLemMeasure{},
&MsgDeleteLemMeasure{},
)
registry.RegisterImplementations((*sdk.Msg)(nil),
&MsgCreateSla{},
&MsgUpdateSla{},
&MsgDeleteSla{},
)
registry.RegisterImplementations((*sdk.Msg)(nil),
&MsgCreateKpi{},
&MsgUpdateKpi{},
&MsgDeleteKpi{},
)
registry.RegisterImplementations((*sdk.Msg)(nil),
&MsgCreateKpiMeasure{},
&MsgUpdateKpiMeasure{},
&MsgDeleteKpiMeasure{},
)
registry.RegisterImplementations((*sdk.Msg)(nil),
&MsgCreateLemDataset{},
&MsgUpdateLemDataset{},
&MsgDeleteLemDataset{},
)
registry.RegisterImplementations((*sdk.Msg)(nil),
&MsgCreateDefaultLemPars{},
&MsgUpdateDefaultLemPars{},
&MsgDeleteDefaultLemPars{},
)
registry.RegisterImplementations((*sdk.Msg)(nil),
&MsgCreateMarketOperator{},
&MsgUpdateMarketOperator{},
&MsgDeleteMarketOperator{},
)
registry.RegisterImplementations((*sdk.Msg)(nil),
&MsgCreateGridState{},
&MsgUpdateGridState{},
&MsgDeleteGridState{},
)
registry.RegisterImplementations((*sdk.Msg)(nil),
&MsgCreateKpiFeatures{},
&MsgUpdateKpiFeatures{},
&MsgDeleteKpiFeatures{},
)
registry.RegisterImplementations((*sdk.Msg)(nil),
&MsgCreateForecast{},
&MsgUpdateForecast{},
&MsgDeleteForecast{},
)
// this line is used by starport scaffolding # 3
msgservice.RegisterMsgServiceDesc(registry, &_Msg_serviceDesc)
}
var (
amino = codec.NewLegacyAmino()
ModuleCdc = codec.NewProtoCodec(cdctypes.NewInterfaceRegistry())
) | cdc.RegisterConcrete(&MsgDeleteSla{}, "pm/DeleteSla", nil)
cdc.RegisterConcrete(&MsgCreateKpi{}, "pm/CreateKpi", nil) |
t.js | var tests = [
{
name: 'minimal',
jsonform: {
schema: {
mainappcolor: {
type: 'string',
title: 'One color',
format: 'color',
'default': 'blue'
}
}
}
},
{
name: 'form',
jsonform: {
schema: {
mainappcolor: {
type: 'string',
title: 'One color'
}
}, | }
]
}
}
]; | form: [
{
key: 'mainappcolor',
type: 'color' |
shape.rs | pub fn shape_4d(grid: &Vec<Vec<Vec<Vec<u8>>>>) -> Vec<usize> {
return vec![grid.len(), grid[0].len(), grid[0][0].len(), grid[0][0][0].len()];
}
pub fn shape_3d(grid: &Vec<Vec<Vec<u8>>>) -> Vec<usize> {
return vec![grid.len(), grid[0].len(), grid[0][0].len()]; | } | }
pub fn shape_2d(grid: &Vec<Vec<u8>>) -> Vec<usize> {
return vec![grid.len(), grid[0].len()]; |
CloudRain.tsx | /* GENERATED FILE */
import * as React from "react";
import Svg, { Rect, Path, Line } from "react-native-svg";
import { IconProps } from '../lib'
function CloudRain(props: IconProps) {
return <Svg viewBox="0 0 256 256" width={props.size} height={props.size}
fill={props.color} {...props}><Rect width={256} height={256} fill="none" /><Path d="M88,92a68,68,0,1,1,68,68H76a44,44,0,0,1,0-88,42.5,42.5,0,0,1,14.3,2.4" opacity={0.2} /><Line x1={128} y1={240} x2={154.3} y2={200} fill="none" stroke={props.color} strokeLinecap="round" strokeLinejoin="round" strokeWidth={16} /><Line x1={98.7} y1={208} x2={130.3} y2={160} fill="none" stroke={props.color} strokeLinecap="round" strokeLinejoin="round" strokeWidth={16} /><Path d="M88,92a68,68,0,1,1,68,68H76a44,44,0,0,1,0-88,42.5,42.5,0,0,1,14.3,2.4" fill="none" stroke={props.color} strokeLinecap="round" strokeLinejoin="round" strokeWidth={16} /></Svg>; | }
export default CloudRain; | |
algo_6_3.py | # coding=utf-8
from common.BNFParser import *
from common.Grammar import Grammar
# Compute the set of nullable (erasable) variables of grammar G
# The algorithm depends only on G's set of productions P
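# e.g. with D -> "ε" and C -> D C, first D and then C are marked nullable.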
def algo_6_3(P):
"""
Test data comes from Chapter 6, |
>>> from common.production import Production
>>> p1 = Production(['S'], [['A', 'B', 'D', 'C']])
>>> p2 = Production(['A'], [['B', 'D'], ['\\"a\\"', '\\"a\\"'], ['\\"ε\\"']])
>>> p3 = Production(['B'], [['\\"a\\"', 'B'], ['\\"a\\"']])
>>> p4 = Production(['C'], [['D','C'], ['\\"c\\"'], ['\\"ε\\"']])
>>> p5 = Production(['D'], [['\\"ε\\"']])
>>> p = [p1, p2, p3, p4, p5]
>>> u = algo_6_3(p)
>>> set(u) == set(['A', 'C', 'D'])
True
"""
simple_plist = []
for p in P:
simple_plist.extend(Production.toSimpleProduction(p))
old_u = set()
new_u = set()
for p in simple_plist:
if Production.isDirectEmpty(p):
new_u.add(p.left[0])
while new_u != old_u:
old_u = set(new_u)  # take a copy, not an alias, so the while-condition actually detects changes
for p in simple_plist:
if set(p.right[0]) <= old_u:
new_u.add(p.left[0])
return new_u
| Exercise 12(2)
|
namespace.py | # This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
class FactNamespace:
def __init__(self, namespace_name):
self.namespace_name = namespace_name
def transform(self, name):
'''Take a text name and transform it as needed (add a namespace prefix, etc.)'''
return name
def _underscore(self, name):
return name.replace('-', '_')
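# PrefixFactNamespace underscores dashes and then prepends the prefix, e.g.
# PrefixFactNamespace('ansible', prefix='ansible_') maps 'distribution-version'
# to 'ansible_distribution_version'.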
class PrefixFactNamespace(FactNamespace):
def __init__(self, namespace_name, prefix=None):
super(PrefixFactNamespace, self).__init__(namespace_name)
self.prefix = prefix
def | (self, name):
new_name = self._underscore(name)
return '%s%s' % (self.prefix, new_name)
| transform |
querys-principal.service.ts | import {
AngularFirestoreCollection,
AngularFirestore
} from 'angularfire2/firestore';
import {
Injectable
} from '@angular/core';
import {
Observable
} from 'rxjs/Observable';
import {
Http
} from '@angular/http';
import {
URLCORREO
} from '../../../config';
import { promise } from 'protractor';
@Injectable()
export class | {
// QUERY INITIALIZATION FOR LABORATORIES
private labsCollection: AngularFirestoreCollection<any>;
labs: Observable<any[]>;
// QUERY INITIALIZATION FOR SERVICES
private servsCollection: AngularFirestoreCollection<any>;
servs: Observable<any[]>;
// QUERY INITIALIZATION FOR TESTS (PRUEBAS)
private pruebasCollection: AngularFirestoreCollection<any>;
pruebas: Observable<any[]>;
datosLabsEstructurados = [];
datosServEstructurados = [];
datosPrubEstructurados = [];
constructor(private afs: AngularFirestore, private http: Http) {
}
// METHOD THAT FETCHES THE CACHE DOCUMENT OF ALL LABORATORIES
getLaboratorios() {
const colle = this.afs.collection('cache').doc('cfFacil');
return colle.ref.get();
}
// METHOD THAT FETCHES THE CACHE DOCUMENT OF ALL SERVICES
getServicios() {
const colle = this.afs.collection('cache').doc('cfSrv');
return colle.ref.get();
}
// METHOD THAT FETCHES THE CACHE DOCUMENT OF ALL PRACTICES
getPractices() {
const colle = this.afs.collection('cache').doc('practice');
return colle.ref.get();
}
// tslint:disable-next-line:member-ordering
private racesCollection: AngularFirestoreCollection<any>;
// METHOD THAT FETCHES ALL ACTIVE TESTS (PRUEBAS)
getPruebas() {
const coll = this.afs.collection('practice');
const ref = coll.ref.where('active', '==', true);
return ref.get();
}
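// Usage sketch (assumes this service is injected as querysService):
//   this.querysService.getLaboratorios()
//     .then(snap => this.querysService.estructurarDataLab(snap))
//     .then((res: any) => this.labs = res.data);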
// structure lab data, v2.0
async estructurarDataCf(data: any) {
console.log('entro');
this.datosLabsEstructurados = [];
return new Promise((resolve, reject) => {
data.forEach(doc => {
const elemento = doc.data();
if (elemento.facilityAdmin === '') {
console.log(doc.id);
}
this.buscarDirector(elemento.facilityAdmin).then(adminlab => {
const duenoLab = adminlab.data();
console.log(adminlab.exists);
this.datosLabsEstructurados.push(elemento);
// console.log(this.datosLabsEstructurados, data.size);
if (this.datosLabsEstructurados.length === data.size) {
resolve({
data: this.datosLabsEstructurados
});
}
});
});
});
}
// METHOD THAT SHAPES THE DATA FOR THE LABORATORY SEARCH VIEW
estructurarDataLab(data: any) {
this.datosLabsEstructurados = [];
var laboratorios = data.data()
const promise = new Promise((resolve, reject) => {
var cont = 0
var datasize = Object.keys(laboratorios)
for (const key in laboratorios) {
if (laboratorios.hasOwnProperty(key)) {
const laboratorio = laboratorios[key];
if (laboratorio.active) {
this.datosLabsEstructurados.push(laboratorio);
}
cont++
if (cont === datasize.length) {
resolve({
data: this.datosLabsEstructurados
});
}
}
}
});
return promise;
}
// METHOD THAT SHAPES THE DATA FOR THE LABORATORY ADMIN VIEW
estructurarDataLabAdmin(data: any) {
this.datosLabsEstructurados = []
var laboratorios = data.data()
const promise = new Promise((resolve, reject) => {
var cont = 0
var datasize = Object.keys(laboratorios)
for (const key in laboratorios) {
if (laboratorios.hasOwnProperty(key)) {
const laboratorio = laboratorios[key]
if (laboratorio.active) {
laboratorio.active = 'Activo'
} else {
laboratorio.active = 'Inactivo'
}
this.datosLabsEstructurados.push(laboratorio)
cont++
if (cont === datasize.length) {
resolve({
data: this.datosLabsEstructurados
})
}
}
}
});
return promise
}
// METHOD THAT SHAPES THE DATA FOR THE SERVICE SEARCH VIEW
estructurarDataServ(data: any) {
this.datosServEstructurados = [];
var servicios = data.data()
const promise = new Promise((resolve, reject) => {
var cont = 0
var datasize = Object.keys(servicios)
for (const key in servicios) {
if (servicios.hasOwnProperty(key)) {
const laboratorio = servicios[key];
if (laboratorio.active) {
this.datosServEstructurados.push(laboratorio);
}
cont++
if (cont === datasize.length) {
resolve({
data: this.datosServEstructurados
});
}
}
}
});
return promise;
}
// METHOD THAT BUILDS THE DETAIL DATA FOR THE SERVICE SEARCH VIEW
getDataServ(data: any) {
return this.buscarServicio(data.uid).then(serv => {
const elemento = serv.data();
if (elemento.active) {
if (elemento.cfFacil) {
return this.buscarLaboratorio(elemento.cfFacil).then(lab => {
const labencontrado = lab.data();
if (labencontrado) {
return this.buscarDirector(labencontrado.facilityAdmin).then(dueno => {
const duenoLab = dueno.data();
if (duenoLab && labencontrado.mainSpace) {
return this.buscarEspacio(labencontrado.mainSpace).then(espacio => {
const espacioLab = espacio.data();
return this.buscarDireccion(labencontrado.headquarter, labencontrado.subHq, labencontrado.mainSpace).then(direspa => {
return this.estructuraTelefonos(elemento.cfFacil).then(snapTelefonos => {
const servicios = {
nombreserv: elemento.cfName,
nombrelab: labencontrado.cfName,
infoServ: {
descripcion: elemento.cfDesc,
precio: elemento.cfPrice,
variaciones: this.variations(serv.id),
equipos: this.estructurarEquipos(elemento.relatedEquipments),
condiciones: elemento.cfCondition,
parametros: elemento.parametros,
descuento: elemento.descuento,
uid: serv.id
},
infoLab: {
uid: elemento.cfFacil,
direspacio: direspa,
telefonos: snapTelefonos,
personal: this.buscarAnalistas(labencontrado.relatedPers),
iddirecto: labencontrado.facilityAdmin,
desc: labencontrado.cfDescr,
email: labencontrado.otros.email,
escuela: labencontrado.knowledgeArea,
inves: labencontrado.researchGroup,
director: duenoLab.cfFirstNames + ' ' + duenoLab.cfFamilyNames,
emaildir: duenoLab.email,
condiciones: labencontrado.cfConditions,
disponibilidad: labencontrado.cfAvailability
},
coord: {
lat: espacioLab.spaceData.geoRep.longitud,
lon: espacioLab.spaceData.geoRep.latitud
}
};
return servicios
})
});
});
}
});
}
});
}
}
})
}
// METHOD THAT SHAPES THE DATA FOR THE TEST (PRUEBAS) SEARCH VIEW
estructurarDataPruebas(data: any) {
var practicas = data.data()
const promise = new Promise((resolve, reject) => {
this.datosLabsEstructurados = [];
var cont = 0
var datasize = Object.keys(practicas)
for (const key in practicas) {
if (practicas.hasOwnProperty(key)) {
const laboratorio = practicas[key];
if (laboratorio.active) {
this.datosLabsEstructurados.push(laboratorio);
}
cont++
console.log(practicas, cont, datasize.length, this.datosLabsEstructurados)
if (cont === datasize.length) {
resolve({
data: this.datosLabsEstructurados
});
}
}
}
});
return promise;
}
// METHOD THAT BUILDS THE DETAIL DATA FOR THE TEST SEARCH VIEW
getDataPractice(data: any) {
const promise = new Promise((resolve, reject) => {
this.afs.doc('practice/' + data.uid).ref.get().then(practiceSnap => {//programmingData
this.afs.collection('practice/' + data.uid + '/programmingData').ref.get().then(programmingDataSnap => {
const practica = practiceSnap.data()
programmingDataSnap.forEach(programing => {
const prog = programing.data();
this.buscarLaboratorio(practica.cfFacil).then(lab => {
const labencontrado = lab.data();
this.buscarDirector(labencontrado.facilityAdmin).then(dueno => {
const duenoLab = dueno.data();
if (duenoLab && labencontrado.mainSpace) {
this.buscarEspacio(labencontrado.mainSpace).then(espacio => {
const espacioLab = espacio.data();
const pruebas = {
nombreprub: practica.practiceName,
nombrelab: labencontrado.cfName,
infoPrub: {
programacion: {
id_pro: programing.id,
estudiantes: prog.noStudents,
horario: prog.schedule,
semestre: prog.semester
}
},
infoLab: {
dir: labencontrado.otros.direccion,
desc: labencontrado.cfDescr,
telefonos: this.estructuraTelefonos(practica.cfFacil),
email: labencontrado.otros.email,
escuela: labencontrado.knowledgeArea,
inves: labencontrado.researchGroup,
director: duenoLab.cfFirstNames + ' ' + duenoLab.cfFamilyNames,
condiciones: labencontrado.cfConditions,
disponibilidad: labencontrado.cfAvailability
},
coord: {
lat: espacioLab.spaceData.geoRep.longitud,
lon: espacioLab.spaceData.geoRep.latitud
}
};
resolve(pruebas);
});
}
});
});
})
})
}, err => console.log(err));
});
return promise;
}
// METHOD THAT SHAPES THE EQUIPMENT DATA SHOWN IN THE LABORATORY SEARCH VIEW
// RECEIVES THE MAP OF EQUIPMENT IDS ASSOCIATED WITH A SERVICE
estructurarEquipos(item) {
const arr = [];
for (const clave in item) {
// Make sure the object really has this property as its own
if (item.hasOwnProperty(clave)) {
if (item[clave]) {
this.afs.doc('cfEquip/' + clave).ref.get().then(data => {
const equip = data.data();
// build a minimal equipment record (name and description)
const equipo = {
nombre: equip.cfName,
descripcion: equip.cfDescr,
};
arr.push(equipo);
});
}
}
}
return arr;
}
getDataLab(lab: any) {
console.log(lab)
return this.buscarLaboratorio(lab.uid).then(labsnapshot => {
const elemento = labsnapshot.data()
console.log(elemento)
if (elemento.facilityAdmin !== '') {
return this.buscarDirector(elemento.facilityAdmin).then(dueno => {
const duenoLab = dueno.data();
var promesas = []
var disponibilidad = []
if (elemento.cfAvailability) {
disponibilidad = elemento.cfAvailability
}
console.log(443, elemento.relatedServices, elemento.relatedPractices, elemento.relatedPractices)
promesas.push(this.estructurarServicios(elemento.relatedServices))
promesas.push(this.estructurarPracticas(elemento.relatedPractices))
promesas.push(this.estructuraTelefonos(elemento.relatedPractices))
let laboratorio = {
uid: labsnapshot.id,
nombre: elemento.cfName,
escuela: elemento.knowledgeArea !== '' ? elemento.knowledgeArea : 'ninguno',
inves: elemento.researchGroup !== '' ? elemento.researchGroup : 'ninguno',
iddirecto: elemento.facilityAdmin,
desc: elemento.cfDescr,
direspacio: {},
director: '',
emaildir: '',
coord: {
lat: 0,
lon: 0
},
info: {
email: elemento.otros.email
},
personal: this.buscarAnalistas(elemento.relatedPers),
condiciones: elemento.cfConditions,
disponibilidad: disponibilidad
};
if (duenoLab && elemento.otros) {
laboratorio.director = duenoLab.cfFirstNames + ' ' + duenoLab.cfFamilyNames;
laboratorio.emaildir = duenoLab.email;
}
if (elemento.mainSpace !== '') {
var buscarespacios = this.buscarEspacio(elemento.mainSpace).then(espacio => {
const espacioLab = espacio.data();
return this.buscarDireccion(elemento.headquarter, elemento.subHq, elemento.mainSpace).then(direspa => {
laboratorio.direspacio = direspa;
laboratorio.coord.lat = espacioLab.spaceData.geoRep ? espacioLab.spaceData.geoRep.longitud : 0;
laboratorio.coord.lon = espacioLab.spaceData.geoRep ? espacioLab.spaceData.geoRep.latitud : 0;
});
})
promesas.push(buscarespacios)
}
return Promise.all(promesas).then(values => {
console.log(439, values); // [3, 1337, "foo"]
laboratorio['servicios'] = values[0]
laboratorio['practicas'] = values[1]
laboratorio['telefonos'] = values[2]
return laboratorio
}).catch(err => {
console.log(504, err)
});
});
}
})
}
// METHOD THAT FETCHES A SPECIFIC LABORATORY BY ITS LABORATORY ID
buscarLaboratorio(idLab) {
return this.afs.doc('cfFacil/' + idLab).ref.get();
}
// METHOD THAT FETCHES A SPECIFIC SERVICE BY ITS SERVICE ID
buscarServicio(idService) {
return this.afs.doc('cfSrv/' + idService).ref.get();
}
// METHOD THAT FETCHES A SPECIFIC DIRECTOR BY THEIR DIRECTOR ID
buscarDirector(iddirector) {
return this.afs.doc('cfPers/' + iddirector).ref.get();
}
// METHOD THAT FETCHES A SPECIFIC SPACE BY ITS SPACE ID
buscarEspacio(idespacio) {
return this.afs.doc('space/' + idespacio).ref.get();
}
buscarDireccion(sede, subsede, espacio) {
let direccion = '';
let espa = '';
const promise = new Promise((resolve, reject) => {
this.afs.doc('headquarter/' + sede).ref.get().then(sedereturn => {
this.afs.doc('cfPAddr/' + subsede).ref.get().then(subreturn => {
this.afs.doc('space/' + espacio).ref.get().then(espareturn => {
direccion = sedereturn.data().cfName + ' ' + subreturn.data().cfAddrline2 +
' ' + subreturn.data().cfAddrline1;
espa = espareturn.data().spaceData.building;
resolve({
dir: direccion,
espa: espa
});
});
});
});
});
return promise;
}
// METHOD THAT SHAPES THE SERVICE DATA SHOWN IN THE LABORATORY SEARCH VIEW
// RECEIVES THE LABORATORY NODE THAT HOLDS THE ASSOCIATED SERVICES
estructurarServicios(item) {
var arr = [];
var promesas = []
if ((typeof item) == 'object') {
var keys = Object.keys(item)
if (keys.length) {
for (const clave in item) {
// Make sure the object really has this property as its own
if (item.hasOwnProperty(clave)) {
if (item[clave]) {
var serv = this.afs.doc('cfSrv/' + clave).ref.get()
promesas.push(serv)
}
}
}
return Promise.all(promesas).then(responses => {
responses.forEach(data => {
var servicio = data.data();
if (servicio.cfName) {
var serv = {
nombre: servicio.cfName,
descripcion: servicio.cfDesc,
precio: servicio.cfPrice,
activo: servicio.active,
equipos: this.estructurarEquipos(servicio.relatedEquipments),
condiciones: servicio.cfCondition,
descuento: servicio.descuento,
parametros: servicio.parametros,
variaciones: this.variations(data.id),
uid: data.id
};
arr.push(serv);
}
})
return arr
})
} else {
console.log(arr)
return arr;
}
} else {
return arr
}
}
// METHOD THAT SHAPES THE PRACTICE DATA SHOWN IN THE LABORATORY SEARCH VIEW
// RECEIVES THE LABORATORY NODE THAT HOLDS THE ASSOCIATED PRACTICES
estructurarPracticas(item) {
const arr = [];
if ((typeof item) == 'object') {
var keys = Object.keys(item)
if (keys.length) {
return new Promise((resolve, reject) => {
var cont = 0
for (const clave in item) {
// Make sure the object really has this property as its own
cont++
if (item.hasOwnProperty(clave)) {
this.afs.doc('practice/' + clave).ref.get().then(data => {
const practica = data.data();
this.afs.doc('practice/' + clave).collection('programmingData').ref.get().then(data2 => {
// works with a single schedule; once there are more, another method is needed
if (data2.docs[0].exists) {
const prog = data2.docs[0].data();
const pract = {
nombre: practica.practiceName,
id: data.id,
programacion: {
id_pro: data2.docs[0].id,
estudiantes: prog.noStudents,
horario: prog.schedule,
semestre: prog.semester
},
activo: practica.active
};
if (practica.active) {
arr.push(pract);
}
} else {
const pract = {
nombre: practica ? practica.practiceName : 'ninguno',
activo: practica ? practica.active : 'none'
};
if (practica.active) {
arr.push(pract);
}
}
if (cont == keys.length) {
resolve(arr)
}
}).catch(err => {
if (cont == keys.length) {
resolve(arr)
}
console.log(err)
});
});
}
}
});
} else {
console.log(arr)
return arr;
}
} else {
return arr
}
}
estructuraTelefonos(idlab) {
var tels = [];
return this.afs.doc('cfFacil/' + idlab).collection('cfEAddr').ref.get().then(data => {
console.log(data.empty)
if (data.empty) {
return tels;
} else {
data.forEach(element => {
console.log(element.data())
tels.push(element.data().cfEAddrValue);
});
return tels;
}
});
}
// METHOD THAT SHAPES THE ACTIVE VARIATIONS OF A SERVICE
variations(clave) {
const variaciones = [];
this.afs.doc('cfSrv/' + clave).collection('variations').ref.get().then(data => {
if (data) {
data.forEach(doc => {
const element = doc.data();
if (element.active) {
variaciones.push({
data: element,
id: doc.id
});
}
});
} else {
return variaciones;
}
});
return variaciones;
}
// METHOD THAT ADDS A NEW SERVICE REQUEST
addSolicitudServicio(item) {
return this.afs.collection('cfSrvReserv').add(item);
}
addItem(item: any) {
this.labsCollection.add(item);
}
enviarEmails(nombreserv, emailSolicitante, emailEncargado, emailLaboratorio, analistas) {
const fecha = new Date();
const fechaes = fecha.getDate() + '/' + (fecha.getMonth() + 1) + '/' + fecha.getFullYear();
const url = URLCORREO;
const asunto = 'NUEVA SOLICITUD DE SERVICIO';
let destino = '';
if (analistas) {
for (let i = 0; i < analistas.length; i++) {
destino += analistas[i] + ',';
}
}
const mensaje = 'Se le notifica que se ha realizado una nueva solicitud del servicio ' +
nombreserv + '. Esta fue realizada en la fecha ' + fechaes +
' por el usuario con el correo: ' + emailSolicitante + '.';
destino += emailSolicitante + ',' + emailEncargado + ',' + emailLaboratorio;
console.log(destino);
this.http.post(url, {
para: destino,
asunto: asunto,
mensaje: mensaje
}).subscribe((res) => {
if (res.status === 200) {
console.log('notificaciones enviadas');
} else {
console.log('error notificaciones');
}
});
}
buscarAnalistas(personas) {
const arra = [];
for (const key in personas) {
if (personas.hasOwnProperty(key)) {
if (personas[key]) {
this.buscarDirector(key).then(doc => {
if (doc.data().user !== '') {
this.buscarUsuario(doc.data().user).then(user => {
if (user) {
for (const key in user.data().appRoles) {
if (user.data().appRoles.hasOwnProperty(key)) {
if (user.data().appRoles[key]) {
if (key === '6ITqecW7XrgTLaW6fpn6') {
arra.push(doc.data().email);
}
}
}
}
}
});
}
});
}
}
}
return arra;
}
enviarNotificaciones(notificaciones, nombreserv, emailSolicitante) {
console.log(notificaciones);
const fecha = new Date().toISOString().split('T')[0];
const mensaje = 'Se le notifica que se ha realizado una nueva solicitud del servicio ' +
nombreserv + '. Esta fue solicitada en la fecha ' + fecha +
' por el usuario con el correo ' + emailSolicitante + '.';
const obj = {
asunto: 'Solicitud de servicio',
mensaje: mensaje,
fecha: new Date().toISOString().split('T')[0],
estado: 'sinver'
};
for (let i = 0; i < notificaciones.length; i++) {
const element = notificaciones[i];
this.enviarNotificacion(element, obj).then(() => {
});
}
}
buscarUsuario(id) {
return this.afs.collection('user').doc(id).ref.get();
}
buscarUsuarioWithEmail(email) {
const col = this.afs.collection('user');
const refer = col.ref.where('email', '==', email);
return refer.get();
}
enviarNotificacion(iduser, object) {
return this.afs.doc('user/' + iduser).collection('notification').add(object);
}
// LEGACY METHODS THAT ISSUE MANY QUERIES
old_estructurarDataLab(data: any) {
this.datosLabsEstructurados = [];
const promise = new Promise((resolve, reject) => {
var cont = 0
data.forEach(doc => {
const elemento = doc.data();
console.log(elemento.facilityAdmin)
if (elemento.facilityAdmin !== '') {
this.buscarDirector(elemento.facilityAdmin).then(dueno => {
const duenoLab = dueno.data();
const laboratorio = {
uid: doc.id,
nombre: elemento.cfName,
escuela: elemento.knowledgeArea !== '' ? elemento.knowledgeArea : 'ninguno',
inves: elemento.researchGroup !== '' ? elemento.researchGroup : 'ninguno',
iddirecto: elemento.facilityAdmin,
desc: elemento.cfDescr,
direspacio: {},
director: '',
emaildir: '',
coord: {
lat: 0,
lon: 0
},
telefonos: this.estructuraTelefonos(doc.id),
info: {
email: elemento.otros.email
},
servicios: this.estructurarServicios(elemento.relatedServices),
practicas: this.estructurarPracticas(elemento.relatedPractices),
personal: this.buscarAnalistas(elemento.relatedPers),
condiciones: elemento.cfConditions,
disponibilidad: elemento.cfAvailability
};
if (duenoLab && elemento.otros) {
laboratorio.director = duenoLab.cfFirstNames + ' ' + duenoLab.cfFamilyNames;
laboratorio.emaildir = duenoLab.email;
}
if (elemento.mainSpace !== '') {
this.buscarEspacio(elemento.mainSpace).then(espacio => {
const espacioLab = espacio.data();
this.buscarDireccion(elemento.headquarter, elemento.subHq, elemento.mainSpace).then(direspa => {
laboratorio.direspacio = direspa;
laboratorio.coord.lat = espacioLab.spaceData.geoRep ? espacioLab.spaceData.geoRep.longitud : 0;
laboratorio.coord.lon = espacioLab.spaceData.geoRep ? espacioLab.spaceData.geoRep.latitud : 0;
});
});
}
cont++
this.datosLabsEstructurados.push(laboratorio);
console.log(this.datosLabsEstructurados, data.size, cont);
if (cont === data.size) {
resolve({
data: this.datosLabsEstructurados
});
}
});
}
});
});
return promise;
}
}
| QuerysPrincipalService |
keyrock_organizations_users.go | /*
MIT License
Copyright (c) 2020-2021 Kazuhito Suda
This file is part of NGSI Go
https://github.com/lets-fiware/ngsi-go
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
package ngsicmd
import (
"bytes"
"fmt"
"net/http"
"github.com/lets-fiware/ngsi-go/internal/ngsilib"
"github.com/urfave/cli/v2"
)
type keyrockOrganizationUsersItems struct {
UserID string `json:"user_id"`
OrganizationID string `json:"organization_id"`
Role string `json:"role"`
}
type keyrockOrganizationUsers struct {
OrganizationUsers []keyrockOrganizationUsersItems `json:"organization_users"`
}
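// orgUsersList prints the users of a Keyrock organization (GET /v1/organizations/{oid}/users).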
func orgUsersList(c *cli.Context) error {
const funcName = "orgUsersList"
ngsi, err := initCmd(c, funcName, true)
if err != nil {
return &ngsiCmdError{funcName, 1, err.Error(), err}
}
client, err := newClient(ngsi, c, false, []string{"keyrock"})
if err != nil {
return &ngsiCmdError{funcName, 2, err.Error(), err}
}
if !c.IsSet("oid") {
return &ngsiCmdError{funcName, 3, "specify application id", nil}
}
client.SetPath("/v1/organizations/" + c.String("oid") + "/users")
res, body, err := client.HTTPGet()
if err != nil {
return &ngsiCmdError{funcName, 4, err.Error(), err}
}
if res.StatusCode != http.StatusOK {
return &ngsiCmdError{funcName, 5, fmt.Sprintf("error %s %s", res.Status, string(body)), nil}
}
if c.Bool("verbose") || c.Bool("pretty") {
if c.Bool("pretty") {
newBuf := new(bytes.Buffer)
err := ngsi.JSONConverter.Indent(newBuf, body, "", " ")
if err != nil {
return &ngsiCmdError{funcName, 6, err.Error(), err}
}
fmt.Fprintln(ngsi.StdWriter, newBuf.String())
} else {
fmt.Fprint(ngsi.StdWriter, string(body))
}
} else {
var users keyrockOrganizationUsers
err := ngsilib.JSONUnmarshal(body, &users)
if err != nil {
return &ngsiCmdError{funcName, 7, err.Error(), err}
}
for _, user := range users.OrganizationUsers {
fmt.Fprintln(ngsi.StdWriter, user.UserID)
}
}
return nil
}
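// orgUsersGet prints a user's roles in an organization (GET /v1/organizations/{oid}/users/{uid}/organization_roles).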
func orgUsersGet(c *cli.Context) error {
const funcName = "orgUsersGet"
ngsi, err := initCmd(c, funcName, true)
if err != nil {
return &ngsiCmdError{funcName, 1, err.Error(), err}
}
client, err := newClient(ngsi, c, false, []string{"keyrock"})
if err != nil {
return &ngsiCmdError{funcName, 2, err.Error(), err}
}
if !c.IsSet("oid") {
return &ngsiCmdError{funcName, 3, "specify organization id", nil}
}
if !c.IsSet("uid") {
return &ngsiCmdError{funcName, 4, "specify user id", nil}
}
client.SetPath("/v1/organizations/" + c.String("oid") + "/users/" + c.String("uid") + "/organization_roles")
res, body, err := client.HTTPGet()
if err != nil {
return &ngsiCmdError{funcName, 5, err.Error(), err}
}
if res.StatusCode != http.StatusOK {
return &ngsiCmdError{funcName, 6, fmt.Sprintf("error %s %s", res.Status, string(body)), nil}
}
if c.Bool("pretty") {
newBuf := new(bytes.Buffer)
err := ngsi.JSONConverter.Indent(newBuf, body, "", " ")
if err != nil {
return &ngsiCmdError{funcName, 7, err.Error(), err}
}
fmt.Fprintln(ngsi.StdWriter, newBuf.String())
return nil
}
fmt.Fprint(ngsi.StdWriter, string(body))
return nil
}
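// orgUsersCreate grants an organization role to a user (POST /v1/organizations/{oid}/users/{uid}/organization_roles/{orid}).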
func orgUsersCreate(c *cli.Context) error {
const funcName = "orgUsersCreate"
ngsi, err := initCmd(c, funcName, true)
if err != nil {
return &ngsiCmdError{funcName, 1, err.Error(), err}
}
client, err := newClient(ngsi, c, false, []string{"keyrock"})
if err != nil {
return &ngsiCmdError{funcName, 2, err.Error(), err}
}
if !c.IsSet("oid") {
return &ngsiCmdError{funcName, 3, "specify organization id", nil}
}
if !c.IsSet("uid") {
return &ngsiCmdError{funcName, 4, "specify user id", nil}
}
if !c.IsSet("orid") {
return &ngsiCmdError{funcName, 5, "specify organization role id", nil}
}
client.SetPath("/v1/organizations/" + c.String("oid") + "/users/" + c.String("uid") + "/organization_roles/" + c.String("orid"))
client.SetHeader("Content-Type", "application/json")
res, body, err := client.HTTPPost("")
if err != nil {
return &ngsiCmdError{funcName, 6, err.Error(), err}
}
if res.StatusCode != http.StatusCreated {
return &ngsiCmdError{funcName, 7, fmt.Sprintf("error %s %s", res.Status, string(body)), nil}
}
if c.Bool("pretty") {
newBuf := new(bytes.Buffer)
err := ngsi.JSONConverter.Indent(newBuf, body, "", " ")
if err != nil {
return &ngsiCmdError{funcName, 8, err.Error(), err}
}
fmt.Fprintln(ngsi.StdWriter, newBuf.String())
return nil
}
fmt.Fprint(ngsi.StdWriter, string(body))
return nil
}
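// orgUsersDelete revokes an organization role from a user (DELETE /v1/organizations/{oid}/users/{uid}/organization_roles/{orid}).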
func | (c *cli.Context) error {
const funcName = "orgUsersDelete"
ngsi, err := initCmd(c, funcName, true)
if err != nil {
return &ngsiCmdError{funcName, 1, err.Error(), err}
}
client, err := newClient(ngsi, c, false, []string{"keyrock"})
if err != nil {
return &ngsiCmdError{funcName, 2, err.Error(), err}
}
if !c.IsSet("oid") {
return &ngsiCmdError{funcName, 3, "specify organization id", nil}
}
if !c.IsSet("uid") {
return &ngsiCmdError{funcName, 4, "specify user id", nil}
}
if !c.IsSet("orid") {
return &ngsiCmdError{funcName, 5, "specify organization role id", nil}
}
client.SetPath("/v1/organizations/" + c.String("oid") + "/users/" + c.String("uid") + "/organization_roles/" + c.String("orid"))
res, body, err := client.HTTPDelete(nil)
if err != nil {
return &ngsiCmdError{funcName, 6, err.Error(), err}
}
if res.StatusCode != http.StatusNoContent {
return &ngsiCmdError{funcName, 7, fmt.Sprintf("error %s %s", res.Status, string(body)), nil}
}
return nil
}
| orgUsersDelete |
pathfind.js | "use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var _ = require("lodash");
var bignumber_js_1 = require("bignumber.js");
var utils_1 = require("./utils");
var common_1 = require("../common");
var pathfind_1 = require("./parse/pathfind");
var NotFoundError = common_1.errors.NotFoundError; | return _.defaults(_.assign({}, result, {
source_account: request.source_account,
source_currencies: request.source_currencies
}), { destination_amount: request.destination_amount });
}
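// Builds a call_path_find request from the pathfind spec and sends it over the connection.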
function requestPathFind(connection, pathfind) {
var destinationAmount = _.assign({ value: '-1' }, pathfind.destination.amount);
var request = {
command: 'call_path_find',
source_account: pathfind.source.address,
destination_account: pathfind.destination.address,
destination_amount: common_1.toCalledAmount(destinationAmount)
};
if (typeof request.destination_amount === 'object'
&& !request.destination_amount.issuer) {
request.destination_amount.issuer = request.destination_account;
}
if (pathfind.source.currencies && pathfind.source.currencies.length > 0) {
request.source_currencies = pathfind.source.currencies.map(function (amount) { return utils_1.renameCounterpartyToIssuer(amount); });
}
if (pathfind.source.amount) {
if (pathfind.destination.amount.value !== undefined) {
throw new ValidationError('Cannot specify both source.amount'
+ ' and destination.amount.value in getPaths');
}
request.send_max = common_1.toCalledAmount(pathfind.source.amount);
if (typeof request.send_max !== 'string' && !request.send_max.issuer) {
request.send_max.issuer = pathfind.source.address;
}
}
return connection.request(request).then(function (paths) { return addParams(request, paths); });
}
function addDirectCallPath(paths, callBalance) {
// Add CALL "path" only if the source acct has enough CALL to make the payment
var destinationAmount = paths.destination_amount;
// @ts-ignore: destinationAmount can be a currency amount object! Fix!
if ((new bignumber_js_1.default(callBalance)).greaterThanOrEqualTo(destinationAmount)) {
paths.alternatives.unshift({
paths_computed: [],
source_amount: paths.destination_amount
});
}
return paths;
}
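// Returns true when the amount is an issued-currency (non-CALL) amount object.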
function isCalledIOUAmount(amount) {
return (typeof amount === 'object') &&
amount.currency && (amount.currency !== 'CALL');
}
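// Prepends the direct CALL path when the destination accepts CALL and the amount is native.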
function conditionallyAddDirectCALLPath(connection, address, paths) {
if (isCalledIOUAmount(paths.destination_amount)
|| !_.includes(paths.destination_currencies, 'CALL')) {
return Promise.resolve(paths);
}
return utils_1.getCALLBalance(connection, address, undefined).then(function (callBalance) { return addDirectCallPath(paths, callBalance); });
}
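// When the caller fixed source.amount, drop alternatives whose source_amount differs from it.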
function filterSourceFundsLowPaths(pathfind, paths) {
if (pathfind.source.amount &&
pathfind.destination.amount.value === undefined && paths.alternatives) {
paths.alternatives = _.filter(paths.alternatives, function (alt) {
return !!alt.source_amount &&
!!pathfind.source.amount &&
// TODO: Returns false when alt.source_amount is a string. Fix?
typeof alt.source_amount !== 'string' &&
new bignumber_js_1.default(alt.source_amount.value).eq(pathfind.source.amount.value);
});
}
return paths;
}
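// Formats raw path-find results for callers, or throws NotFoundError with a diagnostic message.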
function formatResponse(pathfind, paths) {
if (paths.alternatives && paths.alternatives.length > 0) {
return pathfind_1.default(paths);
}
if (paths.destination_currencies !== undefined &&
!_.includes(paths.destination_currencies, pathfind.destination.amount.currency)) {
throw new NotFoundError('No paths found. ' +
'The destination_account does not accept ' +
pathfind.destination.amount.currency + ', they only accept: ' +
paths.destination_currencies.join(', '));
}
else if (paths.source_currencies && paths.source_currencies.length > 0) {
throw new NotFoundError('No paths found. Please ensure' +
' that the source_account has sufficient funds to execute' +
' the payment in one of the specified source_currencies. If it does' +
' there may be insufficient liquidity in the network to execute' +
' this payment right now');
}
else {
throw new NotFoundError('No paths found.' +
' Please ensure that the source_account has sufficient funds to' +
' execute the payment. If it does there may be insufficient liquidity' +
' in the network to execute this payment right now');
}
}
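// Public entry point: validates the request, queries the server, then post-processes the paths.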
function getPaths(pathfind) {
var _this = this;
common_1.validate.getPaths({ pathfind: pathfind });
var address = pathfind.source.address;
return requestPathFind(this.connection, pathfind).then(function (paths) {
return conditionallyAddDirectCALLPath(_this.connection, address, paths);
})
.then(function (paths) { return filterSourceFundsLowPaths(pathfind, paths); })
.then(function (paths) { return formatResponse(pathfind, paths); });
}
exports.default = getPaths;
//# sourceMappingURL=pathfind.js.map | var ValidationError = common_1.errors.ValidationError;
function addParams(request, result) { |
replica_set.rs | // Generated from definition io.k8s.api.apps.v1beta2.ReplicaSet
/// DEPRECATED - This group version of ReplicaSet is deprecated by apps/v1/ReplicaSet. See the release notes for more information. ReplicaSet ensures that a specified number of pod replicas are running at any given time.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct ReplicaSet {
/// If the Labels of a ReplicaSet are empty, they are defaulted to be the same as the Pod(s) that the ReplicaSet manages. Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata
pub metadata: crate::apimachinery::pkg::apis::meta::v1::ObjectMeta,
/// Spec defines the specification of the desired behavior of the ReplicaSet. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#spec-and-status
pub spec: Option<crate::api::apps::v1beta2::ReplicaSetSpec>,
/// Status is the most recently observed status of the ReplicaSet. This data may be out of date by some window of time. Populated by the system. Read-only. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#spec-and-status
pub status: Option<crate::api::apps::v1beta2::ReplicaSetStatus>,
}
// Begin apps/v1beta2/ReplicaSet
// Generated from operation createAppsV1beta2NamespacedReplicaSet
impl ReplicaSet {
/// create a ReplicaSet
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::CreateResponse`]`<Self>>` constructor, or [`crate::CreateResponse`]`<Self>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `body`
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn create_namespaced_replica_set(
namespace: &str,
body: &crate::api::apps::v1beta2::ReplicaSet,
optional: crate::CreateOptional<'_>,
) -> Result<(crate::http::Request<Vec<u8>>, fn(crate::http::StatusCode) -> crate::ResponseBody<crate::CreateResponse<Self>>), crate::RequestError> {
let __url = format!("/apis/apps/v1beta2/namespaces/{namespace}/replicasets?",
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = crate::http::Request::post(__url);
let __body = crate::serde_json::to_vec(body).map_err(crate::RequestError::Json)?;
let __request = __request.header(crate::http::header::CONTENT_TYPE, crate::http::header::HeaderValue::from_static("application/json"));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
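// Editor's note: a minimal, hypothetical sketch (not part of the generated code) of
// how a caller might drive the request/response pair returned above. `send` stands in
// for whatever HTTP client this crate is paired with, since k8s-openapi ships none:
//
//     let (request, response_body) =
//         ReplicaSet::create_namespaced_replica_set("default", &rs, Default::default())?;
//     let (status_code, bytes) = send(request)?; // hypothetical transport call
//     let mut body = response_body(status_code);
//     body.append_slice(&bytes);
//     let created = body.parse()?; // yields a crate::CreateResponse<ReplicaSet>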
// Generated from operation deleteAppsV1beta2CollectionNamespacedReplicaSet
impl ReplicaSet {
/// delete collection of ReplicaSet
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::DeleteResponse`]`<`[`crate::List`]`<Self>>>` constructor, or [`crate::DeleteResponse`]`<`[`crate::List`]`<Self>>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `delete_optional`
///
/// Delete options. Use `Default::default()` to not pass any.
///
/// * `list_optional`
///
/// List options. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn delete_collection_namespaced_replica_set(
namespace: &str,
delete_optional: crate::DeleteOptional<'_>,
list_optional: crate::ListOptional<'_>,
) -> Result<(crate::http::Request<Vec<u8>>, fn(crate::http::StatusCode) -> crate::ResponseBody<crate::DeleteResponse<crate::List<Self>>>), crate::RequestError> {
let __url = format!("/apis/apps/v1beta2/namespaces/{namespace}/replicasets?",
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
list_optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = crate::http::Request::delete(__url);
let __body = crate::serde_json::to_vec(&delete_optional).map_err(crate::RequestError::Json)?;
let __request = __request.header(crate::http::header::CONTENT_TYPE, crate::http::header::HeaderValue::from_static("application/json"));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// Generated from operation deleteAppsV1beta2NamespacedReplicaSet
impl ReplicaSet {
/// delete a ReplicaSet
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::DeleteResponse`]`<Self>>` constructor, or [`crate::DeleteResponse`]`<Self>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `name`
///
/// name of the ReplicaSet
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn delete_namespaced_replica_set(
name: &str,
namespace: &str,
optional: crate::DeleteOptional<'_>,
) -> Result<(crate::http::Request<Vec<u8>>, fn(crate::http::StatusCode) -> crate::ResponseBody<crate::DeleteResponse<Self>>), crate::RequestError> {
let __url = format!("/apis/apps/v1beta2/namespaces/{namespace}/replicasets/{name}",
name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let __request = crate::http::Request::delete(__url);
let __body = crate::serde_json::to_vec(&optional).map_err(crate::RequestError::Json)?;
let __request = __request.header(crate::http::header::CONTENT_TYPE, crate::http::header::HeaderValue::from_static("application/json"));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// Generated from operation listAppsV1beta2NamespacedReplicaSet
impl ReplicaSet {
/// list or watch objects of kind ReplicaSet
///
/// This operation only supports listing all items of this type.
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::ListResponse`]`<Self>>` constructor, or [`crate::ListResponse`]`<Self>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn list_namespaced_replica_set(
namespace: &str,
optional: crate::ListOptional<'_>,
) -> Result<(crate::http::Request<Vec<u8>>, fn(crate::http::StatusCode) -> crate::ResponseBody<crate::ListResponse<Self>>), crate::RequestError> {
let __url = format!("/apis/apps/v1beta2/namespaces/{namespace}/replicasets?",
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = crate::http::Request::get(__url);
let __body = vec![];
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// Generated from operation listAppsV1beta2ReplicaSetForAllNamespaces
impl ReplicaSet {
/// list or watch objects of kind ReplicaSet
///
/// This operation only supports listing all items of this type.
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::ListResponse`]`<Self>>` constructor, or [`crate::ListResponse`]`<Self>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn list_replica_set_for_all_namespaces(
optional: crate::ListOptional<'_>,
) -> Result<(crate::http::Request<Vec<u8>>, fn(crate::http::StatusCode) -> crate::ResponseBody<crate::ListResponse<Self>>), crate::RequestError> {
let __url = "/apis/apps/v1beta2/replicasets?".to_owned();
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = crate::http::Request::get(__url);
let __body = vec![];
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// Generated from operation patchAppsV1beta2NamespacedReplicaSet
impl ReplicaSet {
/// partially update the specified ReplicaSet
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::PatchResponse`]`<Self>>` constructor, or [`crate::PatchResponse`]`<Self>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `name`
///
/// name of the ReplicaSet
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `body`
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn patch_namespaced_replica_set(
name: &str,
namespace: &str,
body: &crate::apimachinery::pkg::apis::meta::v1::Patch,
optional: crate::PatchOptional<'_>,
) -> Result<(crate::http::Request<Vec<u8>>, fn(crate::http::StatusCode) -> crate::ResponseBody<crate::PatchResponse<Self>>), crate::RequestError> {
let __url = format!("/apis/apps/v1beta2/namespaces/{namespace}/replicasets/{name}?",
name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = crate::http::Request::patch(__url);
let __body = crate::serde_json::to_vec(body).map_err(crate::RequestError::Json)?;
let __request = __request.header(crate::http::header::CONTENT_TYPE, crate::http::header::HeaderValue::from_static(match body {
crate::apimachinery::pkg::apis::meta::v1::Patch::Json(_) => "application/json-patch+json",
crate::apimachinery::pkg::apis::meta::v1::Patch::Merge(_) => "application/merge-patch+json",
crate::apimachinery::pkg::apis::meta::v1::Patch::StrategicMerge(_) => "application/strategic-merge-patch+json",
}));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// Generated from operation patchAppsV1beta2NamespacedReplicaSetStatus
impl ReplicaSet {
/// partially update status of the specified ReplicaSet
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::PatchResponse`]`<Self>>` constructor, or [`crate::PatchResponse`]`<Self>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `name`
///
/// name of the ReplicaSet
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `body`
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn patch_namespaced_replica_set_status(
name: &str,
namespace: &str,
body: &crate::apimachinery::pkg::apis::meta::v1::Patch,
optional: crate::PatchOptional<'_>,
) -> Result<(crate::http::Request<Vec<u8>>, fn(crate::http::StatusCode) -> crate::ResponseBody<crate::PatchResponse<Self>>), crate::RequestError> {
let __url = format!("/apis/apps/v1beta2/namespaces/{namespace}/replicasets/{name}/status?",
name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = crate::http::Request::patch(__url);
let __body = crate::serde_json::to_vec(body).map_err(crate::RequestError::Json)?;
let __request = __request.header(crate::http::header::CONTENT_TYPE, crate::http::header::HeaderValue::from_static(match body {
crate::apimachinery::pkg::apis::meta::v1::Patch::Json(_) => "application/json-patch+json",
crate::apimachinery::pkg::apis::meta::v1::Patch::Merge(_) => "application/merge-patch+json",
crate::apimachinery::pkg::apis::meta::v1::Patch::StrategicMerge(_) => "application/strategic-merge-patch+json",
}));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// Generated from operation readAppsV1beta2NamespacedReplicaSet
impl ReplicaSet {
/// read the specified ReplicaSet
///
/// Use the returned [`crate::ResponseBody`]`<`[`ReadNamespacedReplicaSetResponse`]`>` constructor, or [`ReadNamespacedReplicaSetResponse`] directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `name`
///
/// name of the ReplicaSet
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn read_namespaced_replica_set(
name: &str,
namespace: &str,
optional: ReadNamespacedReplicaSetOptional<'_>,
) -> Result<(crate::http::Request<Vec<u8>>, fn(crate::http::StatusCode) -> crate::ResponseBody<ReadNamespacedReplicaSetResponse>), crate::RequestError> {
let ReadNamespacedReplicaSetOptional {
exact,
export,
pretty,
} = optional;
let __url = format!("/apis/apps/v1beta2/namespaces/{namespace}/replicasets/{name}?",
name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
if let Some(exact) = exact {
__query_pairs.append_pair("exact", &exact.to_string());
}
if let Some(export) = export {
__query_pairs.append_pair("export", &export.to_string());
}
if let Some(pretty) = pretty {
__query_pairs.append_pair("pretty", pretty);
}
let __url = __query_pairs.finish();
let __request = crate::http::Request::get(__url);
let __body = vec![];
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
/// Optional parameters of [`ReplicaSet::read_namespaced_replica_set`]
#[cfg(feature = "api")]
#[derive(Clone, Copy, Debug, Default)]
pub struct ReadNamespacedReplicaSetOptional<'a> {
/// Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'. Deprecated. Planned for removal in 1.18.
pub exact: Option<bool>,
/// Should this value be exported. Export strips fields that a user can not specify. Deprecated. Planned for removal in 1.18.
pub export: Option<bool>,
/// If 'true', then the output is pretty printed.
pub pretty: Option<&'a str>,
}
/// Use `<ReadNamespacedReplicaSetResponse as Response>::try_from_parts` to parse the HTTP response body of [`ReplicaSet::read_namespaced_replica_set`]
#[cfg(feature = "api")]
#[derive(Debug)]
pub enum ReadNamespacedReplicaSetResponse {
Ok(crate::api::apps::v1beta2::ReplicaSet),
Other(Result<Option<crate::serde_json::Value>, crate::serde_json::Error>),
}
#[cfg(feature = "api")]
impl crate::Response for ReadNamespacedReplicaSetResponse {
fn try_from_parts(status_code: crate::http::StatusCode, buf: &[u8]) -> Result<(Self, usize), crate::ResponseError> {
match status_code {
crate::http::StatusCode::OK => {
let result = match crate::serde_json::from_slice(buf) {
Ok(value) => value,
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => return Err(crate::ResponseError::Json(err)),
};
Ok((ReadNamespacedReplicaSetResponse::Ok(result), buf.len()))
},
_ => {
let (result, read) =
if buf.is_empty() {
(Ok(None), 0)
}
else {
match crate::serde_json::from_slice(buf) {
Ok(value) => (Ok(Some(value)), buf.len()),
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => (Err(err), 0),
}
};
Ok((ReadNamespacedReplicaSetResponse::Other(result), read))
},
}
}
}
// Generated from operation readAppsV1beta2NamespacedReplicaSetStatus
impl ReplicaSet {
/// read status of the specified ReplicaSet
///
/// Use the returned [`crate::ResponseBody`]`<`[`ReadNamespacedReplicaSetStatusResponse`]`>` constructor, or [`ReadNamespacedReplicaSetStatusResponse`] directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `name`
///
/// name of the ReplicaSet
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn read_namespaced_replica_set_status(
name: &str,
namespace: &str,
optional: ReadNamespacedReplicaSetStatusOptional<'_>,
) -> Result<(crate::http::Request<Vec<u8>>, fn(crate::http::StatusCode) -> crate::ResponseBody<ReadNamespacedReplicaSetStatusResponse>), crate::RequestError> {
let ReadNamespacedReplicaSetStatusOptional {
pretty,
} = optional;
let __url = format!("/apis/apps/v1beta2/namespaces/{namespace}/replicasets/{name}/status?",
name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
if let Some(pretty) = pretty {
__query_pairs.append_pair("pretty", pretty);
}
let __url = __query_pairs.finish();
let __request = crate::http::Request::get(__url);
let __body = vec![];
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
/// Optional parameters of [`ReplicaSet::read_namespaced_replica_set_status`]
#[cfg(feature = "api")]
#[derive(Clone, Copy, Debug, Default)]
pub struct ReadNamespacedReplicaSetStatusOptional<'a> {
/// If 'true', then the output is pretty printed.
pub pretty: Option<&'a str>,
}
/// Use `<ReadNamespacedReplicaSetStatusResponse as Response>::try_from_parts` to parse the HTTP response body of [`ReplicaSet::read_namespaced_replica_set_status`]
#[cfg(feature = "api")]
#[derive(Debug)]
pub enum ReadNamespacedReplicaSetStatusResponse {
Ok(crate::api::apps::v1beta2::ReplicaSet),
Other(Result<Option<crate::serde_json::Value>, crate::serde_json::Error>),
}
#[cfg(feature = "api")]
impl crate::Response for ReadNamespacedReplicaSetStatusResponse {
fn try_from_parts(status_code: crate::http::StatusCode, buf: &[u8]) -> Result<(Self, usize), crate::ResponseError> {
match status_code {
crate::http::StatusCode::OK => {
let result = match crate::serde_json::from_slice(buf) {
Ok(value) => value,
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => return Err(crate::ResponseError::Json(err)),
};
Ok((ReadNamespacedReplicaSetStatusResponse::Ok(result), buf.len()))
},
_ => {
let (result, read) =
if buf.is_empty() {
(Ok(None), 0)
}
else {
match crate::serde_json::from_slice(buf) {
Ok(value) => (Ok(Some(value)), buf.len()),
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => (Err(err), 0),
}
};
Ok((ReadNamespacedReplicaSetStatusResponse::Other(result), read))
},
}
}
}
// Generated from operation replaceAppsV1beta2NamespacedReplicaSet
impl ReplicaSet {
/// replace the specified ReplicaSet
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::ReplaceResponse`]`<Self>>` constructor, or [`crate::ReplaceResponse`]`<Self>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `name`
///
/// name of the ReplicaSet
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `body`
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn replace_namespaced_replica_set(
name: &str,
namespace: &str,
body: &crate::api::apps::v1beta2::ReplicaSet,
optional: crate::ReplaceOptional<'_>,
) -> Result<(crate::http::Request<Vec<u8>>, fn(crate::http::StatusCode) -> crate::ResponseBody<crate::ReplaceResponse<Self>>), crate::RequestError> {
let __url = format!("/apis/apps/v1beta2/namespaces/{namespace}/replicasets/{name}?",
name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = crate::http::Request::put(__url);
let __body = crate::serde_json::to_vec(body).map_err(crate::RequestError::Json)?;
let __request = __request.header(crate::http::header::CONTENT_TYPE, crate::http::header::HeaderValue::from_static("application/json"));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// Generated from operation replaceAppsV1beta2NamespacedReplicaSetStatus
impl ReplicaSet {
/// replace status of the specified ReplicaSet
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::ReplaceResponse`]`<Self>>` constructor, or [`crate::ReplaceResponse`]`<Self>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `name`
///
/// name of the ReplicaSet
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `body`
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn replace_namespaced_replica_set_status(
name: &str,
namespace: &str,
body: &crate::api::apps::v1beta2::ReplicaSet,
optional: crate::ReplaceOptional<'_>,
) -> Result<(crate::http::Request<Vec<u8>>, fn(crate::http::StatusCode) -> crate::ResponseBody<crate::ReplaceResponse<Self>>), crate::RequestError> {
let __url = format!("/apis/apps/v1beta2/namespaces/{namespace}/replicasets/{name}/status?",
name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = crate::http::Request::put(__url);
let __body = crate::serde_json::to_vec(body).map_err(crate::RequestError::Json)?;
let __request = __request.header(crate::http::header::CONTENT_TYPE, crate::http::header::HeaderValue::from_static("application/json"));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// Generated from operation watchAppsV1beta2NamespacedReplicaSet
impl ReplicaSet {
/// list or watch objects of kind ReplicaSet
///
/// This operation only supports watching one item, or a list of items, of this type for changes.
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::WatchResponse`]`<Self>>` constructor, or [`crate::WatchResponse`]`<Self>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn watch_namespaced_replica_set(
namespace: &str,
optional: crate::WatchOptional<'_>,
) -> Result<(crate::http::Request<Vec<u8>>, fn(crate::http::StatusCode) -> crate::ResponseBody<crate::WatchResponse<Self>>), crate::RequestError> {
let __url = format!("/apis/apps/v1beta2/namespaces/{namespace}/replicasets?",
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = crate::http::Request::get(__url);
let __body = vec![];
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// Generated from operation watchAppsV1beta2ReplicaSetForAllNamespaces
impl ReplicaSet {
/// list or watch objects of kind ReplicaSet
///
/// This operation only supports watching one item, or a list of items, of this type for changes.
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::WatchResponse`]`<Self>>` constructor, or [`crate::WatchResponse`]`<Self>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn watch_replica_set_for_all_namespaces(
optional: crate::WatchOptional<'_>,
) -> Result<(crate::http::Request<Vec<u8>>, fn(crate::http::StatusCode) -> crate::ResponseBody<crate::WatchResponse<Self>>), crate::RequestError> {
let __url = "/apis/apps/v1beta2/replicasets?".to_owned();
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = crate::http::Request::get(__url);
let __body = vec![];
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// End apps/v1beta2/ReplicaSet
impl crate::Resource for ReplicaSet {
const API_VERSION: &'static str = "apps/v1beta2";
const GROUP: &'static str = "apps";
const KIND: &'static str = "ReplicaSet";
const VERSION: &'static str = "v1beta2";
const URL_PATH_SEGMENT: &'static str = "replicasets";
type Scope = crate::NamespaceResourceScope;
}
impl crate::ListableResource for ReplicaSet {
const LIST_KIND: &'static str = "ReplicaSetList";
}
impl crate::Metadata for ReplicaSet {
type Ty = crate::apimachinery::pkg::apis::meta::v1::ObjectMeta;
fn metadata(&self) -> &<Self as crate::Metadata>::Ty {
&self.metadata
}
fn metadata_mut(&mut self) -> &mut<Self as crate::Metadata>::Ty {
&mut self.metadata
}
}
impl<'de> crate::serde::Deserialize<'de> for ReplicaSet {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: crate::serde::Deserializer<'de> {
#[allow(non_camel_case_types)]
enum Field {
Key_api_version,
Key_kind,
Key_metadata,
Key_spec,
Key_status,
Other,
}
impl<'de> crate::serde::Deserialize<'de> for Field {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: crate::serde::Deserializer<'de> {
struct Visitor;
impl<'de> crate::serde::de::Visitor<'de> for Visitor {
type Value = Field;
fn | (&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str("field identifier")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: crate::serde::de::Error {
Ok(match v {
"apiVersion" => Field::Key_api_version,
"kind" => Field::Key_kind,
"metadata" => Field::Key_metadata,
"spec" => Field::Key_spec,
"status" => Field::Key_status,
_ => Field::Other,
})
}
}
deserializer.deserialize_identifier(Visitor)
}
}
struct Visitor;
impl<'de> crate::serde::de::Visitor<'de> for Visitor {
type Value = ReplicaSet;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(<Self::Value as crate::Resource>::KIND)
}
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: crate::serde::de::MapAccess<'de> {
let mut value_metadata: Option<crate::apimachinery::pkg::apis::meta::v1::ObjectMeta> = None;
let mut value_spec: Option<crate::api::apps::v1beta2::ReplicaSetSpec> = None;
let mut value_status: Option<crate::api::apps::v1beta2::ReplicaSetStatus> = None;
while let Some(key) = crate::serde::de::MapAccess::next_key::<Field>(&mut map)? {
match key {
Field::Key_api_version => {
let value_api_version: String = crate::serde::de::MapAccess::next_value(&mut map)?;
if value_api_version != <Self::Value as crate::Resource>::API_VERSION {
return Err(crate::serde::de::Error::invalid_value(crate::serde::de::Unexpected::Str(&value_api_version), &<Self::Value as crate::Resource>::API_VERSION));
}
},
Field::Key_kind => {
let value_kind: String = crate::serde::de::MapAccess::next_value(&mut map)?;
if value_kind != <Self::Value as crate::Resource>::KIND {
return Err(crate::serde::de::Error::invalid_value(crate::serde::de::Unexpected::Str(&value_kind), &<Self::Value as crate::Resource>::KIND));
}
},
Field::Key_metadata => value_metadata = crate::serde::de::MapAccess::next_value(&mut map)?,
Field::Key_spec => value_spec = crate::serde::de::MapAccess::next_value(&mut map)?,
Field::Key_status => value_status = crate::serde::de::MapAccess::next_value(&mut map)?,
Field::Other => { let _: crate::serde::de::IgnoredAny = crate::serde::de::MapAccess::next_value(&mut map)?; },
}
}
Ok(ReplicaSet {
metadata: value_metadata.unwrap_or_default(),
spec: value_spec,
status: value_status,
})
}
}
deserializer.deserialize_struct(
<Self as crate::Resource>::KIND,
&[
"apiVersion",
"kind",
"metadata",
"spec",
"status",
],
Visitor,
)
}
}
impl crate::serde::Serialize for ReplicaSet {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: crate::serde::Serializer {
let mut state = serializer.serialize_struct(
<Self as crate::Resource>::KIND,
3 +
self.spec.as_ref().map_or(0, |_| 1) +
self.status.as_ref().map_or(0, |_| 1),
)?;
crate::serde::ser::SerializeStruct::serialize_field(&mut state, "apiVersion", <Self as crate::Resource>::API_VERSION)?;
crate::serde::ser::SerializeStruct::serialize_field(&mut state, "kind", <Self as crate::Resource>::KIND)?;
crate::serde::ser::SerializeStruct::serialize_field(&mut state, "metadata", &self.metadata)?;
if let Some(value) = &self.spec {
crate::serde::ser::SerializeStruct::serialize_field(&mut state, "spec", value)?;
}
if let Some(value) = &self.status {
crate::serde::ser::SerializeStruct::serialize_field(&mut state, "status", value)?;
}
crate::serde::ser::SerializeStruct::end(state)
}
}
#[cfg(feature = "schemars")]
impl crate::schemars::JsonSchema for ReplicaSet {
fn schema_name() -> String {
"io.k8s.api.apps.v1beta2.ReplicaSet".to_owned()
}
fn json_schema(__gen: &mut crate::schemars::gen::SchemaGenerator) -> crate::schemars::schema::Schema {
crate::schemars::schema::Schema::Object(crate::schemars::schema::SchemaObject {
metadata: Some(Box::new(crate::schemars::schema::Metadata {
description: Some("DEPRECATED - This group version of ReplicaSet is deprecated by apps/v1/ReplicaSet. See the release notes for more information. ReplicaSet ensures that a specified number of pod replicas are running at any given time.".to_owned()),
..Default::default()
})),
instance_type: Some(crate::schemars::schema::SingleOrVec::Single(Box::new(crate::schemars::schema::InstanceType::Object))),
object: Some(Box::new(crate::schemars::schema::ObjectValidation {
properties: IntoIterator::into_iter([
(
"apiVersion".to_owned(),
crate::schemars::schema::Schema::Object(crate::schemars::schema::SchemaObject {
metadata: Some(Box::new(crate::schemars::schema::Metadata {
description: Some("APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources".to_owned()),
..Default::default()
})),
instance_type: Some(crate::schemars::schema::SingleOrVec::Single(Box::new(crate::schemars::schema::InstanceType::String))),
..Default::default()
}),
),
(
"kind".to_owned(),
crate::schemars::schema::Schema::Object(crate::schemars::schema::SchemaObject {
metadata: Some(Box::new(crate::schemars::schema::Metadata {
description: Some("Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds".to_owned()),
..Default::default()
})),
instance_type: Some(crate::schemars::schema::SingleOrVec::Single(Box::new(crate::schemars::schema::InstanceType::String))),
..Default::default()
}),
),
(
"metadata".to_owned(),
{
let mut schema_obj = __gen.subschema_for::<crate::apimachinery::pkg::apis::meta::v1::ObjectMeta>().into_object();
schema_obj.metadata = Some(Box::new(crate::schemars::schema::Metadata {
description: Some("If the Labels of a ReplicaSet are empty, they are defaulted to be the same as the Pod(s) that the ReplicaSet manages. Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata".to_owned()),
..Default::default()
}));
crate::schemars::schema::Schema::Object(schema_obj)
},
),
(
"spec".to_owned(),
{
let mut schema_obj = __gen.subschema_for::<crate::api::apps::v1beta2::ReplicaSetSpec>().into_object();
schema_obj.metadata = Some(Box::new(crate::schemars::schema::Metadata {
description: Some("Spec defines the specification of the desired behavior of the ReplicaSet. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#spec-and-status".to_owned()),
..Default::default()
}));
crate::schemars::schema::Schema::Object(schema_obj)
},
),
(
"status".to_owned(),
{
let mut schema_obj = __gen.subschema_for::<crate::api::apps::v1beta2::ReplicaSetStatus>().into_object();
schema_obj.metadata = Some(Box::new(crate::schemars::schema::Metadata {
description: Some("Status is the most recently observed status of the ReplicaSet. This data may be out of date by some window of time. Populated by the system. Read-only. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#spec-and-status".to_owned()),
..Default::default()
}));
crate::schemars::schema::Schema::Object(schema_obj)
},
),
]).collect(),
required: IntoIterator::into_iter([
"metadata",
]).map(std::borrow::ToOwned::to_owned).collect(),
..Default::default()
})),
..Default::default()
})
}
}
| expecting |
tree_test.go | // Copyright 2015 Husobee Associates, LLC. All rights reserved.
// Use of this source code is governed by The MIT License, which
// can be found in the LICENSE file included.
package vestigo
import "fmt"
// prefix - print the prefix
func prefix(tail bool, p, on, off string) string |
// printTree - Helper method to print a representation of the tree
func (n *node) printTree(pfx string, tail bool) {
p := prefix(tail, pfx, "└── ", "├── ")
fmt.Printf("%s%s, %p: type=%d, parent=%p, resource=%v\n", p, n.prefix, n, n.typ, n.parent, n.resource)
children := n.children
l := len(children)
p = prefix(tail, pfx, " ", "│ ")
for i := 0; i < l-1; i++ {
children[i].printTree(p, false)
}
if l > 0 {
children[l-1].printTree(p, true)
}
}
| {
if tail {
return fmt.Sprintf("%s%s", p, on)
}
return fmt.Sprintf("%s%s", p, off)
} |
dotcom-rendering-commercial.js | // @flow
/*
import config from 'lib/config';
import { catchErrorsWithContext } from 'lib/robust';
import { markTime } from 'lib/user-timing';
import reportError from 'lib/report-error';
*/
// import { init as initCmpService } from 'commercial/modules/cmp/cmp';
// import { trackConsent as trackCmpConsent } from 'commercial/modules/cmp/consent-tracker';
// import { init as prepareGoogletag } from 'commercial/modules/dfp/prepare-googletag';
// import { init as initThirdPartyTags } from 'commercial/modules/third-party-tags';
/*
import {
defer,
wrap,
addStartTimeBaseline,
addEndTimeBaseline,
primaryBaseline,
} from 'commercial/modules/dfp/performance-logging';
import { trackPerformance } from 'common/modules/analytics/google';
*/
/*
const commercialModules: Array<Array<any>> = [
//['cm-prepare-cmp', initCmpService],
//['cm-track-cmp-consent', trackCmpConsent],
//['cm-thirdPartyTags', initThirdPartyTags],
//['cm-prepare-googletag', prepareGoogletag, true],
];
*/
/*
const loadHostedBundle = (): Promise<void> => {
if (config.page.isHosted) {
return new Promise(resolve => {
require.ensure(
[],
require => {
const hostedAbout = require('commercial/modules/hosted/about');
const initHostedVideo = require('commercial/modules/hosted/video');
const hostedGallery = require('commercial/modules/hosted/gallery');
const initHostedCarousel = require('commercial/modules/hosted/onward-journey-carousel');
const loadOnwardComponent = require('commercial/modules/hosted/onward');
commercialModules.push(
['cm-hostedAbout', hostedAbout.init],
[
'cm-hostedVideo',
initHostedVideo.initHostedVideo,
true,
],
['cm-hostedGallery', hostedGallery.init],
[
'cm-hostedOnward',
loadOnwardComponent.loadOnwardComponent,
true,
],
[
'cm-hostedOJCarousel',
initHostedCarousel.initHostedCarousel,
]
);
resolve();
},
'commercial-hosted'
);
});
}
return Promise.resolve();
};
const loadModules = (): Promise<void> => {
addStartTimeBaseline(primaryBaseline);
const modulePromises = [];
commercialModules.forEach(module => {
const moduleName: string = module[0];
const moduleInit: () => void = module[1];
const moduleDefer: boolean = module[2];
catchErrorsWithContext([
[
moduleName,
function pushAfterComplete(): void {
// These modules all have async init procedures which don't block, and return a promise purely for
// perf logging, to time when their async work is done. The command buffer guarantees execution order,
// so we don't use the returned promise to order the bootstrap's module invocations.
const wrapped = moduleDefer
? defer(moduleName, moduleInit)
: wrap(moduleName, moduleInit);
const result = wrapped(); | ]);
});
return Promise.all(modulePromises).then(
(): void => {
addEndTimeBaseline(primaryBaseline);
}
);
};
*/
const bootCommercial = () => {
/*
markTime('commercial start');
catchErrorsWithContext([
[
'ga-user-timing-commercial-start',
function runTrackPerformance(): void {
trackPerformance(
'Javascript Load',
'commercialStart',
'Commercial start parse time'
);
},
],
]);
// Stub the command queue
window.googletag = {
cmd: [],
};
return loadHostedBundle()
.then(loadModules)
.then(() => {
markTime('commercial end');
catchErrorsWithContext([
[
'ga-user-timing-commercial-end',
function runTrackPerformance(): void {
trackPerformance(
'Javascript Load',
'commercialEnd',
'Commercial end parse time'
);
},
],
]);
})
.catch(err => {
// Just in case something goes wrong, we don't want it to
// prevent enhanced from loading
reportError(err, {
feature: 'commercial',
});
});
*/
};
bootCommercial(); | modulePromises.push(result);
},
], |
LineHints.tsx | import * as React from 'react';
interface LineHintsProps {
hints: number[][],
solveState: boolean[][],
prefix: string
}
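// Renders the hint numbers for each line of a nonogram, left-padding shorter hint lists with filler squares.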
export class | extends React.Component<LineHintsProps, {}> {
render() {
const maxLength = this.props.hints.reduce((a, b) => Math.max(a, b.length), 0);
return (
<div className={`hint-${this.props.prefix}s`}>
{this.props.hints.map((line, i1) => (
<div className={`hint-${this.props.prefix}`} key={`${this.props.prefix}_hint_${i1}`}>
{Array(maxLength - line.length).fill(0).map((_, j) => (<div className='hint-square filler' key={`${this.props.prefix}_filler_${i1}_${j}`}></div>))}
{line.map((num, i2) => (
<div
className={`hint-square${this.props.solveState[i1][i2] ? ' solved' : ''}`}
key={`${this.props.prefix}_hintsquare_${i1}_${i2}`}
onContextMenu={(e:React.MouseEvent) => e.preventDefault()}
>
{num}
</div>
))}
</div>
))}
</div>
)
}
} | LineHints |
Subscriber.spec.js | import createSubscriber from "../../entities/subscriber";
import createFakeSubscriber from "../../../__tests__/fixtures/fakeSubscriber";
import truncate from "../../../__tests__/fixtures/utils/truncate";
import { Subscriber } from "./index";
describe("Subscriber database", () => {
beforeEach(async () => {
await truncate();
}); | const createdSubscriber = await Subscriber.create({
name: subscriber.getName(),
email: subscriber.getEmail(),
});
expect(createdSubscriber).toBeInstanceOf(Subscriber);
});
it("should update subscriber", async () => {
const subscriber = createSubscriber(createFakeSubscriber());
const createdSubscriber = await Subscriber.create({
name: subscriber.getName(),
email: subscriber.getEmail(),
});
const update = createSubscriber(createFakeSubscriber());
const [
numberOfUpdatedSubscribers,
updatedSubscribers,
] = await Subscriber.update(
{
email: update.getEmail(),
},
{
where: {
id: createdSubscriber.id,
},
individualHooks: true,
}
);
expect(numberOfUpdatedSubscribers).toBe(1);
expect(updatedSubscribers[0].email).toBe(update.getEmail());
});
it("should delete subscriber", async () => {
const subscriber = createSubscriber(createFakeSubscriber());
const createdSubscriber = await Subscriber.create({
name: subscriber.getName(),
email: subscriber.getEmail(),
});
const numberOfDestroyedSubscribers = await Subscriber.destroy({
where: {
id: createdSubscriber.id,
},
});
expect(numberOfDestroyedSubscribers).toBe(1);
});
it("should find subscribers by id", async () => {
const subscriber = createSubscriber(createFakeSubscriber());
const createdSubscriber = await Subscriber.create({
name: subscriber.getName(),
email: subscriber.getEmail(),
});
const findOne = await Subscriber.findOne({
where: {
id: createdSubscriber.id,
},
});
expect(findOne).toBeInstanceOf(Subscriber);
});
it("should find all subscribers", async () => {
const subscribers = [];
for (let i = 0; i < 10; i++) {
subscribers.push(createSubscriber(createFakeSubscriber()));
}
await Promise.all(
subscribers.map((subscriber) => {
return Subscriber.create({
name: subscriber.getName(),
email: subscriber.getEmail(),
});
})
);
const findAll = await Subscriber.findAll();
findAll.map((subscriber) => expect(subscriber).toBeInstanceOf(Subscriber));
});
}); |
it("should create subscriber", async () => {
const subscriber = createSubscriber(createFakeSubscriber());
|
05-objects-tasks.js | /* ************************************************************************************************
* *
* Please read the following tutorial before implementing tasks: *
* https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Object_initializer *
* https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object *
* *
************************************************************************************************ */
/**
* Returns the rectangle object with width and height parameters and getArea() method
*
* @param {number} width
* @param {number} height
* @return {Object}
*
* @example
* const r = new Rectangle(10,20);
* console.log(r.width); // => 10
* console.log(r.height); // => 20
* console.log(r.getArea()); // => 200
*/
function | (width, height) {
this.width = width;
this.height = height;
this.getArea = () => width * height;
}
/**
* Returns the JSON representation of specified object
*
* @param {object} obj
* @return {string}
*
* @example
* [1,2,3] => '[1,2,3]'
 * { width: 10, height : 20 } => '{"width":10,"height":20}'
*/
function getJSON(obj) {
// JSON.stringify already handles arrays and plain objects alike.
return JSON.stringify(obj);
}
/**
* Returns the object of specified type from JSON representation
*
* @param {Object} proto
* @param {string} json
* @return {object}
*
* @example
* const r = fromJSON(Circle.prototype, '{"radius":10}');
*
*/
function fromJSON(proto, json) {
return Object.setPrototypeOf(JSON.parse(json), proto);
}
/**
* Css selectors builder
*
* Each complex selector can consists of type, id, class, attribute, pseudo-class
* and pseudo-element selectors:
*
* element#id.class[attr]:pseudoClass::pseudoElement
* \----/\----/\----------/
* Can be several occurrences
*
* All types of selectors can be combined using the combination ' ','+','~','>' .
*
* The task is to design a single class, independent classes or classes hierarchy
* and implement the functionality to build the css selectors using the provided cssSelectorBuilder.
* Each selector should have the stringify() method to output the string representation
* according to css specification.
*
* Provided cssSelectorBuilder should be used as facade only to create your own classes,
* for example the first method of cssSelectorBuilder can be like this:
* element: function(value) {
* return new MySuperBaseElementSelector(...)...
* },
*
* The design of class(es) is totally up to you, but try to make it as simple,
* clear and readable as possible.
*
* @example
*
* const builder = cssSelectorBuilder;
*
* builder.id('main').class('container').class('editable').stringify()
* => '#main.container.editable'
*
* builder.element('a').attr('href$=".png"').pseudoClass('focus').stringify()
* => 'a[href$=".png"]:focus'
*
* builder.combine(
* builder.element('div').id('main').class('container').class('draggable'),
* '+',
* builder.combine(
* builder.element('table').id('data'),
* '~',
* builder.combine(
* builder.element('tr').pseudoClass('nth-of-type(even)'),
* ' ',
* builder.element('td').pseudoClass('nth-of-type(even)')
* )
* )
* ).stringify()
* => 'div#main.container.draggable + table#data ~ tr:nth-of-type(even) td:nth-of-type(even)'
*
* For more examples see unit tests.
*/
const cssSelectorBuilder = {
element(/* value */) {
throw new Error('Not implemented');
},
id(/* value */) {
throw new Error('Not implemented');
},
class(/* value */) {
throw new Error('Not implemented');
},
attr(/* value */) {
throw new Error('Not implemented');
},
pseudoClass(/* value */) {
throw new Error('Not implemented');
},
pseudoElement(/* value */) {
throw new Error('Not implemented');
},
combine(/* selector1, combinator, selector2 */) {
throw new Error('Not implemented');
},
};
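// Editor's note: one possible (untested) shape for the builder described above — an
// immutable Selector whose facade methods copy-and-extend the accumulated parts:
//
// function Selector(parts) { this.parts = parts || []; }
// Selector.prototype.stringify = function () { return this.parts.join(''); };
// Selector.prototype.id = function (v) { return new Selector(this.parts.concat('#' + v)); };
// // ...one such method per selector kind, plus ordering/uniqueness checks.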
module.exports = {
Rectangle,
getJSON,
fromJSON,
cssSelectorBuilder,
};
| Rectangle |
main.go | package main
import (
"context"
"fmt"
rts "github.com/ory/keto/proto/ory/keto/relation_tuples/v1alpha2"
"google.golang.org/grpc"
)
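// main connects to Keto's gRPC write API and inserts a single relation tuple.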
func | () {
conn, err := grpc.Dial("127.0.0.1:4467", grpc.WithInsecure())
if err != nil {
panic("Encountered error: " + err.Error())
}
client := rts.NewWriteServiceClient(conn)
_, err = client.TransactRelationTuples(context.Background(), &rts.TransactRelationTuplesRequest{
RelationTupleDeltas: []*rts.RelationTupleDelta{
{
Action: rts.RelationTupleDelta_ACTION_INSERT,
RelationTuple: &rts.RelationTuple{
Namespace: "messages",
Object: "02y_15_4w350m3",
Relation: "decypher",
Subject: rts.NewSubjectID("john"),
},
},
},
})
if err != nil {
panic("Encountered error: " + err.Error())
}
fmt.Println("Successfully created tuple")
}
td_list.go | // Copyright (c) 2018, Maxime Soulé
// All rights reserved.
//
// This source code is licensed under the BSD-style license found in the
// LICENSE file in the root directory of this source tree.
package testdeep
import (
"bytes"
"reflect"
"github.com/maxatome/go-testdeep/internal/util"
)
type tdList struct {
BaseOKNil
items []reflect.Value
}
func newList(items ...interface{}) (ret tdList) {
ret.BaseOKNil = NewBaseOKNil(4)
ret.items = make([]reflect.Value, len(items))
for idx, item := range items {
ret.items[idx] = reflect.ValueOf(item)
}
return
}
func (l *tdList) String() string {
return util.SliceToBuffer(bytes.NewBufferString(l.GetLocation().Func), l.items).
String()
}
func (l *tdList) uniqTypeBehind() reflect.Type {
var (
lastIfType, lastType, curType reflect.Type
severalIfTypes bool
)
//
for _, item := range l.items {
if !item.IsValid() {
return nil // no need to go further
}
if item.Type().Implements(testDeeper) {
curType = item.Interface().(TestDeep).TypeBehind()
// Ignore unknown TypeBehind
if curType == nil {
continue
}
// Ignore interface pointers too (see Isa), but keep them in
// mind in case we encounter always the same interface pointer
if curType.Kind() == reflect.Ptr &&
curType.Elem().Kind() == reflect.Interface {
if lastIfType == nil {
lastIfType = curType
} else if lastIfType != curType {
severalIfTypes = true
}
continue
}
} else {
curType = item.Type()
}
if lastType != curType {
if lastType != nil {
return nil
}
lastType = curType
}
}
// Only one type found
if lastType != nil {
return lastType
}
// Only one interface type found
if lastIfType != nil && !severalIfTypes {
return lastIfType
}
return nil
}
grayscale.js | /*!
* Start Bootstrap - Grayscale Bootstrap Theme (http://startbootstrap.com)
* Code licensed under the Apache License v2.0.
* For details, see http://www.apache.org/licenses/LICENSE-2.0.
*/
// jQuery to collapse the navbar on scroll
// function collapseNavbar() {
// // if ($(".navbar").offset().top > 50) {
// // $(".navbar-fixed-top").addClass("top-nav-collapse");
// // } else {
// // $(".navbar-fixed-top").removeClass("top-nav-collapse");
// // }
// //FOR NOW NEVER COLLAPSE. BUT MAYBE TRY TO COLLAPSE ON HOME PAGE ONLY!
// // $(".navbar-fixed-top").addClass("top-nav-collapse");
// }
// collapseNavbar is commented out above, so these hookups are disabled too to
// avoid a ReferenceError (the theme currently never collapses the navbar):
// $(window).scroll(collapseNavbar);
// $(document).ready(collapseNavbar);
// jQuery for page scrolling feature - requires jQuery Easing plugin
$(function() {
$('a.page-scroll').bind('click', function(event) {
var $anchor = $(this);
$('html, body').stop().animate({
scrollTop: $($anchor.attr('href')).offset().top
}, 1500, 'easeInOutExpo');
event.preventDefault();
});
});
// Closes the Responsive Menu on Menu Item Click
$('.navbar-collapse ul li a').click(function() {
if ($(this).attr('class') != 'dropdown-toggle active' && $(this).attr('class') != 'dropdown-toggle') {
$('.navbar-toggle:visible').click();
}
});
ddd.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate ccc;
fn main() {
ccc::do_work();
ccc::do_work_generic::<i16>();
ccc::do_work_generic::<i32>();
}
2_robot_picam_test_NCS2_mobilenet.py | import cv2
import time
import numpy
import random
from multiprocessing import Process
from multiprocessing import Queue
from picamera.array import PiRGBArray
from picamera import PiCamera
#hacked from:
#https://software.intel.com/articles/OpenVINO-Install-RaspberryPI
#https://opencv2-python-tutroals.readthedocs.io/en/latest/py_tutorials/py_gui/py_video_display/py_video_display.html
#https://github.com/PINTO0309/MobileNet-SSD-RealSense/blob/master/SingleStickSSDwithUSBCamera_OpenVINO_NCS2.py
#https://raspberrypi.stackexchange.com/questions/87062/overhead-counter
#Les Wright Dec 24 2018
#modified to support picam 30 Dec 2018
#Robot code incorporated on 17 Jan 2019
# import curses and GPIO
import RPi.GPIO as GPIO
#set GPIO numbering mode and define output pins
GPIO.setmode(GPIO.BCM)
GPIO.setup(18,GPIO.OUT) #Left track fwd
GPIO.setup(23,GPIO.OUT) #lefttrack backwards
GPIO.setup(24,GPIO.OUT) #right track backwards
GPIO.setup(25,GPIO.OUT) #right track fwd
def motion(xminQueue,xmaxQueue):
def left(stime):
GPIO.output(18,False)
GPIO.output(25,True)
GPIO.output(23,True)
GPIO.output(24,False)
sustain(stime)
def right(stime):
GPIO.output(18,True)
GPIO.output(25,False)
GPIO.output(23,False)
GPIO.output(24,True)
sustain(stime)
def sustain(stime):
time.sleep(stime)
stop()
def forward():
GPIO.output(18,True)
GPIO.output(25,True)
GPIO.output(23,False)
GPIO.output(24,False)
def backward():
GPIO.output(18,False)
GPIO.output(25,False)
GPIO.output(23,True)
GPIO.output(24,True)
def stop():
GPIO.output(18,False)
GPIO.output(25,False)
GPIO.output(23,False)
GPIO.output(24,False)
def hunt():
right(0.2)
stop()
stop()
start = time.time() #start a timer
while True:
if not xminQueue.empty():
xmin = xminQueue.get()
xmax = xmaxQueue.get()
#print(str(xmin)+' '+str(xmax))
midpoint = (xmin+xmax)/2
width = xmax-xmin
#print("M:"+str(midpoint))
#print("W:"+str(width))
stime = abs(150-midpoint)/3000
#print(str(stime))
            #align midpoint with middle of the frame
if midpoint < 130:
left(stime)
if midpoint > 170:
right(stime)
if width:
                if width < 50:
                    forward()
elif width > 90:
backward()
else:
stop()
start = time.time() #reset the timer
if xminQueue.empty():
seconds = time.time()-start
if seconds > 0.8: #if we are empty for longer than 0.8 sec, we probably lost the target...
#print('Hunting...')
hunt()
start = time.time() #reset the timer
# initialize the input queue (frames), output queue (out),
# and the list of actual detections returned by the child process
xminQueue = Queue(maxsize=1)
xmaxQueue = Queue(maxsize=1)
# construct a child process independent from our main process
print("[INFO] starting motion handling process...")
p2 = Process(target=motion, args=(xminQueue,xmaxQueue))
p2.daemon = True
p2.start()
# Note cv2.dnn.blobFromImage, the size is present in the XML files, we could write a preamble to go get that data,
# Then we don't have to explicitly set it!
# Load the model
net = cv2.dnn.readNet('models/MobileNetSSD_deploy.xml', 'models/MobileNetSSD_deploy.bin')
# Specify target device
net.setPreferableTarget(cv2.dnn.DNN_TARGET_MYRIAD)
#Misc vars
font = cv2.FONT_HERSHEY_SIMPLEX
frameWidth = 320
frameHeight = 240
framesPerSec = 24
secPerFrame = 0.0
detections = 0.0
confThreshold = 0.5
#initialize the camera and grab a reference to the raw camera capture
#well this is interesting, we can closely match the input of the network!
#this 'seems' to have improved accuracy!
camera = PiCamera()
camera.resolution = (320, 240)
camera.framerate = 20
rawCapture = PiRGBArray(camera, size=(320, 240))
# allow the camera to warmup
time.sleep(0.1)
labels_file = 'models/labels.txt'
with open(labels_file, 'r') as f:
labels = [x.strip() for x in f]
#print(labels)
#define the function that handles our processing thread
def classify_frame(net, inputQueue, outputQueue):
# keep looping
while True:
# check to see if there is a frame in our input queue
if not inputQueue.empty():
# grab the frame from the input queue, resize it, and
# construct a blob from it
frame = inputQueue.get()
resframe = cv2.resize(frame, (300, 300))
blob = cv2.dnn.blobFromImage(resframe, 0.007843, size=(300, 300),\
mean=(127.5,127.5,127.5), swapRB=False, crop=False)
net.setInput(blob)
out = net.forward()
# write the detections to the output queue
outputQueue.put(out)
# initialize the input queue (frames), output queue (out),
# and the list of actual detections returned by the child process
inputQueue = Queue(maxsize=1)
outputQueue = Queue(maxsize=1)
out = None
# construct a child process *independent* from our main process of
# execution
print("[INFO] starting inference process...")
p = Process(target=classify_frame, args=(net,inputQueue,outputQueue,))
p.daemon = True
p.start()
print("[INFO] starting capture...")
#time the frame rate....
start = time.time()
frames = 0
for frame in camera.capture_continuous(rawCapture, format="rgb", use_video_port=True):
# Capture frame-by-frame
frame = frame.array
# if the input queue *is* empty, give the current frame to
# classify
if inputQueue.empty():
inputQueue.put(frame)
# if the output queue *is not* empty, grab the detections
if not outputQueue.empty():
out = outputQueue.get()
# check to see if 'out' is not empty
if out is not None:
# loop over the detections
# Draw detections on the frame
for detection in out.reshape(-1, 7):
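            #each detection row: [image_id, label, confidence, xmin, ymin, xmax, ymax]
            #box coordinates are normalized, hence the frame.shape scaling below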
confidence = float(detection[2])
obj_type = int(detection[1]-1)
xmin = int(detection[3] * frame.shape[1])
ymin = int(detection[4] * frame.shape[0])
xmax = int(detection[5] * frame.shape[1])
ymax = int(detection[6] * frame.shape[0])
#bottle = 4, person = 14 , dog = 11
if obj_type == 4: #Our object
if confidence > confThreshold:
#bounding box
cv2.rectangle(frame, (xmin, ymin), (xmax, ymax), color=(0, 255, 255))
#label
cv2.rectangle(frame, (xmin-1, ymin-1),\
(xmin+70, ymin-10), (0,255,255), -1)
#labeltext
cv2.putText(frame,labels[obj_type]+' '+str(round(confidence,2)),\
(xmin,ymin-2), font, 0.3,(0,0,0),1,cv2.LINE_AA)
detections += 1
xmaxQueue.put(xmax)
xminQueue.put(xmin)
# Display the resulting frame
cv2.putText(frame,'Threshold: '+str(round(confThreshold,1)), (10, 10), cv2.FONT_HERSHEY_SIMPLEX, 0.3,(0, 0, 0), 1, cv2.LINE_AA)
cv2.namedWindow('frame',cv2.WINDOW_NORMAL)
cv2.resizeWindow('frame',frameWidth,frameHeight)
cv2.imshow('frame',frame)
frames+=1
# clear the stream in preparation for the next frame
rawCapture.truncate(0)
keyPress = cv2.waitKey(1)
if keyPress == 113:
break
if keyPress == 82:
confThreshold += 0.1
if keyPress == 84:
confThreshold -= 0.1
if confThreshold >1:
confThreshold = 1
if confThreshold <0:
confThreshold = 0
end = time.time()
seconds = end-start
fps = frames/seconds
print("Avg Frames Per Sec: "+str(fps))
dts = detections/seconds
print("Avg detections Per Sec: "+str(dts))
cv2.destroyAllWindows()
GPIO.cleanup()
arrow-up.js | /*
Copyright (c) 2018-2019 Uber Technologies, Inc.
This source code is licensed under the MIT license found in the
LICENSE file in the root directory of this source tree.
*/
// @flow
// BASEUI-GENERATED-REACT-ICON
// DO NOT EDIT THIS FILE DIRECTLY, SEE README.md
import * as React from 'react';
import Icon from './icon.js';
import omitDollarPrefixedKeys from './omit-dollar-prefixed-keys.js';
import type {IconPropsT} from './types.js';
import {ThemeContext} from '../styles/theme-provider.js';
export default function ArrowUp(props: IconPropsT) {
return (
<ThemeContext.Consumer>
{theme =>
theme.icons && theme.icons.ArrowUp ? (
<theme.icons.ArrowUp
title="Arrow Up"
viewBox="0 0 24 24"
{...omitDollarPrefixedKeys(props)}
/>
) : (
<Icon title="Arrow Up" viewBox="0 0 24 24" {...props}>
<path
fillRule="evenodd"
clipRule="evenodd"
d="M11.2929 6.29289C11.6834 5.90237 12.3166 5.90237 12.7071 6.29289L16.7071 10.2929C17.0976 10.6834 17.0976 11.3166 16.7071 11.7071C16.3166 12.0976 15.6834 12.0976 15.2929 11.7071L13 9.41421V17C13 17.5523 12.5523 18 12 18C11.4477 18 11 17.5523 11 17V9.41421L8.70711 11.7071C8.31658 12.0976 7.68342 12.0976 7.29289 11.7071C6.90237 11.3166 6.90237 10.6834 7.29289 10.2929L11.2929 6.29289Z"
/>
</Icon>
)
}
</ThemeContext.Consumer>
);
}
spatialProjopt_Zops_numpy.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon May 4 21:39:12 2020
@author: pengning
This is part of the grad/Hess engine for spatial projection versions of the
original global constraint <S|T>-<T|U|T>. The Lagrange multipliers are ordered as
alphaP0_1, alphaP0_2, alphaP1_1, alphaP1_2, ..., where P0 is just the identity
"""
import numpy as np
def Z_TT(Lags, O, UPlist):
#P0 is identity and UP0 is the original U matrix
    ZTT = np.zeros_like(O, dtype=complex)  # np.complex is removed in NumPy >= 1.24
ZTT[:,:] = O[:,:]
for i in range(len(UPlist)):
SymUP = (UPlist[i]+UPlist[i].conj().T)/2
AsymUP = (UPlist[i]-UPlist[i].conj().T)/(2j)
ZTT += Lags[2*i]*SymUP + Lags[2*i+1]*AsymUP
return ZTT
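# Usage sketch (illustrative 2x2 data, values invented for this note):
#   O = np.eye(2, dtype=complex)
#   UPlist = [np.array([[0.0, 1.0j], [0.0, 0.0]])]
#   Lags = np.array([0.5, 0.25])
#   Z_TT(Lags, O, UPlist)  # == O + 0.5*Sym(UP) + 0.25*Asym(UP)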
def grad_Z_TT(Lags, UPlist):
gradZ = []
for i in range(len(UPlist)):
SymUP = (UPlist[i]+UPlist[i].conj().T)/2
AsymUP = (UPlist[i]-UPlist[i].conj().T)/(2j)
gradZ.append(SymUP)
gradZ.append(AsymUP)
return gradZ
def check_spatialProj_Lags_validity(Lags, Olist, UPlistlist):
modenum = len(Olist)
mineig = np.inf
for mode in range(modenum):
ZTT = Z_TT(Lags, Olist[mode], UPlistlist[mode])
eigZTT = np.linalg.eigvalsh(ZTT)
if eigZTT[0]<0:
print('mineig', eigZTT[0])
return eigZTT[0]
mineig = min(mineig,eigZTT[0])
return mineig
def find_singular_ZTT_eigv(Lags, Olist, UPlistlist):
modenum = len(Olist)
mineigw = np.inf
mineigv = np.zeros(Olist[0].shape[0])
modemineig = -1
for i in range(modenum):
ZTT = Z_TT(Lags, Olist[i], UPlistlist[i])
eigw, eigv = np.linalg.eigh(ZTT)
if eigw[0]<=0:
modemineig = i
mineigv = eigv[:,0]
return modemineig, mineigv
elif eigw[0]<mineigw:
mineigw = eigw[0]
mineigv = eigv[:,0]
modemineig = i
return modemineig, mineigv
def get_ZTT_mineig(Lags, Olist, UPlistlist, eigvals_only=False):
modenum = len(Olist)
mineigw = np.inf
modemineig = -1
if eigvals_only:
for mode in range(modenum):
ZTT = Z_TT(Lags, Olist[mode], UPlistlist[mode])
eigw = np.linalg.eigvalsh(ZTT)
if eigw[0]<=0:
return mode, eigw[0]
elif eigw[0]<mineigw:
mineigw = eigw[0]
modemineig = mode
return modemineig, mineigw
else:
for mode in range(modenum):
ZTT = Z_TT(Lags, Olist[mode], UPlistlist[mode])
eigw, eigv = np.linalg.eigh(ZTT)
if eigw[0]<=0:
return mode, eigw[0], eigv[:,0]
elif eigw[0]<mineigw:
mineigw = eigw[0]
mineigv = eigv[:,0]
modemineig = mode
return modemineig, mineigw, mineigv
def get_inc_ZTT_mineig(incLags, include, Olist, UPlistlist, eigvals_only=False):
Lags = np.zeros(len(include))
Lags[include] = incLags[:]
return get_ZTT_mineig(Lags, Olist, UPlistlist, eigvals_only=eigvals_only)
###method for finding derivatives of mineig of ZTT, to use for phase I (entering domain of duality) of optimization
def get_ZTT_mineig_grad(ZTT, gradZTT):
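    # Hellmann-Feynman: for a non-degenerate minimum eigenpair (w0, v0) of ZTT,
    # d w0 / d Lag_i = Re(v0^H (dZTT/dLag_i) v0), which the loop below applies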
eigw, eigv = np.linalg.eigh(ZTT)
eiggrad = np.zeros(len(gradZTT))
for i in range(len(eiggrad)):
eiggrad[i] = np.real(np.vdot(eigv[:,0], gradZTT[i] @ eigv[:,0]))
    return eiggrad
0006_auto_20200718_0429.py | # Generated by Django 3.0.7 on 2020-07-18 04:29
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('scanEngine', '0005_auto_20200718_0407'),
]
operations = [
migrations.AlterField(
model_name='wordlist',
name='path',
field=models.CharField(blank=True, default='', max_length=200),
),
    ]
train_cpu.py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE.txt file in the root directory of this source tree.
import logging
import math
import time
from collections import defaultdict
from functools import partial
from typing import Any, Callable, Dict, Iterable, List, Optional, Set, Tuple
import torch
import torch.distributed as td
from torch.optim import Optimizer
from torchbiggraph.async_adagrad import AsyncAdagrad
from torchbiggraph.batching import AbstractBatchProcessor, call, process_in_batches
from torchbiggraph.bucket_scheduling import (
BucketStats,
DistributedBucketScheduler,
LockServer,
SingleMachineBucketScheduler,
)
from torchbiggraph.checkpoint_manager import (
CheckpointManager,
ConfigMetadataProvider,
MetadataProvider,
PartitionClient,
)
from torchbiggraph.config import ConfigSchema
from torchbiggraph.distributed import ProcessRanks, init_process_group, start_server
from torchbiggraph.edgelist import EdgeList
from torchbiggraph.eval import RankingEvaluator
from torchbiggraph.graph_storages import EDGE_STORAGES, ENTITY_STORAGES
from torchbiggraph.losses import LOSS_FUNCTIONS, AbstractLossFunction
from torchbiggraph.model import MultiRelationEmbedder, make_model
from torchbiggraph.parameter_sharing import ParameterServer, ParameterSharer
from torchbiggraph.row_adagrad import RowAdagrad
from torchbiggraph.stats import Stats, StatsHandler
from torchbiggraph.types import (
SINGLE_TRAINER,
UNPARTITIONED,
Bucket,
EntityName,
FloatTensorType,
ModuleStateDict,
Partition,
Rank,
)
from torchbiggraph.util import (
BucketLogger,
DummyOptimizer,
EmbeddingHolder,
allocate_shared_tensor,
create_pool,
fast_approx_rand,
get_async_result,
get_num_workers,
hide_distributed_logging,
round_up_to_nearest_multiple,
split_almost_equally,
tag_logs_with_process_name,
)
logger = logging.getLogger("torchbiggraph")
dist_logger = logging.LoggerAdapter(logger, {"distributed": True})
class Trainer(AbstractBatchProcessor):
def __init__(
self,
model_optimizer: Optimizer,
loss_fn: AbstractLossFunction,
relation_weights: List[float],
) -> None:
super().__init__(loss_fn, relation_weights)
self.model_optimizer = model_optimizer
self.unpartitioned_optimizers: Dict[EntityName, Optimizer] = {}
self.partitioned_optimizers: Dict[Tuple[EntityName, Partition], Optimizer] = {}
def _process_one_batch(
self, model: MultiRelationEmbedder, batch_edges: EdgeList
) -> Stats:
model.zero_grad()
scores, reg = model(batch_edges)
loss = self.calc_loss(scores, batch_edges)
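        # "violators" below counts negatives that outscore their positive on
        # each side, i.e. margin violations among the sampled negatives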
stats = Stats(
loss=float(loss),
reg=float(reg) if reg is not None else 0.0,
violators_lhs=int((scores.lhs_neg > scores.lhs_pos.unsqueeze(1)).sum()),
violators_rhs=int((scores.rhs_neg > scores.rhs_pos.unsqueeze(1)).sum()),
count=len(batch_edges),
)
if reg is not None:
(loss + reg).backward()
else:
loss.backward()
self.model_optimizer.step(closure=None)
for optimizer in self.unpartitioned_optimizers.values():
optimizer.step(closure=None)
for optimizer in self.partitioned_optimizers.values():
optimizer.step(closure=None)
return stats
class IterationManager(MetadataProvider):
def __init__(
self,
num_epochs: int,
edge_paths: List[str],
num_edge_chunks: int,
*,
iteration_idx: int = 0,
) -> None:
self.num_epochs = num_epochs
self.edge_paths = edge_paths
self.num_edge_chunks = num_edge_chunks
self.iteration_idx = iteration_idx
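        # iteration_idx linearizes (epoch, edge_path, edge_chunk) in row-major
        # order: e.g. with 2 paths and 3 chunks, idx 7 -> epoch 1, path 0, chunk 1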
@property
def epoch_idx(self) -> int:
return self.iteration_idx // self.num_edge_chunks // self.num_edge_paths
@property
def num_edge_paths(self) -> int:
return len(self.edge_paths)
@property
def edge_path_idx(self) -> int:
return self.iteration_idx // self.num_edge_chunks % self.num_edge_paths
@property
def edge_path(self) -> str:
return self.edge_paths[self.edge_path_idx]
@property
def edge_chunk_idx(self) -> int:
return self.iteration_idx % self.num_edge_chunks
def __iter__(self) -> Iterable[Tuple[int, int, int]]:
while self.epoch_idx < self.num_epochs:
yield self.epoch_idx, self.edge_path_idx, self.edge_chunk_idx
self.iteration_idx += 1
def get_checkpoint_metadata(self) -> Dict[str, Any]:
return {
"iteration/num_epochs": self.num_epochs,
"iteration/epoch_idx": self.epoch_idx,
"iteration/num_edge_paths": self.num_edge_paths,
"iteration/edge_path_idx": self.edge_path_idx,
"iteration/edge_path": self.edge_path,
"iteration/num_edge_chunks": self.num_edge_chunks,
"iteration/edge_chunk_idx": self.edge_chunk_idx,
}
def __add__(self, delta: int) -> "IterationManager":
return IterationManager(
self.num_epochs,
self.edge_paths,
self.num_edge_chunks,
iteration_idx=self.iteration_idx + delta,
)
def should_preserve_old_checkpoint(
iteration_manager: IterationManager, interval: Optional[int]
) -> bool:
"""Whether the checkpoint consumed by the current iteration should be kept
Given the period, in number of epochs, at which to snapshot checkpoints,
determinen whether the checkpoint that is used as input by the current
iteration (as determined by the given manager) should be preserved rather
than getting cleaned up.
"""
if interval is None:
return False
is_checkpoint_epoch = iteration_manager.epoch_idx % interval == 0
is_first_edge_path = iteration_manager.edge_path_idx == 0
is_first_edge_chunk = iteration_manager.edge_chunk_idx == 0
return is_checkpoint_epoch and is_first_edge_path and is_first_edge_chunk
def get_num_edge_chunks(config: ConfigSchema) -> int:
if config.num_edge_chunks is not None:
return config.num_edge_chunks
max_edges_per_bucket = 0
# We should check all edge paths, all lhs partitions and all rhs partitions,
# but the combinatorial explosion could lead to thousands of checks. Let's
# assume that edges are uniformly distributed among buckets (this is not
# exactly the case, as it's the entities that are uniformly distributed
# among the partitions, and edge assignments to buckets are a function of
# that, thus, for example, very high degree entities could skew this), and
# use the size of bucket (0, 0) as an estimate of the average bucket size.
# We still do it for all edge paths as there could be semantic differences
# between them which lead to different sizes.
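    # Example with made-up numbers: if bucket (0, 0) holds 1_000_000 edges and
    # max_edges_per_chunk is 400_000, this returns ceil(1e6 / 4e5) = 3 chunks.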
for edge_path in config.edge_paths:
edge_storage = EDGE_STORAGES.make_instance(edge_path)
max_edges_per_bucket = max(
max_edges_per_bucket,
edge_storage.get_number_of_edges(UNPARTITIONED, UNPARTITIONED),
)
return max(1, math.ceil(max_edges_per_bucket / config.max_edges_per_chunk))
def make_optimizer(
config: ConfigSchema, params: Iterable[torch.nn.Parameter], is_emb: bool
) -> Optimizer:
params = list(params)
if len(params) == 0:
optimizer = DummyOptimizer()
elif is_emb:
optimizer = RowAdagrad(params, lr=config.lr)
else:
if config.relation_lr is not None:
lr = config.relation_lr
else:
lr = config.lr
optimizer = AsyncAdagrad(params, lr=lr)
optimizer.share_memory()
return optimizer
NOOP_STATS_HANDLER = StatsHandler()
class TrainingCoordinator:
def __init__( # noqa
self,
config: ConfigSchema,
model: Optional[MultiRelationEmbedder] = None,
trainer: Optional[AbstractBatchProcessor] = None,
evaluator: Optional[AbstractBatchProcessor] = None,
rank: Rank = SINGLE_TRAINER,
subprocess_init: Optional[Callable[[], None]] = None,
stats_handler: StatsHandler = NOOP_STATS_HANDLER,
):
"""Each epoch/pass, for each partition pair, loads in embeddings and edgelist
from disk, runs HOGWILD training on them, and writes partitions back to disk.
"""
tag_logs_with_process_name(f"Trainer-{rank}")
self.config = config
if config.verbose > 0:
import pprint
pprint.PrettyPrinter().pprint(config.to_dict())
logger.info("Loading entity counts...")
entity_storage = ENTITY_STORAGES.make_instance(config.entity_path)
entity_counts: Dict[str, List[int]] = {}
for entity, econf in config.entities.items():
entity_counts[entity] = []
for part in range(econf.num_partitions):
entity_counts[entity].append(entity_storage.load_count(entity, part))
# Figure out how many lhs and rhs partitions we need
holder = self.holder = EmbeddingHolder(config)
logger.debug(
f"nparts {holder.nparts_lhs} {holder.nparts_rhs} "
f"types {holder.lhs_partitioned_types} {holder.rhs_partitioned_types}"
)
        # We know ahead of time that we will need 1-2 storages for each embedding type,
        # as well as the max size of this storage (num_entities x D).
        # We allocate these storages in advance in `embedding_storage_freelist`.
# When we need storage for an entity type, we pop it from this free list,
# and then add it back when we 'delete' the embedding table.
embedding_storage_freelist: Dict[
EntityName, Set[torch.FloatStorage]
] = defaultdict(set)
for entity_type, counts in entity_counts.items():
max_count = max(counts)
num_sides = (
(1 if entity_type in holder.lhs_partitioned_types else 0)
+ (1 if entity_type in holder.rhs_partitioned_types else 0)
+ (
1
if entity_type
in (holder.lhs_unpartitioned_types | holder.rhs_unpartitioned_types)
else 0
)
)
for _ in range(num_sides):
embedding_storage_freelist[entity_type].add(
allocate_shared_tensor(
(max_count, config.entity_dimension(entity_type)),
dtype=torch.float,
).storage()
)
# create the handlers, threads, etc. for distributed training
if config.num_machines > 1 or config.num_partition_servers > 0:
if not 0 <= rank < config.num_machines:
raise RuntimeError("Invalid rank for trainer")
if not td.is_available():
raise RuntimeError(
"The installed PyTorch version doesn't provide "
"distributed training capabilities."
)
ranks = ProcessRanks.from_num_invocations(
config.num_machines, config.num_partition_servers
)
num_ps_groups = config.num_groups_for_partition_server
groups: List[List[int]] = [ranks.trainers] # barrier group
groups += [
ranks.trainers + ranks.partition_servers
] * num_ps_groups # ps groups
group_idxs_for_partition_servers = range(1, len(groups))
if rank == SINGLE_TRAINER:
logger.info("Setup lock server...")
start_server(
LockServer(
num_clients=len(ranks.trainers),
nparts_lhs=holder.nparts_lhs,
nparts_rhs=holder.nparts_rhs,
entities_lhs=holder.lhs_partitioned_types,
entities_rhs=holder.rhs_partitioned_types,
entity_counts=entity_counts,
init_tree=config.distributed_tree_init_order,
stats_handler=stats_handler,
),
process_name="LockServer",
init_method=config.distributed_init_method,
world_size=ranks.world_size,
server_rank=ranks.lock_server,
groups=groups,
subprocess_init=subprocess_init,
)
self.bucket_scheduler = DistributedBucketScheduler(
server_rank=ranks.lock_server, client_rank=ranks.trainers[rank]
)
logger.info("Setup param server...")
start_server(
ParameterServer(num_clients=len(ranks.trainers)),
process_name=f"ParamS-{rank}",
init_method=config.distributed_init_method,
world_size=ranks.world_size,
server_rank=ranks.parameter_servers[rank],
groups=groups,
subprocess_init=subprocess_init,
)
parameter_sharer = ParameterSharer(
process_name=f"ParamC-{rank}",
client_rank=ranks.parameter_clients[rank],
all_server_ranks=ranks.parameter_servers,
init_method=config.distributed_init_method,
world_size=ranks.world_size,
groups=groups,
subprocess_init=subprocess_init,
)
if config.num_partition_servers == -1:
start_server(
ParameterServer(
num_clients=len(ranks.trainers),
group_idxs=group_idxs_for_partition_servers,
log_stats=True,
),
process_name=f"PartS-{rank}",
init_method=config.distributed_init_method,
world_size=ranks.world_size,
server_rank=ranks.partition_servers[rank],
groups=groups,
subprocess_init=subprocess_init,
)
groups = init_process_group(
rank=ranks.trainers[rank],
world_size=ranks.world_size,
init_method=config.distributed_init_method,
groups=groups,
)
trainer_group, *groups_for_partition_servers = groups
self.barrier_group = trainer_group
if len(ranks.partition_servers) > 0:
partition_client = PartitionClient(
ranks.partition_servers,
groups=groups_for_partition_servers,
log_stats=True,
)
else:
partition_client = None
else:
self.barrier_group = None
self.bucket_scheduler = SingleMachineBucketScheduler(
holder.nparts_lhs, holder.nparts_rhs, config.bucket_order, stats_handler
)
parameter_sharer = None
partition_client = None
hide_distributed_logging()
# fork early for HOGWILD threads
logger.info("Creating workers...")
self.num_workers = get_num_workers(config.workers)
self.pool = create_pool(
self.num_workers,
subprocess_name=f"TWorker-{rank}",
subprocess_init=subprocess_init,
)
checkpoint_manager = CheckpointManager(
config.checkpoint_path,
rank=rank,
num_machines=config.num_machines,
partition_client=partition_client,
subprocess_name=f"BackgRW-{rank}",
subprocess_init=subprocess_init,
)
self.checkpoint_manager = checkpoint_manager
checkpoint_manager.register_metadata_provider(ConfigMetadataProvider(config))
if rank == 0:
checkpoint_manager.write_config(config)
num_edge_chunks = get_num_edge_chunks(config)
self.iteration_manager = IterationManager(
config.num_epochs,
config.edge_paths,
num_edge_chunks,
iteration_idx=checkpoint_manager.checkpoint_version,
)
checkpoint_manager.register_metadata_provider(self.iteration_manager)
logger.info("Initializing global model...")
if model is None:
model = make_model(config)
model.share_memory()
loss_fn = LOSS_FUNCTIONS.get_class(config.loss_fn)(margin=config.margin)
relation_weights = [relation.weight for relation in config.relations]
if trainer is None:
trainer = Trainer(
model_optimizer=make_optimizer(config, model.parameters(), False),
loss_fn=loss_fn,
relation_weights=relation_weights,
)
if evaluator is None:
eval_overrides = {}
if config.eval_num_batch_negs is not None:
eval_overrides["num_batch_negs"] = config.eval_num_batch_negs
if config.eval_num_uniform_negs is not None:
eval_overrides["num_uniform_negs"] = config.eval_num_uniform_negs
evaluator = RankingEvaluator(
loss_fn=loss_fn,
relation_weights=relation_weights,
overrides=eval_overrides,
)
if config.init_path is not None:
self.loadpath_manager = CheckpointManager(config.init_path)
else:
self.loadpath_manager = None
# load model from checkpoint or loadpath, if available
state_dict, optim_state = checkpoint_manager.maybe_read_model()
if state_dict is None and self.loadpath_manager is not None:
state_dict, optim_state = self.loadpath_manager.maybe_read_model()
if state_dict is not None:
model.load_state_dict(state_dict, strict=False)
if optim_state is not None:
trainer.model_optimizer.load_state_dict(optim_state)
logger.debug("Loading unpartitioned entities...")
for entity in holder.lhs_unpartitioned_types | holder.rhs_unpartitioned_types:
count = entity_counts[entity][0]
s = embedding_storage_freelist[entity].pop()
dimension = config.entity_dimension(entity)
embs = torch.FloatTensor(s).view(-1, dimension)[:count]
embs, optimizer = self._load_embeddings(entity, UNPARTITIONED, out=embs)
holder.unpartitioned_embeddings[entity] = embs
trainer.unpartitioned_optimizers[entity] = optimizer
# start communicating shared parameters with the parameter server
if parameter_sharer is not None:
shared_parameters: Set[int] = set()
for name, param in model.named_parameters():
if id(param) in shared_parameters:
continue
shared_parameters.add(id(param))
key = f"model.{name}"
logger.info(
f"Adding {key} ({param.numel()} params) to parameter server"
)
parameter_sharer.set_param(key, param.data)
for entity, embs in holder.unpartitioned_embeddings.items():
key = f"entity.{entity}"
logger.info(f"Adding {key} ({embs.numel()} params) to parameter server")
parameter_sharer.set_param(key, embs.data)
# store everything in self
self.model = model
self.trainer = trainer
self.evaluator = evaluator
self.rank = rank
self.entity_counts = entity_counts
self.embedding_storage_freelist = embedding_storage_freelist
self.stats_handler = stats_handler
self.strict = False
def train(self) -> None:
holder = self.holder
config = self.config
iteration_manager = self.iteration_manager
total_buckets = holder.nparts_lhs * holder.nparts_rhs
# yield stats from checkpoint, to reconstruct
# saved part of the learning curve
if self.rank == SINGLE_TRAINER:
for stats_dict in self.checkpoint_manager.maybe_read_stats():
index: int = stats_dict["index"]
stats: Optional[Stats] = None
if "stats" in stats_dict:
stats: Stats = Stats.from_dict(stats_dict["stats"])
eval_stats_before: Optional[Stats] = None
if "eval_stats_before" in stats_dict:
eval_stats_before = Stats.from_dict(stats_dict["eval_stats_before"])
eval_stats_after: Optional[Stats] = None
if "eval_stats_after" in stats_dict:
eval_stats_after = Stats.from_dict(stats_dict["eval_stats_after"])
eval_stats_chunk_avg: Optional[Stats] = None
if "eval_stats_chunk_avg" in stats_dict:
eval_stats_chunk_avg = Stats.from_dict(
stats_dict["eval_stats_chunk_avg"]
)
self.stats_handler.on_stats(
index,
eval_stats_before,
stats,
eval_stats_after,
eval_stats_chunk_avg,
)
for epoch_idx, edge_path_idx, edge_chunk_idx in iteration_manager:
logger.info(
f"Starting epoch {epoch_idx + 1} / {iteration_manager.num_epochs}, "
f"edge path {edge_path_idx + 1} / {iteration_manager.num_edge_paths}, "
f"edge chunk {edge_chunk_idx + 1} / {iteration_manager.num_edge_chunks}"
)
edge_storage = EDGE_STORAGES.make_instance(iteration_manager.edge_path)
logger.info(f"Edge path: {iteration_manager.edge_path}")
self._barrier()
dist_logger.info("Lock client new epoch...")
self.bucket_scheduler.new_pass(
is_first=iteration_manager.iteration_idx == 0
)
self._barrier()
remaining = total_buckets
cur_b: Optional[Bucket] = None
cur_stats: Optional[BucketStats] = None
while remaining > 0:
old_b: Optional[Bucket] = cur_b
old_stats: Optional[BucketStats] = cur_stats
cur_b, remaining = self.bucket_scheduler.acquire_bucket()
logger.info(f"still in queue: {remaining}")
if cur_b is None:
cur_stats = None
if old_b is not None:
# if you couldn't get a new pair, release the lock
# to prevent a deadlock!
tic = time.perf_counter()
release_bytes = self._swap_partitioned_embeddings(
old_b, None, old_stats
)
release_time = time.perf_counter() - tic
logger.info(
f"Swapping old embeddings to release lock. io: {release_time:.2f} s for {release_bytes:,} bytes "
f"( {release_bytes / release_time / 1e6:.2f} MB/sec )"
)
time.sleep(1) # don't hammer td
continue
tic = time.perf_counter()
self.cur_b = cur_b
bucket_logger = BucketLogger(logger, bucket=cur_b)
self.bucket_logger = bucket_logger
io_bytes = self._swap_partitioned_embeddings(old_b, cur_b, old_stats)
self.model.set_all_embeddings(holder, cur_b)
current_index = (
iteration_manager.iteration_idx + 1
) * total_buckets - remaining
bucket_logger.debug("Loading edges")
edges = edge_storage.load_chunk_of_edges(
cur_b.lhs,
cur_b.rhs,
edge_chunk_idx,
iteration_manager.num_edge_chunks,
shared=True,
)
num_edges = len(edges)
# this might be off in the case of tensorlist or extra edge fields
io_bytes += edges.lhs.tensor.numel() * edges.lhs.tensor.element_size()
io_bytes += edges.rhs.tensor.numel() * edges.rhs.tensor.element_size()
io_bytes += edges.rel.numel() * edges.rel.element_size()
io_time = time.perf_counter() - tic
tic = time.perf_counter()
bucket_logger.debug("Shuffling edges")
# Fix a seed to get the same permutation every time; have it
# depend on all and only what affects the set of edges.
# Note: for the sake of efficiency, we sample eval edge idxs
# from the edge set *with replacement*, meaning that there may
# be duplicates of the same edge in the eval set. When we swap
# edges into the eval set, if there are duplicates then all
# but one will be clobbered. These collisions are unlikely
# if eval_fraction is small.
#
# Importantly, this eval sampling strategy is theoretically
# sound:
# * Training and eval sets are (exactly) disjoint
# * Eval set may have (rare) duplicates, but they are
# uniformly sampled so it's still an unbiased estimator
# of the out-of-sample statistics
num_eval_edges = int(num_edges * config.eval_fraction)
num_train_edges = num_edges - num_eval_edges
if num_eval_edges > 0:
g = torch.Generator()
g.manual_seed(
hash((edge_path_idx, edge_chunk_idx, cur_b.lhs, cur_b.rhs))
)
eval_edge_idxs = torch.randint(
num_edges, (num_eval_edges,), dtype=torch.long, generator=g
)
else:
eval_edge_idxs = None
# HOGWILD evaluation before training
eval_stats_before = self._coordinate_eval(edges, eval_edge_idxs)
if eval_stats_before is not None:
bucket_logger.info(f"Stats before training: {eval_stats_before}")
eval_time = time.perf_counter() - tic
tic = time.perf_counter()
# HOGWILD training
bucket_logger.debug("Waiting for workers to perform training")
stats = self._coordinate_train(edges, eval_edge_idxs, epoch_idx)
if stats is not None:
bucket_logger.info(f"Training stats: {stats}")
train_time = time.perf_counter() - tic
tic = time.perf_counter()
# HOGWILD evaluation after training
eval_stats_after = self._coordinate_eval(edges, eval_edge_idxs)
if eval_stats_after is not None:
bucket_logger.info(f"Stats after training: {eval_stats_after}")
eval_time += time.perf_counter() - tic
bucket_logger.info(
f"bucket {total_buckets - remaining} / {total_buckets} : "
f"Trained {num_train_edges} edges in {train_time:.2f} s "
f"( {num_train_edges / train_time / 1e6:.2g} M/sec ); "
f"Eval 2*{num_eval_edges} edges in {eval_time:.2f} s "
f"( {2 * num_eval_edges / eval_time / 1e6:.2g} M/sec ); "
f"io: {io_time:.2f} s for {io_bytes:,} bytes ( {io_bytes / io_time / 1e6:.2f} MB/sec )"
)
self.model.clear_all_embeddings()
cur_stats = BucketStats(
lhs_partition=cur_b.lhs,
rhs_partition=cur_b.rhs,
index=current_index,
train=stats,
eval_before=eval_stats_before,
eval_after=eval_stats_after,
)
# release the final bucket
self._swap_partitioned_embeddings(cur_b, None, cur_stats)
# Distributed Processing: all machines can leave the barrier now.
self._barrier()
current_index = (iteration_manager.iteration_idx + 1) * total_buckets - 1
self._maybe_write_checkpoint(
epoch_idx, edge_path_idx, edge_chunk_idx, current_index
)
# now we're sure that all partition files exist,
# so be strict about loading them
self.strict = True
def close(self):
# cleanup
self.pool.close()
self.pool.join()
self._barrier()
self.checkpoint_manager.close()
if self.loadpath_manager is not None:
self.loadpath_manager.close()
# FIXME join distributed workers (not really necessary)
logger.info("Exiting")
###########################################################################
# private functions
###########################################################################
def _barrier(self) -> None:
if self.barrier_group is not None:
td.barrier(group=self.barrier_group)
    def _load_embeddings(
self,
entity: EntityName,
part: Partition,
out: FloatTensorType,
strict: bool = False,
force_dirty: bool = False,
) -> Tuple[torch.nn.Parameter, Optimizer]:
if strict:
embs, optim_state = self.checkpoint_manager.read(
entity, part, out=out, force_dirty=force_dirty
)
else:
# Strict is only false during the first iteration, because in that
# case the checkpoint may not contain any data (unless a previous
# run was resumed) so we fall back on initial values.
embs, optim_state = self.checkpoint_manager.maybe_read(
entity, part, out=out, force_dirty=force_dirty
)
if embs is None and self.loadpath_manager is not None:
embs, optim_state = self.loadpath_manager.maybe_read(
entity, part, out=out
)
if embs is None:
embs = out
fast_approx_rand(embs)
embs.mul_(self.config.init_scale)
optim_state = None
embs = torch.nn.Parameter(embs)
optimizer = make_optimizer(self.config, [embs], True)
if optim_state is not None:
optimizer.load_state_dict(optim_state)
return embs, optimizer
def _swap_partitioned_embeddings(
self,
old_b: Optional[Bucket],
new_b: Optional[Bucket],
old_stats: Optional[BucketStats],
) -> int:
io_bytes = 0
logger.info(f"Swapping partitioned embeddings {old_b} {new_b}")
holder = self.holder
old_parts: Set[Tuple[EntityName, Partition]] = set()
if old_b is not None:
old_parts.update((e, old_b.lhs) for e in holder.lhs_partitioned_types)
old_parts.update((e, old_b.rhs) for e in holder.rhs_partitioned_types)
new_parts: Set[Tuple[EntityName, Partition]] = set()
if new_b is not None:
new_parts.update((e, new_b.lhs) for e in holder.lhs_partitioned_types)
new_parts.update((e, new_b.rhs) for e in holder.rhs_partitioned_types)
assert old_parts == holder.partitioned_embeddings.keys()
if old_b is not None:
if old_stats is None:
raise TypeError("Got old bucket but not its stats")
logger.info("Saving partitioned embeddings to checkpoint")
for entity, part in old_parts - new_parts:
logger.debug(f"Saving ({entity} {part})")
embs = holder.partitioned_embeddings.pop((entity, part))
optimizer = self.trainer.partitioned_optimizers.pop((entity, part))
self.checkpoint_manager.write(
entity, part, embs.detach(), optimizer.state_dict()
)
self.embedding_storage_freelist[entity].add(embs.storage())
io_bytes += embs.numel() * embs.element_size() # ignore optim state
# these variables are holding large objects; let them be freed
del embs
del optimizer
self.bucket_scheduler.release_bucket(old_b, old_stats)
if new_b is not None:
logger.info("Loading partitioned embeddings from checkpoint")
for entity, part in new_parts - old_parts:
logger.debug(f"Loading ({entity} {part})")
force_dirty = self.bucket_scheduler.check_and_set_dirty(entity, part)
count = self.entity_counts[entity][part]
s = self.embedding_storage_freelist[entity].pop()
dimension = self.config.entity_dimension(entity)
embs = torch.FloatTensor(s).view(-1, dimension)[:count]
embs, optimizer = self._load_embeddings(
entity, part, out=embs, strict=self.strict, force_dirty=force_dirty
)
holder.partitioned_embeddings[entity, part] = embs
self.trainer.partitioned_optimizers[entity, part] = optimizer
io_bytes += embs.numel() * embs.element_size() # ignore optim state
assert new_parts == holder.partitioned_embeddings.keys()
return io_bytes
def _coordinate_train(self, edges, eval_edge_idxs, epoch_idx) -> Stats:
assert self.config.num_gpus == 0, "GPU training not supported"
if eval_edge_idxs is not None:
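            # Index trick: overwrite each slot picked for eval with a tail
            # index, then drop the tail, so eval edges stay out of training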
num_train_edges = len(edges) - len(eval_edge_idxs)
train_edge_idxs = torch.arange(len(edges))
train_edge_idxs[eval_edge_idxs] = torch.arange(num_train_edges, len(edges))
train_edge_idxs = train_edge_idxs[:num_train_edges]
edge_perm = train_edge_idxs[torch.randperm(num_train_edges)]
else:
edge_perm = torch.randperm(len(edges))
future_all_stats = self.pool.map_async(
call,
[
partial(
process_in_batches,
batch_size=self.config.batch_size,
model=self.model,
batch_processor=self.trainer,
edges=edges,
indices=edge_perm[s],
# FIXME should we only delay if iteration_idx == 0?
delay=self.config.hogwild_delay
if epoch_idx == 0 and self.rank > 0
else 0,
)
for rank, s in enumerate(
split_almost_equally(edge_perm.size(0), num_parts=self.num_workers)
)
],
)
all_stats = get_async_result(future_all_stats, self.pool)
return Stats.sum(all_stats).average()
def _coordinate_eval(self, edges, eval_edge_idxs) -> Optional[Stats]:
eval_batch_size = round_up_to_nearest_multiple(
self.config.batch_size, self.config.eval_num_batch_negs
)
if eval_edge_idxs is not None:
self.bucket_logger.debug("Waiting for workers to perform evaluation")
future_all_eval_stats = self.pool.map_async(
call,
[
partial(
process_in_batches,
batch_size=eval_batch_size,
model=self.model,
batch_processor=self.evaluator,
edges=edges,
indices=eval_edge_idxs[s],
)
for s in split_almost_equally(
eval_edge_idxs.size(0), num_parts=self.num_workers
)
],
)
all_eval_stats = get_async_result(future_all_eval_stats, self.pool)
return Stats.sum(all_eval_stats).average()
else:
return None
def _maybe_write_checkpoint(
self,
epoch_idx: int,
edge_path_idx: int,
edge_chunk_idx: int,
current_index: int,
) -> None:
config = self.config
# Preserving a checkpoint requires two steps:
# - create a snapshot (w/ symlinks) after it's first written;
# - don't delete it once the following one is written.
# These two happen in two successive iterations of the main loop: the
# one just before and the one just after the epoch boundary.
preserve_old_checkpoint = should_preserve_old_checkpoint(
self.iteration_manager, config.checkpoint_preservation_interval
)
preserve_new_checkpoint = should_preserve_old_checkpoint(
self.iteration_manager + 1, config.checkpoint_preservation_interval
)
# Write metadata: for multiple machines, write from rank-0
logger.info(
f"Finished epoch {epoch_idx + 1} / {self.iteration_manager.num_epochs}, "
f"edge path {edge_path_idx + 1} / {self.iteration_manager.num_edge_paths}, "
f"edge chunk {edge_chunk_idx + 1} / "
f"{self.iteration_manager.num_edge_chunks}"
)
if self.rank == 0:
for entity, embs in self.holder.unpartitioned_embeddings.items():
logger.info(f"Writing {entity} embeddings")
optimizer = self.trainer.unpartitioned_optimizers[entity]
self.checkpoint_manager.write(
entity,
UNPARTITIONED,
embs.detach(),
optimizer.state_dict(),
unpartitioned=True,
)
logger.info("Writing the metadata")
state_dict: ModuleStateDict = self.model.state_dict()
self.checkpoint_manager.write_model(
state_dict, self.trainer.model_optimizer.state_dict()
)
logger.info("Writing the training stats")
all_stats_dicts: List[Dict[str, Any]] = []
bucket_eval_stats_list = []
chunk_stats_dict = {
"epoch_idx": epoch_idx,
"edge_path_idx": edge_path_idx,
"edge_chunk_idx": edge_chunk_idx,
}
for stats in self.bucket_scheduler.get_stats_for_pass():
stats_dict = {
"lhs_partition": stats.lhs_partition,
"rhs_partition": stats.rhs_partition,
"index": stats.index,
"stats": stats.train.to_dict(),
}
if stats.eval_before is not None:
stats_dict["eval_stats_before"] = stats.eval_before.to_dict()
bucket_eval_stats_list.append(stats.eval_before)
if stats.eval_after is not None:
stats_dict["eval_stats_after"] = stats.eval_after.to_dict()
stats_dict.update(chunk_stats_dict)
all_stats_dicts.append(stats_dict)
if len(bucket_eval_stats_list) != 0:
eval_stats_chunk_avg = Stats.average_list(bucket_eval_stats_list)
self.stats_handler.on_stats(
index=current_index, eval_stats_chunk_avg=eval_stats_chunk_avg
)
chunk_stats_dict["index"] = current_index
chunk_stats_dict[
"eval_stats_chunk_avg"
] = eval_stats_chunk_avg.to_dict()
all_stats_dicts.append(chunk_stats_dict)
self.checkpoint_manager.append_stats(all_stats_dicts)
logger.info("Writing the checkpoint")
self.checkpoint_manager.write_new_version(
config, self.entity_counts, self.embedding_storage_freelist
)
dist_logger.info(
"Waiting for other workers to write their parts of the checkpoint"
)
self._barrier()
dist_logger.info("All parts of the checkpoint have been written")
logger.info("Switching to the new checkpoint version")
self.checkpoint_manager.switch_to_new_version()
dist_logger.info(
"Waiting for other workers to switch to the new checkpoint version"
)
self._barrier()
dist_logger.info("All workers have switched to the new checkpoint version")
# After all the machines have finished committing
# checkpoints, we either remove the old checkpoints
# or we preserve it
if preserve_new_checkpoint:
# Add 1 so the index is a multiple of the interval, it looks nicer.
self.checkpoint_manager.preserve_current_version(config, epoch_idx + 1)
if not preserve_old_checkpoint:
            self.checkpoint_manager.remove_old_version(config)
jquery-1.9.0.min.js | /*! jQuery v1.9.0 | (c) 2005, 2012 jQuery Foundation, Inc. | jquery.org/license */
(function (e, t) {
"use strict";
function n(e) {
var t = e.length, n = st.type(e);
return st.isWindow(e) ? !1 : 1 === e.nodeType && t ? !0 : "array" === n || "function" !== n && (0 === t || "number" == typeof t && t > 0 && t - 1 in e)
}
function r(e) {
var t = Tt[e] = {};
return st.each(e.match(lt) || [], function (e, n) {
t[n] = !0
}), t
}
function i(e, n, r, i) {
if (st.acceptData(e)) {
var o, a, s = st.expando, u = "string" == typeof n, l = e.nodeType, c = l ? st.cache : e, f = l ? e[s] : e[s] && s;
if (f && c[f] && (i || c[f].data) || !u || r !== t)return f || (l ? e[s] = f = K.pop() || st.guid++ : f = s), c[f] || (c[f] = {}, l || (c[f].toJSON = st.noop)), ("object" == typeof n || "function" == typeof n) && (i ? c[f] = st.extend(c[f], n) : c[f].data = st.extend(c[f].data, n)), o = c[f], i || (o.data || (o.data = {}), o = o.data), r !== t && (o[st.camelCase(n)] = r), u ? (a = o[n], null == a && (a = o[st.camelCase(n)])) : a = o, a
}
}
function o(e, t, n) {
if (st.acceptData(e)) {
var r, i, o, a = e.nodeType, u = a ? st.cache : e, l = a ? e[st.expando] : st.expando;
if (u[l]) {
if (t && (r = n ? u[l] : u[l].data)) {
st.isArray(t) ? t = t.concat(st.map(t, st.camelCase)) : t in r ? t = [t] : (t = st.camelCase(t), t = t in r ? [t] : t.split(" "));
for (i = 0, o = t.length; o > i; i++)delete r[t[i]];
if (!(n ? s : st.isEmptyObject)(r))return
}
(n || (delete u[l].data, s(u[l]))) && (a ? st.cleanData([e], !0) : st.support.deleteExpando || u != u.window ? delete u[l] : u[l] = null)
}
}
}
function a(e, n, r) {
if (r === t && 1 === e.nodeType) {
var i = "data-" + n.replace(Nt, "-$1").toLowerCase();
if (r = e.getAttribute(i), "string" == typeof r) {
try {
r = "true" === r ? !0 : "false" === r ? !1 : "null" === r ? null : +r + "" === r ? +r : wt.test(r) ? st.parseJSON(r) : r
} catch (o) {
}
st.data(e, n, r)
} else r = t
}
return r
}
function s(e) {
var t;
for (t in e)if (("data" !== t || !st.isEmptyObject(e[t])) && "toJSON" !== t)return !1;
return !0
}
function u() {
return !0
}
function l() {
return !1
}
function c(e, t) {
do e = e[t]; while (e && 1 !== e.nodeType);
return e
}
function f(e, t, n) {
if (t = t || 0, st.isFunction(t))return st.grep(e, function (e, r) {
var i = !!t.call(e, r, e);
return i === n
});
if (t.nodeType)return st.grep(e, function (e) {
return e === t === n
});
if ("string" == typeof t) {
var r = st.grep(e, function (e) {
return 1 === e.nodeType
});
if (Wt.test(t))return st.filter(t, r, !n);
t = st.filter(t, r)
}
return st.grep(e, function (e) {
return st.inArray(e, t) >= 0 === n
})
}
function p(e) {
var t = zt.split("|"), n = e.createDocumentFragment();
if (n.createElement)for (; t.length;)n.createElement(t.pop());
return n
}
function d(e, t) {
return e.getElementsByTagName(t)[0] || e.appendChild(e.ownerDocument.createElement(t))
}
function h(e) {
var t = e.getAttributeNode("type");
return e.type = (t && t.specified) + "/" + e.type, e
}
function g(e) {
var t = nn.exec(e.type);
return t ? e.type = t[1] : e.removeAttribute("type"), e
}
function m(e, t) {
for (var n, r = 0; null != (n = e[r]); r++)st._data(n, "globalEval", !t || st._data(t[r], "globalEval"))
}
function y(e, t) {
if (1 === t.nodeType && st.hasData(e)) {
var n, r, i, o = st._data(e), a = st._data(t, o), s = o.events;
if (s) {
delete a.handle, a.events = {};
for (n in s)for (r = 0, i = s[n].length; i > r; r++)st.event.add(t, n, s[n][r])
}
a.data && (a.data = st.extend({}, a.data))
}
}
function v(e, t) {
var n, r, i;
if (1 === t.nodeType) {
if (n = t.nodeName.toLowerCase(), !st.support.noCloneEvent && t[st.expando]) {
r = st._data(t);
for (i in r.events)st.removeEvent(t, i, r.handle);
t.removeAttribute(st.expando)
}
"script" === n && t.text !== e.text ? (h(t).text = e.text, g(t)) : "object" === n ? (t.parentNode && (t.outerHTML = e.outerHTML), st.support.html5Clone && e.innerHTML && !st.trim(t.innerHTML) && (t.innerHTML = e.innerHTML)) : "input" === n && Zt.test(e.type) ? (t.defaultChecked = t.checked = e.checked, t.value !== e.value && (t.value = e.value)) : "option" === n ? t.defaultSelected = t.selected = e.defaultSelected : ("input" === n || "textarea" === n) && (t.defaultValue = e.defaultValue)
}
}
function b(e, n) {
var r, i, o = 0, a = e.getElementsByTagName !== t ? e.getElementsByTagName(n || "*") : e.querySelectorAll !== t ? e.querySelectorAll(n || "*") : t;
if (!a)for (a = [], r = e.childNodes || e; null != (i = r[o]); o++)!n || st.nodeName(i, n) ? a.push(i) : st.merge(a, b(i, n));
return n === t || n && st.nodeName(e, n) ? st.merge([e], a) : a
}
function x(e) {
Zt.test(e.type) && (e.defaultChecked = e.checked)
}
function T(e, t) {
if (t in e)return t;
for (var n = t.charAt(0).toUpperCase() + t.slice(1), r = t, i = Nn.length; i--;)if (t = Nn[i] + n, t in e)return t;
return r
}
function w(e, t) {
return e = t || e, "none" === st.css(e, "display") || !st.contains(e.ownerDocument, e)
}
function N(e, t) {
for (var n, r = [], i = 0, o = e.length; o > i; i++)n = e[i], n.style && (r[i] = st._data(n, "olddisplay"), t ? (r[i] || "none" !== n.style.display || (n.style.display = ""), "" === n.style.display && w(n) && (r[i] = st._data(n, "olddisplay", S(n.nodeName)))) : r[i] || w(n) || st._data(n, "olddisplay", st.css(n, "display")));
for (i = 0; o > i; i++)n = e[i], n.style && (t && "none" !== n.style.display && "" !== n.style.display || (n.style.display = t ? r[i] || "" : "none"));
return e
}
function C(e, t, n) {
var r = mn.exec(t);
return r ? Math.max(0, r[1] - (n || 0)) + (r[2] || "px") : t
}
function k(e, t, n, r, i) {
for (var o = n === (r ? "border" : "content") ? 4 : "width" === t ? 1 : 0, a = 0; 4 > o; o += 2)"margin" === n && (a += st.css(e, n + wn[o], !0, i)), r ? ("content" === n && (a -= st.css(e, "padding" + wn[o], !0, i)), "margin" !== n && (a -= st.css(e, "border" + wn[o] + "Width", !0, i))) : (a += st.css(e, "padding" + wn[o], !0, i), "padding" !== n && (a += st.css(e, "border" + wn[o] + "Width", !0, i)));
return a
}
function E(e, t, n) {
var r = !0, i = "width" === t ? e.offsetWidth : e.offsetHeight, o = ln(e), a = st.support.boxSizing && "border-box" === st.css(e, "boxSizing", !1, o);
if (0 >= i || null == i) {
if (i = un(e, t, o), (0 > i || null == i) && (i = e.style[t]), yn.test(i))return i;
r = a && (st.support.boxSizingReliable || i === e.style[t]), i = parseFloat(i) || 0
}
return i + k(e, t, n || (a ? "border" : "content"), r, o) + "px"
}
function S(e) {
var t = V, n = bn[e];
return n || (n = A(e, t), "none" !== n && n || (cn = (cn || st("<iframe frameborder='0' width='0' height='0'/>").css("cssText", "display:block !important")).appendTo(t.documentElement), t = (cn[0].contentWindow || cn[0].contentDocument).document, t.write("<!doctype html><html><body>"), t.close(), n = A(e, t), cn.detach()), bn[e] = n), n
}
function A(e, t) {
var n = st(t.createElement(e)).appendTo(t.body), r = st.css(n[0], "display");
return n.remove(), r
}
function j(e, t, n, r) {
var i;
if (st.isArray(t))st.each(t, function (t, i) {
n || kn.test(e) ? r(e, i) : j(e + "[" + ("object" == typeof i ? t : "") + "]", i, n, r)
}); else if (n || "object" !== st.type(t))r(e, t); else for (i in t)j(e + "[" + i + "]", t[i], n, r)
}
function D(e) {
return function (t, n) {
"string" != typeof t && (n = t, t = "*");
var r, i = 0, o = t.toLowerCase().match(lt) || [];
if (st.isFunction(n))for (; r = o[i++];)"+" === r[0] ? (r = r.slice(1) || "*", (e[r] = e[r] || []).unshift(n)) : (e[r] = e[r] || []).push(n)
}
}
function L(e, n, r, i) {
function o(u) {
var l;
return a[u] = !0, st.each(e[u] || [], function (e, u) {
var c = u(n, r, i);
return "string" != typeof c || s || a[c] ? s ? !(l = c) : t : (n.dataTypes.unshift(c), o(c), !1)
}), l
}
var a = {}, s = e === $n;
return o(n.dataTypes[0]) || !a["*"] && o("*")
}
function H(e, n) {
var r, i, o = st.ajaxSettings.flatOptions || {};
for (r in n)n[r] !== t && ((o[r] ? e : i || (i = {}))[r] = n[r]);
return i && st.extend(!0, e, i), e
}
function M(e, n, r) {
var i, o, a, s, u = e.contents, l = e.dataTypes, c = e.responseFields;
for (o in c)o in r && (n[c[o]] = r[o]);
for (; "*" === l[0];)l.shift(), i === t && (i = e.mimeType || n.getResponseHeader("Content-Type"));
if (i)for (o in u)if (u[o] && u[o].test(i)) {
l.unshift(o);
break
}
if (l[0]in r)a = l[0]; else {
for (o in r) {
if (!l[0] || e.converters[o + " " + l[0]]) {
a = o;
break
}
s || (s = o)
}
a = a || s
}
return a ? (a !== l[0] && l.unshift(a), r[a]) : t
}
function q(e, t) {
var n, r, i, o, a = {}, s = 0, u = e.dataTypes.slice(), l = u[0];
if (e.dataFilter && (t = e.dataFilter(t, e.dataType)), u[1])for (n in e.converters)a[n.toLowerCase()] = e.converters[n];
for (; i = u[++s];)if ("*" !== i) {
if ("*" !== l && l !== i) {
if (n = a[l + " " + i] || a["* " + i], !n)for (r in a)if (o = r.split(" "), o[1] === i && (n = a[l + " " + o[0]] || a["* " + o[0]])) {
n === !0 ? n = a[r] : a[r] !== !0 && (i = o[0], u.splice(s--, 0, i));
break
}
if (n !== !0)if (n && e["throws"])t = n(t); else try {
t = n(t)
} catch (c) {
return {state: "parsererror", error: n ? c : "No conversion from " + l + " to " + i}
}
}
l = i
}
return {state: "success", data: t}
}
function _() {
try {
return new e.XMLHttpRequest
} catch (t) {
}
}
function F() {
try {
return new e.ActiveXObject("Microsoft.XMLHTTP")
} catch (t) {
}
}
function O() {
return setTimeout(function () {
Qn = t
}), Qn = st.now()
}
function B(e, t) {
st.each(t, function (t, n) {
for (var r = (rr[t] || []).concat(rr["*"]), i = 0, o = r.length; o > i; i++)if (r[i].call(e, t, n))return
})
}
function P(e, t, n) {
var r, i, o = 0, a = nr.length, s = st.Deferred().always(function () {
delete u.elem
}), u = function () {
if (i)return !1;
for (var t = Qn || O(), n = Math.max(0, l.startTime + l.duration - t), r = n / l.duration || 0, o = 1 - r, a = 0, u = l.tweens.length; u > a; a++)l.tweens[a].run(o);
return s.notifyWith(e, [l, o, n]), 1 > o && u ? n : (s.resolveWith(e, [l]), !1)
}, l = s.promise({
elem: e,
props: st.extend({}, t),
opts: st.extend(!0, {specialEasing: {}}, n),
originalProperties: t,
originalOptions: n,
startTime: Qn || O(),
duration: n.duration,
tweens: [],
createTween: function (t, n) {
var r = st.Tween(e, l.opts, t, n, l.opts.specialEasing[t] || l.opts.easing);
return l.tweens.push(r), r
},
stop: function (t) {
var n = 0, r = t ? l.tweens.length : 0;
if (i)return this;
for (i = !0; r > n; n++)l.tweens[n].run(1);
return t ? s.resolveWith(e, [l, t]) : s.rejectWith(e, [l, t]), this
}
}), c = l.props;
for (R(c, l.opts.specialEasing); a > o; o++)if (r = nr[o].call(l, e, c, l.opts))return r;
return B(l, c), st.isFunction(l.opts.start) && l.opts.start.call(e, l), st.fx.timer(st.extend(u, {
elem: e,
anim: l,
queue: l.opts.queue
})), l.progress(l.opts.progress).done(l.opts.done, l.opts.complete).fail(l.opts.fail).always(l.opts.always)
}
function R(e, t) {
var n, r, i, o, a;
for (n in e)if (r = st.camelCase(n), i = t[r], o = e[n], st.isArray(o) && (i = o[1], o = e[n] = o[0]), n !== r && (e[r] = o, delete e[n]), a = st.cssHooks[r], a && "expand"in a) {
o = a.expand(o), delete e[r];
for (n in o)n in e || (e[n] = o[n], t[n] = i)
} else t[r] = i
}
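    // W is the default animation prefilter: it wires the fx queue hooks, pins
    // overflow/display while width or height animate, and records per-element
    // "fxshow" state for show/hide/toggle (defaultPrefilter in the unminified source).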
function W(e, t, n) {
var r, i, o, a, s, u, l, c, f, p = this, d = e.style, h = {}, g = [], m = e.nodeType && w(e);
n.queue || (c = st._queueHooks(e, "fx"), null == c.unqueued && (c.unqueued = 0, f = c.empty.fire, c.empty.fire = function () {
c.unqueued || f()
}), c.unqueued++, p.always(function () {
p.always(function () {
c.unqueued--, st.queue(e, "fx").length || c.empty.fire()
})
})), 1 === e.nodeType && ("height"in t || "width"in t) && (n.overflow = [d.overflow, d.overflowX, d.overflowY], "inline" === st.css(e, "display") && "none" === st.css(e, "float") && (st.support.inlineBlockNeedsLayout && "inline" !== S(e.nodeName) ? d.zoom = 1 : d.display = "inline-block")), n.overflow && (d.overflow = "hidden", st.support.shrinkWrapBlocks || p.done(function () {
d.overflow = n.overflow[0], d.overflowX = n.overflow[1], d.overflowY = n.overflow[2]
}));
for (r in t)if (o = t[r], Zn.exec(o)) {
if (delete t[r], u = u || "toggle" === o, o === (m ? "hide" : "show"))continue;
g.push(r)
}
if (a = g.length) {
s = st._data(e, "fxshow") || st._data(e, "fxshow", {}), "hidden"in s && (m = s.hidden), u && (s.hidden = !m), m ? st(e).show() : p.done(function () {
st(e).hide()
}), p.done(function () {
var t;
st._removeData(e, "fxshow");
for (t in h)st.style(e, t, h[t])
});
for (r = 0; a > r; r++)i = g[r], l = p.createTween(i, m ? s[i] : 0), h[i] = s[i] || st.style(e, i), i in s || (s[i] = l.start, m && (l.end = l.start, l.start = "width" === i || "height" === i ? 1 : 0))
}
}
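    // $ is the Tween factory (st.Tween); I expands fx shorthands like slideToggle into
    // height/margin/padding maps (genFx); z resolves the window that owns a node for
    // offset/scroll math (getWindow).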
function $(e, t, n, r, i) {
return new $.prototype.init(e, t, n, r, i)
}
function I(e, t) {
var n, r = {height: e}, i = 0;
for (t = t ? 1 : 0; 4 > i; i += 2 - t)n = wn[i], r["margin" + n] = r["padding" + n] = e;
return t && (r.opacity = r.width = e), r
}
function z(e) {
return st.isWindow(e) ? e : 9 === e.nodeType ? e.defaultView || e.parentWindow : !1
}
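    // Core setup: V = document, Y = location, J/G save the previous jQuery/$ for
    // noConflict, Z is the version string ("1.9.0"), and st is the jQuery function
    // itself; the regexes below drive selector sniffing, JSON validation and camelCasing.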
var X, U, V = e.document, Y = e.location, J = e.jQuery, G = e.$, Q = {}, K = [], Z = "1.9.0", et = K.concat, tt = K.push, nt = K.slice, rt = K.indexOf, it = Q.toString, ot = Q.hasOwnProperty, at = Z.trim, st = function (e, t) {
return new st.fn.init(e, t, X)
}, ut = /[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/.source, lt = /\S+/g, ct = /^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g, ft = /^(?:(<[\w\W]+>)[^>]*|#([\w-]*))$/, pt = /^<(\w+)\s*\/?>(?:<\/\1>|)$/, dt = /^[\],:{}\s]*$/, ht = /(?:^|:|,)(?:\s*\[)+/g, gt = /\\(?:["\\\/bfnrt]|u[\da-fA-F]{4})/g, mt = /"[^"\\\r\n]*"|true|false|null|-?(?:\d+\.|)\d+(?:[eE][+-]?\d+|)/g, yt = /^-ms-/, vt = /-([\da-z])/gi, bt = function (e, t) {
return t.toUpperCase()
}, xt = function () {
V.addEventListener ? (V.removeEventListener("DOMContentLoaded", xt, !1), st.ready()) : "complete" === V.readyState && (V.detachEvent("onreadystatechange", xt), st.ready())
};
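    // st.fn: the jQuery prototype -- init() resolves selector strings, HTML fragments,
    // DOM nodes and functions, while pushStack/each/eq/slice/map provide the
    // chainable array-like API.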
st.fn = st.prototype = {
jquery: Z, constructor: st, init: function (e, n, r) {
var i, o;
if (!e)return this;
if ("string" == typeof e) {
if (i = "<" === e.charAt(0) && ">" === e.charAt(e.length - 1) && e.length >= 3 ? [null, e, null] : ft.exec(e), !i || !i[1] && n)return !n || n.jquery ? (n || r).find(e) : this.constructor(n).find(e);
if (i[1]) {
if (n = n instanceof st ? n[0] : n, st.merge(this, st.parseHTML(i[1], n && n.nodeType ? n.ownerDocument || n : V, !0)), pt.test(i[1]) && st.isPlainObject(n))for (i in n)st.isFunction(this[i]) ? this[i](n[i]) : this.attr(i, n[i]);
return this
}
if (o = V.getElementById(i[2]), o && o.parentNode) {
if (o.id !== i[2])return r.find(e);
this.length = 1, this[0] = o
}
return this.context = V, this.selector = e, this
}
return e.nodeType ? (this.context = this[0] = e, this.length = 1, this) : st.isFunction(e) ? r.ready(e) : (e.selector !== t && (this.selector = e.selector, this.context = e.context), st.makeArray(e, this))
}, selector: "", length: 0, size: function () { | return this.length
}, toArray: function () {
return nt.call(this)
}, get: function (e) {
return null == e ? this.toArray() : 0 > e ? this[this.length + e] : this[e]
}, pushStack: function (e) {
var t = st.merge(this.constructor(), e);
return t.prevObject = this, t.context = this.context, t
}, each: function (e, t) {
return st.each(this, e, t)
}, ready: function (e) {
return st.ready.promise().done(e), this
}, slice: function () {
return this.pushStack(nt.apply(this, arguments))
}, first: function () {
return this.eq(0)
}, last: function () {
return this.eq(-1)
}, eq: function (e) {
var t = this.length, n = +e + (0 > e ? t : 0);
return this.pushStack(n >= 0 && t > n ? [this[n]] : [])
}, map: function (e) {
return this.pushStack(st.map(this, function (t, n) {
return e.call(t, n, t)
}))
}, end: function () {
return this.prevObject || this.constructor(null)
}, push: tt, sort: [].sort, splice: [].splice
}, st.fn.init.prototype = st.fn, st.extend = st.fn.extend = function () {
var e, n, r, i, o, a, s = arguments[0] || {}, u = 1, l = arguments.length, c = !1;
for ("boolean" == typeof s && (c = s, s = arguments[1] || {}, u = 2), "object" == typeof s || st.isFunction(s) || (s = {}), l === u && (s = this, --u); l > u; u++)if (null != (e = arguments[u]))for (n in e)r = s[n], i = e[n], s !== i && (c && i && (st.isPlainObject(i) || (o = st.isArray(i))) ? (o ? (o = !1, a = r && st.isArray(r) ? r : []) : a = r && st.isPlainObject(r) ? r : {}, s[n] = st.extend(c, a, i)) : i !== t && (s[n] = i));
return s
}, st.extend({
noConflict: function (t) {
return e.$ === st && (e.$ = G), t && e.jQuery === st && (e.jQuery = J), st
}, isReady: !1, readyWait: 1, holdReady: function (e) {
e ? st.readyWait++ : st.ready(!0)
}, ready: function (e) {
if (e === !0 ? !--st.readyWait : !st.isReady) {
if (!V.body)return setTimeout(st.ready);
st.isReady = !0, e !== !0 && --st.readyWait > 0 || (U.resolveWith(V, [st]), st.fn.trigger && st(V).trigger("ready").off("ready"))
}
}, isFunction: function (e) {
return "function" === st.type(e)
}, isArray: Array.isArray || function (e) {
return "array" === st.type(e)
}, isWindow: function (e) {
return null != e && e == e.window
}, isNumeric: function (e) {
return !isNaN(parseFloat(e)) && isFinite(e)
}, type: function (e) {
return null == e ? e + "" : "object" == typeof e || "function" == typeof e ? Q[it.call(e)] || "object" : typeof e
}, isPlainObject: function (e) {
if (!e || "object" !== st.type(e) || e.nodeType || st.isWindow(e))return !1;
try {
if (e.constructor && !ot.call(e, "constructor") && !ot.call(e.constructor.prototype, "isPrototypeOf"))return !1
} catch (n) {
return !1
}
var r;
for (r in e);
return r === t || ot.call(e, r)
}, isEmptyObject: function (e) {
var t;
for (t in e)return !1;
return !0
}, error: function (e) {
throw Error(e)
}, parseHTML: function (e, t, n) {
if (!e || "string" != typeof e)return null;
"boolean" == typeof t && (n = t, t = !1), t = t || V;
var r = pt.exec(e), i = !n && [];
return r ? [t.createElement(r[1])] : (r = st.buildFragment([e], t, i), i && st(i).remove(), st.merge([], r.childNodes))
}, parseJSON: function (n) {
return e.JSON && e.JSON.parse ? e.JSON.parse(n) : null === n ? n : "string" == typeof n && (n = st.trim(n), n && dt.test(n.replace(gt, "@").replace(mt, "]").replace(ht, ""))) ? Function("return " + n)() : (st.error("Invalid JSON: " + n), t)
}, parseXML: function (n) {
var r, i;
if (!n || "string" != typeof n)return null;
try {
e.DOMParser ? (i = new DOMParser, r = i.parseFromString(n, "text/xml")) : (r = new ActiveXObject("Microsoft.XMLDOM"), r.async = "false", r.loadXML(n))
} catch (o) {
r = t
}
return r && r.documentElement && !r.getElementsByTagName("parsererror").length || st.error("Invalid XML: " + n), r
}, noop: function () {
}, globalEval: function (t) {
t && st.trim(t) && (e.execScript || function (t) {
e.eval.call(e, t)
})(t)
}, camelCase: function (e) {
return e.replace(yt, "ms-").replace(vt, bt)
}, nodeName: function (e, t) {
return e.nodeName && e.nodeName.toLowerCase() === t.toLowerCase()
}, each: function (e, t, r) {
var i, o = 0, a = e.length, s = n(e);
if (r) {
if (s)for (; a > o && (i = t.apply(e[o], r), i !== !1); o++); else for (o in e)if (i = t.apply(e[o], r), i === !1)break
} else if (s)for (; a > o && (i = t.call(e[o], o, e[o]), i !== !1); o++); else for (o in e)if (i = t.call(e[o], o, e[o]), i === !1)break;
return e
}, trim: at && !at.call("\ufeff\u00a0") ? function (e) {
return null == e ? "" : at.call(e)
} : function (e) {
return null == e ? "" : (e + "").replace(ct, "")
}, makeArray: function (e, t) {
var r = t || [];
return null != e && (n(Object(e)) ? st.merge(r, "string" == typeof e ? [e] : e) : tt.call(r, e)), r
}, inArray: function (e, t, n) {
var r;
if (t) {
if (rt)return rt.call(t, e, n);
for (r = t.length, n = n ? 0 > n ? Math.max(0, r + n) : n : 0; r > n; n++)if (n in t && t[n] === e)return n
}
return -1
}, merge: function (e, n) {
var r = n.length, i = e.length, o = 0;
if ("number" == typeof r)for (; r > o; o++)e[i++] = n[o]; else for (; n[o] !== t;)e[i++] = n[o++];
return e.length = i, e
}, grep: function (e, t, n) {
var r, i = [], o = 0, a = e.length;
for (n = !!n; a > o; o++)r = !!t(e[o], o), n !== r && i.push(e[o]);
return i
}, map: function (e, t, r) {
var i, o = 0, a = e.length, s = n(e), u = [];
if (s)for (; a > o; o++)i = t(e[o], o, r), null != i && (u[u.length] = i); else for (o in e)i = t(e[o], o, r), null != i && (u[u.length] = i);
return et.apply([], u)
}, guid: 1, proxy: function (e, n) {
var r, i, o;
return "string" == typeof n && (r = e[n], n = e, e = r), st.isFunction(e) ? (i = nt.call(arguments, 2), o = function () {
return e.apply(n || this, i.concat(nt.call(arguments)))
}, o.guid = e.guid = e.guid || st.guid++, o) : t
}, access: function (e, n, r, i, o, a, s) {
var u = 0, l = e.length, c = null == r;
if ("object" === st.type(r)) {
o = !0;
for (u in r)st.access(e, n, u, r[u], !0, a, s)
} else if (i !== t && (o = !0, st.isFunction(i) || (s = !0), c && (s ? (n.call(e, i), n = null) : (c = n, n = function (e, t, n) {
return c.call(st(e), n)
})), n))for (; l > u; u++)n(e[u], r, s ? i : i.call(e[u], u, n(e[u], r)));
return o ? e : c ? n.call(e) : l ? n(e[0], r) : a
}, now: function () {
return (new Date).getTime()
}
}), st.ready.promise = function (t) {
if (!U)if (U = st.Deferred(), "complete" === V.readyState)setTimeout(st.ready); else if (V.addEventListener)V.addEventListener("DOMContentLoaded", xt, !1), e.addEventListener("load", st.ready, !1); else {
V.attachEvent("onreadystatechange", xt), e.attachEvent("onload", st.ready);
var n = !1;
try {
n = null == e.frameElement && V.documentElement
} catch (r) {
}
n && n.doScroll && function i() {
if (!st.isReady) {
try {
n.doScroll("left")
} catch (e) {
return setTimeout(i, 50)
}
st.ready()
}
}()
}
return U.promise(t)
}, st.each("Boolean Number String Function Array Date RegExp Object Error".split(" "), function (e, t) {
Q["[object " + t + "]"] = t.toLowerCase()
}), X = st(V);
var Tt = {};
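    // st.Callbacks: a multi-purpose callback list configured by the space-separated
    // flags "once", "memory", "unique" and "stopOnFalse" (Tt caches parsed flag strings).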
st.Callbacks = function (e) {
e = "string" == typeof e ? Tt[e] || r(e) : st.extend({}, e);
var n, i, o, a, s, u, l = [], c = !e.once && [], f = function (t) {
for (n = e.memory && t, i = !0, u = a || 0, a = 0, s = l.length, o = !0; l && s > u; u++)if (l[u].apply(t[0], t[1]) === !1 && e.stopOnFalse) {
n = !1;
break
}
o = !1, l && (c ? c.length && f(c.shift()) : n ? l = [] : p.disable())
}, p = {
add: function () {
if (l) {
var t = l.length;
(function r(t) {
st.each(t, function (t, n) {
var i = st.type(n);
"function" === i ? e.unique && p.has(n) || l.push(n) : n && n.length && "string" !== i && r(n)
})
})(arguments), o ? s = l.length : n && (a = t, f(n))
}
return this
}, remove: function () {
return l && st.each(arguments, function (e, t) {
for (var n; (n = st.inArray(t, l, n)) > -1;)l.splice(n, 1), o && (s >= n && s--, u >= n && u--)
}), this
}, has: function (e) {
return st.inArray(e, l) > -1
}, empty: function () {
return l = [], this
}, disable: function () {
return l = c = n = t, this
}, disabled: function () {
return !l
}, lock: function () {
return c = t, n || p.disable(), this
}, locked: function () {
return !c
}, fireWith: function (e, t) {
return t = t || [], t = [e, t.slice ? t.slice() : t], !l || i && !c || (o ? c.push(t) : f(t)), this
}, fire: function () {
return p.fireWith(this, arguments), this
}, fired: function () {
return !!i
}
};
return p
}, st.extend({
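    // st.Deferred / st.when: the promise implementation, built from three Callbacks
    // lists (resolve, reject, notify) plus then/pipe chaining.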
Deferred: function (e) {
var t = [["resolve", "done", st.Callbacks("once memory"), "resolved"], ["reject", "fail", st.Callbacks("once memory"), "rejected"], ["notify", "progress", st.Callbacks("memory")]], n = "pending", r = {
state: function () {
return n
}, always: function () {
return i.done(arguments).fail(arguments), this
}, then: function () {
var e = arguments;
return st.Deferred(function (n) {
st.each(t, function (t, o) {
var a = o[0], s = st.isFunction(e[t]) && e[t];
i[o[1]](function () {
var e = s && s.apply(this, arguments);
e && st.isFunction(e.promise) ? e.promise().done(n.resolve).fail(n.reject).progress(n.notify) : n[a + "With"](this === r ? n.promise() : this, s ? [e] : arguments)
})
}), e = null
}).promise()
}, promise: function (e) {
return null != e ? st.extend(e, r) : r
}
}, i = {};
return r.pipe = r.then, st.each(t, function (e, o) {
var a = o[2], s = o[3];
r[o[1]] = a.add, s && a.add(function () {
n = s
}, t[1 ^ e][2].disable, t[2][2].lock), i[o[0]] = function () {
return i[o[0] + "With"](this === i ? r : this, arguments), this
}, i[o[0] + "With"] = a.fireWith
}), r.promise(i), e && e.call(i, i), i
}, when: function (e) {
var t, n, r, i = 0, o = nt.call(arguments), a = o.length, s = 1 !== a || e && st.isFunction(e.promise) ? a : 0, u = 1 === s ? e : st.Deferred(), l = function (e, n, r) {
return function (i) {
n[e] = this, r[e] = arguments.length > 1 ? nt.call(arguments) : i, r === t ? u.notifyWith(n, r) : --s || u.resolveWith(n, r)
}
};
if (a > 1)for (t = Array(a), n = Array(a), r = Array(a); a > i; i++)o[i] && st.isFunction(o[i].promise) ? o[i].promise().done(l(i, r, o)).fail(u.reject).progress(l(i, n, t)) : --s;
return s || u.resolveWith(r, o), u.promise()
}
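    // st.support: one-shot feature detection for legacy browser quirks -- attribute
    // handling, cloning, box-sizing, offsets and event bubbling.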
}), st.support = function () {
var n, r, i, o, a, s, u, l, c, f, p = V.createElement("div");
if (p.setAttribute("className", "t"), p.innerHTML = " <link/><table></table><a href='/a'>a</a><input type='checkbox'/>", r = p.getElementsByTagName("*"), i = p.getElementsByTagName("a")[0], !r || !i || !r.length)return {};
o = V.createElement("select"), a = o.appendChild(V.createElement("option")), s = p.getElementsByTagName("input")[0], i.style.cssText = "top:1px;float:left;opacity:.5", n = {
getSetAttribute: "t" !== p.className,
leadingWhitespace: 3 === p.firstChild.nodeType,
tbody: !p.getElementsByTagName("tbody").length,
htmlSerialize: !!p.getElementsByTagName("link").length,
style: /top/.test(i.getAttribute("style")),
hrefNormalized: "/a" === i.getAttribute("href"),
opacity: /^0.5/.test(i.style.opacity),
cssFloat: !!i.style.cssFloat,
checkOn: !!s.value,
optSelected: a.selected,
enctype: !!V.createElement("form").enctype,
html5Clone: "<:nav></:nav>" !== V.createElement("nav").cloneNode(!0).outerHTML,
boxModel: "CSS1Compat" === V.compatMode,
deleteExpando: !0,
noCloneEvent: !0,
inlineBlockNeedsLayout: !1,
shrinkWrapBlocks: !1,
reliableMarginRight: !0,
boxSizingReliable: !0,
pixelPosition: !1
}, s.checked = !0, n.noCloneChecked = s.cloneNode(!0).checked, o.disabled = !0, n.optDisabled = !a.disabled;
try {
delete p.test
} catch (d) {
n.deleteExpando = !1
}
s = V.createElement("input"), s.setAttribute("value", ""), n.input = "" === s.getAttribute("value"), s.value = "t", s.setAttribute("type", "radio"), n.radioValue = "t" === s.value, s.setAttribute("checked", "t"), s.setAttribute("name", "t"), u = V.createDocumentFragment(), u.appendChild(s), n.appendChecked = s.checked, n.checkClone = u.cloneNode(!0).cloneNode(!0).lastChild.checked, p.attachEvent && (p.attachEvent("onclick", function () {
n.noCloneEvent = !1
}), p.cloneNode(!0).click());
for (f in{
submit: !0,
change: !0,
focusin: !0
})p.setAttribute(l = "on" + f, "t"), n[f + "Bubbles"] = l in e || p.attributes[l].expando === !1;
return p.style.backgroundClip = "content-box", p.cloneNode(!0).style.backgroundClip = "", n.clearCloneStyle = "content-box" === p.style.backgroundClip, st(function () {
var r, i, o, a = "padding:0;margin:0;border:0;display:block;box-sizing:content-box;-moz-box-sizing:content-box;-webkit-box-sizing:content-box;", s = V.getElementsByTagName("body")[0];
s && (r = V.createElement("div"), r.style.cssText = "border:0;width:0;height:0;position:absolute;top:0;left:-9999px;margin-top:1px", s.appendChild(r).appendChild(p), p.innerHTML = "<table><tr><td></td><td>t</td></tr></table>", o = p.getElementsByTagName("td"), o[0].style.cssText = "padding:0;margin:0;border:0;display:none", c = 0 === o[0].offsetHeight, o[0].style.display = "", o[1].style.display = "none", n.reliableHiddenOffsets = c && 0 === o[0].offsetHeight, p.innerHTML = "", p.style.cssText = "box-sizing:border-box;-moz-box-sizing:border-box;-webkit-box-sizing:border-box;padding:1px;border:1px;display:block;width:4px;margin-top:1%;position:absolute;top:1%;", n.boxSizing = 4 === p.offsetWidth, n.doesNotIncludeMarginInBodyOffset = 1 !== s.offsetTop, e.getComputedStyle && (n.pixelPosition = "1%" !== (e.getComputedStyle(p, null) || {}).top, n.boxSizingReliable = "4px" === (e.getComputedStyle(p, null) || {width: "4px"}).width, i = p.appendChild(V.createElement("div")), i.style.cssText = p.style.cssText = a, i.style.marginRight = i.style.width = "0", p.style.width = "1px", n.reliableMarginRight = !parseFloat((e.getComputedStyle(i, null) || {}).marginRight)), p.style.zoom !== t && (p.innerHTML = "", p.style.cssText = a + "width:1px;padding:1px;display:inline;zoom:1", n.inlineBlockNeedsLayout = 3 === p.offsetWidth, p.style.display = "block", p.innerHTML = "<div></div>", p.firstChild.style.width = "5px", n.shrinkWrapBlocks = 3 !== p.offsetWidth, s.style.zoom = 1), s.removeChild(r), r = p = o = i = null)
}), r = o = u = a = i = s = null, n
}();
var wt = /(?:\{[\s\S]*\}|\[[\s\S]*\])$/, Nt = /([A-Z])/g;
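    // Data module: st.data/_data store values per element in the central st.cache,
    // keyed by the unique st.expando, with HTML5 data-* attributes pulled in lazily.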
st.extend({
cache: {},
expando: "jQuery" + (Z + Math.random()).replace(/\D/g, ""),
noData: {embed: !0, object: "clsid:D27CDB6E-AE6D-11cf-96B8-444553540000", applet: !0},
hasData: function (e) {
return e = e.nodeType ? st.cache[e[st.expando]] : e[st.expando], !!e && !s(e)
},
data: function (e, t, n) {
return i(e, t, n, !1)
},
removeData: function (e, t) {
return o(e, t, !1)
},
_data: function (e, t, n) {
return i(e, t, n, !0)
},
_removeData: function (e, t) {
return o(e, t, !0)
},
acceptData: function (e) {
var t = e.nodeName && st.noData[e.nodeName.toLowerCase()];
return !t || t !== !0 && e.getAttribute("classid") === t
}
}), st.fn.extend({
data: function (e, n) {
var r, i, o = this[0], s = 0, u = null;
if (e === t) {
if (this.length && (u = st.data(o), 1 === o.nodeType && !st._data(o, "parsedAttrs"))) {
for (r = o.attributes; r.length > s; s++)i = r[s].name, i.indexOf("data-") || (i = st.camelCase(i.substring(5)), a(o, i, u[i]));
st._data(o, "parsedAttrs", !0)
}
return u
}
return "object" == typeof e ? this.each(function () {
st.data(this, e)
}) : st.access(this, function (n) {
return n === t ? o ? a(o, e, st.data(o, e)) : null : (this.each(function () {
st.data(this, e, n)
}), t)
}, null, n, arguments.length > 1, null, !0)
}, removeData: function (e) {
return this.each(function () {
st.removeData(this, e)
})
}
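    // Queue module: per-element named queues (default "fx") that serialize animations
    // and other deferred work via queue/dequeue/_queueHooks.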
}), st.extend({
queue: function (e, n, r) {
var i;
return e ? (n = (n || "fx") + "queue", i = st._data(e, n), r && (!i || st.isArray(r) ? i = st._data(e, n, st.makeArray(r)) : i.push(r)), i || []) : t
}, dequeue: function (e, t) {
t = t || "fx";
var n = st.queue(e, t), r = n.length, i = n.shift(), o = st._queueHooks(e, t), a = function () {
st.dequeue(e, t)
};
"inprogress" === i && (i = n.shift(), r--), o.cur = i, i && ("fx" === t && n.unshift("inprogress"), delete o.stop, i.call(e, a, o)), !r && o && o.empty.fire()
}, _queueHooks: function (e, t) {
var n = t + "queueHooks";
return st._data(e, n) || st._data(e, n, {
empty: st.Callbacks("once memory").add(function () {
st._removeData(e, t + "queue"), st._removeData(e, n)
})
})
}
}), st.fn.extend({
queue: function (e, n) {
var r = 2;
return "string" != typeof e && (n = e, e = "fx", r--), r > arguments.length ? st.queue(this[0], e) : n === t ? this : this.each(function () {
var t = st.queue(this, e, n);
st._queueHooks(this, e), "fx" === e && "inprogress" !== t[0] && st.dequeue(this, e)
})
}, dequeue: function (e) {
return this.each(function () {
st.dequeue(this, e)
})
}, delay: function (e, t) {
return e = st.fx ? st.fx.speeds[e] || e : e, t = t || "fx", this.queue(t, function (t, n) {
var r = setTimeout(t, e);
n.stop = function () {
clearTimeout(r)
}
})
}, clearQueue: function (e) {
return this.queue(e || "fx", [])
}, promise: function (e, n) {
var r, i = 1, o = st.Deferred(), a = this, s = this.length, u = function () {
--i || o.resolveWith(a, [a])
};
for ("string" != typeof e && (n = e, e = t), e = e || "fx"; s--;)r = st._data(a[s], e + "queueHooks"), r && r.empty && (i++, r.empty.add(u));
return u(), o.promise(n)
}
});
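    // Attributes module: attr/prop/val and the class methods, plus hooks (Ct/kt) that
    // paper over IE's get/setAttribute and boolean-attribute quirks.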
var Ct, kt, Et = /[\t\r\n]/g, St = /\r/g, At = /^(?:input|select|textarea|button|object)$/i, jt = /^(?:a|area)$/i, Dt = /^(?:checked|selected|autofocus|autoplay|async|controls|defer|disabled|hidden|loop|multiple|open|readonly|required|scoped)$/i, Lt = /^(?:checked|selected)$/i, Ht = st.support.getSetAttribute, Mt = st.support.input;
st.fn.extend({
attr: function (e, t) {
return st.access(this, st.attr, e, t, arguments.length > 1)
}, removeAttr: function (e) {
return this.each(function () {
st.removeAttr(this, e)
})
}, prop: function (e, t) {
return st.access(this, st.prop, e, t, arguments.length > 1)
}, removeProp: function (e) {
return e = st.propFix[e] || e, this.each(function () {
try {
this[e] = t, delete this[e]
} catch (n) {
}
})
}, addClass: function (e) {
var t, n, r, i, o, a = 0, s = this.length, u = "string" == typeof e && e;
if (st.isFunction(e))return this.each(function (t) {
st(this).addClass(e.call(this, t, this.className))
});
if (u)for (t = (e || "").match(lt) || []; s > a; a++)if (n = this[a], r = 1 === n.nodeType && (n.className ? (" " + n.className + " ").replace(Et, " ") : " ")) {
for (o = 0; i = t[o++];)0 > r.indexOf(" " + i + " ") && (r += i + " ");
n.className = st.trim(r)
}
return this
}, removeClass: function (e) {
var t, n, r, i, o, a = 0, s = this.length, u = 0 === arguments.length || "string" == typeof e && e;
if (st.isFunction(e))return this.each(function (t) {
st(this).removeClass(e.call(this, t, this.className))
});
if (u)for (t = (e || "").match(lt) || []; s > a; a++)if (n = this[a], r = 1 === n.nodeType && (n.className ? (" " + n.className + " ").replace(Et, " ") : "")) {
for (o = 0; i = t[o++];)for (; r.indexOf(" " + i + " ") >= 0;)r = r.replace(" " + i + " ", " ");
n.className = e ? st.trim(r) : ""
}
return this
}, toggleClass: function (e, t) {
var n = typeof e, r = "boolean" == typeof t;
return st.isFunction(e) ? this.each(function (n) {
st(this).toggleClass(e.call(this, n, this.className, t), t)
}) : this.each(function () {
if ("string" === n)for (var i, o = 0, a = st(this), s = t, u = e.match(lt) || []; i = u[o++];)s = r ? s : !a.hasClass(i), a[s ? "addClass" : "removeClass"](i); else("undefined" === n || "boolean" === n) && (this.className && st._data(this, "__className__", this.className), this.className = this.className || e === !1 ? "" : st._data(this, "__className__") || "")
})
}, hasClass: function (e) {
for (var t = " " + e + " ", n = 0, r = this.length; r > n; n++)if (1 === this[n].nodeType && (" " + this[n].className + " ").replace(Et, " ").indexOf(t) >= 0)return !0;
return !1
}, val: function (e) {
var n, r, i, o = this[0];
{
if (arguments.length)return i = st.isFunction(e), this.each(function (r) {
var o, a = st(this);
1 === this.nodeType && (o = i ? e.call(this, r, a.val()) : e, null == o ? o = "" : "number" == typeof o ? o += "" : st.isArray(o) && (o = st.map(o, function (e) {
return null == e ? "" : e + ""
})), n = st.valHooks[this.type] || st.valHooks[this.nodeName.toLowerCase()], n && "set"in n && n.set(this, o, "value") !== t || (this.value = o))
});
if (o)return n = st.valHooks[o.type] || st.valHooks[o.nodeName.toLowerCase()], n && "get"in n && (r = n.get(o, "value")) !== t ? r : (r = o.value, "string" == typeof r ? r.replace(St, "") : null == r ? "" : r)
}
}
}), st.extend({
valHooks: {
option: {
get: function (e) {
var t = e.attributes.value;
return !t || t.specified ? e.value : e.text
}
}, select: {
get: function (e) {
for (var t, n, r = e.options, i = e.selectedIndex, o = "select-one" === e.type || 0 > i, a = o ? null : [], s = o ? i + 1 : r.length, u = 0 > i ? s : o ? i : 0; s > u; u++)if (n = r[u], !(!n.selected && u !== i || (st.support.optDisabled ? n.disabled : null !== n.getAttribute("disabled")) || n.parentNode.disabled && st.nodeName(n.parentNode, "optgroup"))) {
if (t = st(n).val(), o)return t;
a.push(t)
}
return a
}, set: function (e, t) {
var n = st.makeArray(t);
return st(e).find("option").each(function () {
this.selected = st.inArray(st(this).val(), n) >= 0
}), n.length || (e.selectedIndex = -1), n
}
}
},
attr: function (e, n, r) {
var i, o, a, s = e.nodeType;
if (e && 3 !== s && 8 !== s && 2 !== s)return e.getAttribute === t ? st.prop(e, n, r) : (a = 1 !== s || !st.isXMLDoc(e), a && (n = n.toLowerCase(), o = st.attrHooks[n] || (Dt.test(n) ? kt : Ct)), r === t ? o && a && "get"in o && null !== (i = o.get(e, n)) ? i : (e.getAttribute !== t && (i = e.getAttribute(n)), null == i ? t : i) : null !== r ? o && a && "set"in o && (i = o.set(e, r, n)) !== t ? i : (e.setAttribute(n, r + ""), r) : (st.removeAttr(e, n), t))
},
removeAttr: function (e, t) {
var n, r, i = 0, o = t && t.match(lt);
if (o && 1 === e.nodeType)for (; n = o[i++];)r = st.propFix[n] || n, Dt.test(n) ? !Ht && Lt.test(n) ? e[st.camelCase("default-" + n)] = e[r] = !1 : e[r] = !1 : st.attr(e, n, ""), e.removeAttribute(Ht ? n : r)
},
attrHooks: {
type: {
set: function (e, t) {
if (!st.support.radioValue && "radio" === t && st.nodeName(e, "input")) {
var n = e.value;
return e.setAttribute("type", t), n && (e.value = n), t
}
}
}
},
propFix: {
tabindex: "tabIndex",
readonly: "readOnly",
"for": "htmlFor",
"class": "className",
maxlength: "maxLength",
cellspacing: "cellSpacing",
cellpadding: "cellPadding",
rowspan: "rowSpan",
colspan: "colSpan",
usemap: "useMap",
frameborder: "frameBorder",
contenteditable: "contentEditable"
},
prop: function (e, n, r) {
var i, o, a, s = e.nodeType;
if (e && 3 !== s && 8 !== s && 2 !== s)return a = 1 !== s || !st.isXMLDoc(e), a && (n = st.propFix[n] || n, o = st.propHooks[n]), r !== t ? o && "set"in o && (i = o.set(e, r, n)) !== t ? i : e[n] = r : o && "get"in o && null !== (i = o.get(e, n)) ? i : e[n]
},
propHooks: {
tabIndex: {
get: function (e) {
var n = e.getAttributeNode("tabindex");
return n && n.specified ? parseInt(n.value, 10) : At.test(e.nodeName) || jt.test(e.nodeName) && e.href ? 0 : t
}
}
}
}), kt = {
get: function (e, n) {
var r = st.prop(e, n), i = "boolean" == typeof r && e.getAttribute(n), o = "boolean" == typeof r ? Mt && Ht ? null != i : Lt.test(n) ? e[st.camelCase("default-" + n)] : !!i : e.getAttributeNode(n);
return o && o.value !== !1 ? n.toLowerCase() : t
}, set: function (e, t, n) {
return t === !1 ? st.removeAttr(e, n) : Mt && Ht || !Lt.test(n) ? e.setAttribute(!Ht && st.propFix[n] || n, n) : e[st.camelCase("default-" + n)] = e[n] = !0, n
}
}, Mt && Ht || (st.attrHooks.value = {
get: function (e, n) {
var r = e.getAttributeNode(n);
return st.nodeName(e, "input") ? e.defaultValue : r && r.specified ? r.value : t
}, set: function (e, n, r) {
return st.nodeName(e, "input") ? (e.defaultValue = n, t) : Ct && Ct.set(e, n, r)
}
}), Ht || (Ct = st.valHooks.button = {
get: function (e, n) {
var r = e.getAttributeNode(n);
return r && ("id" === n || "name" === n || "coords" === n ? "" !== r.value : r.specified) ? r.value : t
}, set: function (e, n, r) {
var i = e.getAttributeNode(r);
return i || e.setAttributeNode(i = e.ownerDocument.createAttribute(r)), i.value = n += "", "value" === r || n === e.getAttribute(r) ? n : t
}
}, st.attrHooks.contenteditable = {
get: Ct.get, set: function (e, t, n) {
Ct.set(e, "" === t ? !1 : t, n)
}
}, st.each(["width", "height"], function (e, n) {
st.attrHooks[n] = st.extend(st.attrHooks[n], {
set: function (e, r) {
return "" === r ? (e.setAttribute(n, "auto"), r) : t
}
})
})), st.support.hrefNormalized || (st.each(["href", "src", "width", "height"], function (e, n) {
st.attrHooks[n] = st.extend(st.attrHooks[n], {
get: function (e) {
var r = e.getAttribute(n, 2);
return null == r ? t : r
}
})
}), st.each(["href", "src"], function (e, t) {
st.propHooks[t] = {
get: function (e) {
return e.getAttribute(t, 4)
}
}
})), st.support.style || (st.attrHooks.style = {
get: function (e) {
return e.style.cssText || t
}, set: function (e, t) {
return e.style.cssText = t + ""
}
}), st.support.optSelected || (st.propHooks.selected = st.extend(st.propHooks.selected, {
get: function (e) {
var t = e.parentNode;
return t && (t.selectedIndex, t.parentNode && t.parentNode.selectedIndex), null
}
})), st.support.enctype || (st.propFix.enctype = "encoding"), st.support.checkOn || st.each(["radio", "checkbox"], function () {
st.valHooks[this] = {
get: function (e) {
return null === e.getAttribute("value") ? "on" : e.value
}
}
}), st.each(["radio", "checkbox"], function () {
st.valHooks[this] = st.extend(st.valHooks[this], {
set: function (e, n) {
return st.isArray(n) ? e.checked = st.inArray(st(e).val(), n) >= 0 : t
}
})
});
var qt = /^(?:input|select|textarea)$/i, _t = /^key/, Ft = /^(?:mouse|contextmenu)|click/, Ot = /^(?:focusinfocus|focusoutblur)$/, Bt = /^([^.]*)(?:\.(.+)|)$/;
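    // Event module: st.event.add/remove/trigger/dispatch implement namespaced,
    // delegated events, with special-event shims below for submit/change/focusin bubbling.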
st.event = {
global: {},
add: function (e, n, r, i, o) {
var a, s, u, l, c, f, p, d, h, g, m, y = 3 !== e.nodeType && 8 !== e.nodeType && st._data(e);
if (y) {
for (r.handler && (a = r, r = a.handler, o = a.selector), r.guid || (r.guid = st.guid++), (l = y.events) || (l = y.events = {}), (s = y.handle) || (s = y.handle = function (e) {
return st === t || e && st.event.triggered === e.type ? t : st.event.dispatch.apply(s.elem, arguments)
}, s.elem = e), n = (n || "").match(lt) || [""], c = n.length; c--;)u = Bt.exec(n[c]) || [], h = m = u[1], g = (u[2] || "").split(".").sort(), p = st.event.special[h] || {}, h = (o ? p.delegateType : p.bindType) || h, p = st.event.special[h] || {}, f = st.extend({
type: h,
origType: m,
data: i,
handler: r,
guid: r.guid,
selector: o,
needsContext: o && st.expr.match.needsContext.test(o),
namespace: g.join(".")
}, a), (d = l[h]) || (d = l[h] = [], d.delegateCount = 0, p.setup && p.setup.call(e, i, g, s) !== !1 || (e.addEventListener ? e.addEventListener(h, s, !1) : e.attachEvent && e.attachEvent("on" + h, s))), p.add && (p.add.call(e, f), f.handler.guid || (f.handler.guid = r.guid)), o ? d.splice(d.delegateCount++, 0, f) : d.push(f), st.event.global[h] = !0;
e = null
}
},
remove: function (e, t, n, r, i) {
var o, a, s, u, l, c, f, p, d, h, g, m = st.hasData(e) && st._data(e);
if (m && (u = m.events)) {
for (t = (t || "").match(lt) || [""], l = t.length; l--;)if (s = Bt.exec(t[l]) || [], d = g = s[1], h = (s[2] || "").split(".").sort(), d) {
for (f = st.event.special[d] || {}, d = (r ? f.delegateType : f.bindType) || d, p = u[d] || [], s = s[2] && RegExp("(^|\\.)" + h.join("\\.(?:.*\\.|)") + "(\\.|$)"), a = o = p.length; o--;)c = p[o], !i && g !== c.origType || n && n.guid !== c.guid || s && !s.test(c.namespace) || r && r !== c.selector && ("**" !== r || !c.selector) || (p.splice(o, 1), c.selector && p.delegateCount--, f.remove && f.remove.call(e, c));
a && !p.length && (f.teardown && f.teardown.call(e, h, m.handle) !== !1 || st.removeEvent(e, d, m.handle), delete u[d])
} else for (d in u)st.event.remove(e, d + t[l], n, r, !0);
st.isEmptyObject(u) && (delete m.handle, st._removeData(e, "events"))
}
},
trigger: function (n, r, i, o) {
var a, s, u, l, c, f, p, d = [i || V], h = n.type || n, g = n.namespace ? n.namespace.split(".") : [];
if (s = u = i = i || V, 3 !== i.nodeType && 8 !== i.nodeType && !Ot.test(h + st.event.triggered) && (h.indexOf(".") >= 0 && (g = h.split("."), h = g.shift(), g.sort()), c = 0 > h.indexOf(":") && "on" + h, n = n[st.expando] ? n : new st.Event(h, "object" == typeof n && n), n.isTrigger = !0, n.namespace = g.join("."), n.namespace_re = n.namespace ? RegExp("(^|\\.)" + g.join("\\.(?:.*\\.|)") + "(\\.|$)") : null, n.result = t, n.target || (n.target = i), r = null == r ? [n] : st.makeArray(r, [n]), p = st.event.special[h] || {}, o || !p.trigger || p.trigger.apply(i, r) !== !1)) {
if (!o && !p.noBubble && !st.isWindow(i)) {
for (l = p.delegateType || h, Ot.test(l + h) || (s = s.parentNode); s; s = s.parentNode)d.push(s), u = s;
u === (i.ownerDocument || V) && d.push(u.defaultView || u.parentWindow || e)
}
for (a = 0; (s = d[a++]) && !n.isPropagationStopped();)n.type = a > 1 ? l : p.bindType || h, f = (st._data(s, "events") || {})[n.type] && st._data(s, "handle"), f && f.apply(s, r), f = c && s[c], f && st.acceptData(s) && f.apply && f.apply(s, r) === !1 && n.preventDefault();
if (n.type = h, !(o || n.isDefaultPrevented() || p._default && p._default.apply(i.ownerDocument, r) !== !1 || "click" === h && st.nodeName(i, "a") || !st.acceptData(i) || !c || !i[h] || st.isWindow(i))) {
u = i[c], u && (i[c] = null), st.event.triggered = h;
try {
i[h]()
} catch (m) {
}
st.event.triggered = t, u && (i[c] = u)
}
return n.result
}
},
dispatch: function (e) {
e = st.event.fix(e);
var n, r, i, o, a, s = [], u = nt.call(arguments), l = (st._data(this, "events") || {})[e.type] || [], c = st.event.special[e.type] || {};
if (u[0] = e, e.delegateTarget = this, !c.preDispatch || c.preDispatch.call(this, e) !== !1) {
for (s = st.event.handlers.call(this, e, l), n = 0; (o = s[n++]) && !e.isPropagationStopped();)for (e.currentTarget = o.elem, r = 0; (a = o.handlers[r++]) && !e.isImmediatePropagationStopped();)(!e.namespace_re || e.namespace_re.test(a.namespace)) && (e.handleObj = a, e.data = a.data, i = ((st.event.special[a.origType] || {}).handle || a.handler).apply(o.elem, u), i !== t && (e.result = i) === !1 && (e.preventDefault(), e.stopPropagation()));
return c.postDispatch && c.postDispatch.call(this, e), e.result
}
},
handlers: function (e, n) {
var r, i, o, a, s = [], u = n.delegateCount, l = e.target;
if (u && l.nodeType && (!e.button || "click" !== e.type))for (; l != this; l = l.parentNode || this)if (l.disabled !== !0 || "click" !== e.type) {
for (i = [], r = 0; u > r; r++)a = n[r], o = a.selector + " ", i[o] === t && (i[o] = a.needsContext ? st(o, this).index(l) >= 0 : st.find(o, this, null, [l]).length), i[o] && i.push(a);
i.length && s.push({elem: l, handlers: i})
}
return n.length > u && s.push({elem: this, handlers: n.slice(u)}), s
},
fix: function (e) {
if (e[st.expando])return e;
var t, n, r = e, i = st.event.fixHooks[e.type] || {}, o = i.props ? this.props.concat(i.props) : this.props;
for (e = new st.Event(r), t = o.length; t--;)n = o[t], e[n] = r[n];
return e.target || (e.target = r.srcElement || V), 3 === e.target.nodeType && (e.target = e.target.parentNode), e.metaKey = !!e.metaKey, i.filter ? i.filter(e, r) : e
},
props: "altKey bubbles cancelable ctrlKey currentTarget eventPhase metaKey relatedTarget shiftKey target timeStamp view which".split(" "),
fixHooks: {},
keyHooks: {
props: "char charCode key keyCode".split(" "), filter: function (e, t) {
return null == e.which && (e.which = null != t.charCode ? t.charCode : t.keyCode), e
}
},
mouseHooks: {
props: "button buttons clientX clientY fromElement offsetX offsetY pageX pageY screenX screenY toElement".split(" "),
filter: function (e, n) {
var r, i, o, a = n.button, s = n.fromElement;
return null == e.pageX && null != n.clientX && (r = e.target.ownerDocument || V, i = r.documentElement, o = r.body, e.pageX = n.clientX + (i && i.scrollLeft || o && o.scrollLeft || 0) - (i && i.clientLeft || o && o.clientLeft || 0), e.pageY = n.clientY + (i && i.scrollTop || o && o.scrollTop || 0) - (i && i.clientTop || o && o.clientTop || 0)), !e.relatedTarget && s && (e.relatedTarget = s === e.target ? n.toElement : s), e.which || a === t || (e.which = 1 & a ? 1 : 2 & a ? 3 : 4 & a ? 2 : 0), e
}
},
special: {
load: {noBubble: !0}, click: {
trigger: function () {
return st.nodeName(this, "input") && "checkbox" === this.type && this.click ? (this.click(), !1) : t
}
}, focus: {
trigger: function () {
if (this !== V.activeElement && this.focus)try {
return this.focus(), !1
} catch (e) {
}
}, delegateType: "focusin"
}, blur: {
trigger: function () {
return this === V.activeElement && this.blur ? (this.blur(), !1) : t
}, delegateType: "focusout"
}, beforeunload: {
postDispatch: function (e) {
e.result !== t && (e.originalEvent.returnValue = e.result)
}
}
},
simulate: function (e, t, n, r) {
var i = st.extend(new st.Event, n, {type: e, isSimulated: !0, originalEvent: {}});
r ? st.event.trigger(i, null, t) : st.event.dispatch.call(t, i), i.isDefaultPrevented() && n.preventDefault()
}
}, st.removeEvent = V.removeEventListener ? function (e, t, n) {
e.removeEventListener && e.removeEventListener(t, n, !1)
} : function (e, n, r) {
var i = "on" + n;
e.detachEvent && (e[i] === t && (e[i] = null), e.detachEvent(i, r))
}, st.Event = function (e, n) {
return this instanceof st.Event ? (e && e.type ? (this.originalEvent = e, this.type = e.type, this.isDefaultPrevented = e.defaultPrevented || e.returnValue === !1 || e.getPreventDefault && e.getPreventDefault() ? u : l) : this.type = e, n && st.extend(this, n), this.timeStamp = e && e.timeStamp || st.now(), this[st.expando] = !0, t) : new st.Event(e, n)
}, st.Event.prototype = {
isDefaultPrevented: l,
isPropagationStopped: l,
isImmediatePropagationStopped: l,
preventDefault: function () {
var e = this.originalEvent;
this.isDefaultPrevented = u, e && (e.preventDefault ? e.preventDefault() : e.returnValue = !1)
},
stopPropagation: function () {
var e = this.originalEvent;
this.isPropagationStopped = u, e && (e.stopPropagation && e.stopPropagation(), e.cancelBubble = !0)
},
stopImmediatePropagation: function () {
this.isImmediatePropagationStopped = u, this.stopPropagation()
}
}, st.each({mouseenter: "mouseover", mouseleave: "mouseout"}, function (e, t) {
st.event.special[e] = {
delegateType: t, bindType: t, handle: function (e) {
var n, r = this, i = e.relatedTarget, o = e.handleObj;
return (!i || i !== r && !st.contains(r, i)) && (e.type = o.origType, n = o.handler.apply(this, arguments), e.type = t), n
}
}
}), st.support.submitBubbles || (st.event.special.submit = {
setup: function () {
return st.nodeName(this, "form") ? !1 : (st.event.add(this, "click._submit keypress._submit", function (e) {
var n = e.target, r = st.nodeName(n, "input") || st.nodeName(n, "button") ? n.form : t;
r && !st._data(r, "submitBubbles") && (st.event.add(r, "submit._submit", function (e) {
e._submit_bubble = !0
}), st._data(r, "submitBubbles", !0))
}), t)
}, postDispatch: function (e) {
e._submit_bubble && (delete e._submit_bubble, this.parentNode && !e.isTrigger && st.event.simulate("submit", this.parentNode, e, !0))
}, teardown: function () {
return st.nodeName(this, "form") ? !1 : (st.event.remove(this, "._submit"), t)
}
}), st.support.changeBubbles || (st.event.special.change = {
setup: function () {
return qt.test(this.nodeName) ? (("checkbox" === this.type || "radio" === this.type) && (st.event.add(this, "propertychange._change", function (e) {
"checked" === e.originalEvent.propertyName && (this._just_changed = !0)
}), st.event.add(this, "click._change", function (e) {
this._just_changed && !e.isTrigger && (this._just_changed = !1), st.event.simulate("change", this, e, !0)
})), !1) : (st.event.add(this, "beforeactivate._change", function (e) {
var t = e.target;
qt.test(t.nodeName) && !st._data(t, "changeBubbles") && (st.event.add(t, "change._change", function (e) {
!this.parentNode || e.isSimulated || e.isTrigger || st.event.simulate("change", this.parentNode, e, !0)
}), st._data(t, "changeBubbles", !0))
}), t)
}, handle: function (e) {
var n = e.target;
return this !== n || e.isSimulated || e.isTrigger || "radio" !== n.type && "checkbox" !== n.type ? e.handleObj.handler.apply(this, arguments) : t
}, teardown: function () {
return st.event.remove(this, "._change"), !qt.test(this.nodeName)
}
}), st.support.focusinBubbles || st.each({focus: "focusin", blur: "focusout"}, function (e, t) {
var n = 0, r = function (e) {
st.event.simulate(t, e.target, st.event.fix(e), !0)
};
st.event.special[t] = {
setup: function () {
0 === n++ && V.addEventListener(e, r, !0)
}, teardown: function () {
0 === --n && V.removeEventListener(e, r, !0)
}
}
}), st.fn.extend({
on: function (e, n, r, i, o) {
var a, s;
if ("object" == typeof e) {
"string" != typeof n && (r = r || n, n = t);
for (s in e)this.on(s, n, r, e[s], o);
return this
}
if (null == r && null == i ? (i = n, r = n = t) : null == i && ("string" == typeof n ? (i = r, r = t) : (i = r, r = n, n = t)), i === !1)i = l; else if (!i)return this;
return 1 === o && (a = i, i = function (e) {
return st().off(e), a.apply(this, arguments)
}, i.guid = a.guid || (a.guid = st.guid++)), this.each(function () {
st.event.add(this, e, i, r, n)
})
}, one: function (e, t, n, r) {
return this.on(e, t, n, r, 1)
}, off: function (e, n, r) {
var i, o;
if (e && e.preventDefault && e.handleObj)return i = e.handleObj, st(e.delegateTarget).off(i.namespace ? i.origType + "." + i.namespace : i.origType, i.selector, i.handler), this;
if ("object" == typeof e) {
for (o in e)this.off(o, n, e[o]);
return this
}
return (n === !1 || "function" == typeof n) && (r = n, n = t), r === !1 && (r = l), this.each(function () {
st.event.remove(this, e, r, n)
})
}, bind: function (e, t, n) {
return this.on(e, null, t, n)
}, unbind: function (e, t) {
return this.off(e, null, t)
}, delegate: function (e, t, n, r) {
return this.on(t, e, n, r)
}, undelegate: function (e, t, n) {
return 1 === arguments.length ? this.off(e, "**") : this.off(t, e || "**", n)
}, trigger: function (e, t) {
return this.each(function () {
st.event.trigger(e, t, this)
})
}, triggerHandler: function (e, n) {
var r = this[0];
return r ? st.event.trigger(e, n, r, !0) : t
}, hover: function (e, t) {
return this.mouseenter(e).mouseleave(t || e)
}
}), st.each("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error contextmenu".split(" "), function (e, t) {
st.fn[t] = function (e, n) {
return arguments.length > 0 ? this.on(t, null, e, n) : this.trigger(t)
}, _t.test(t) && (st.event.fixHooks[t] = st.event.keyHooks), Ft.test(t) && (st.event.fixHooks[t] = st.event.mouseHooks)
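    // Sizzle: the embedded CSS selector engine (later exposed as st.find); inside this
    // IIFE, a is the main entry point, f the tokenizer, D (setDocument) the per-document
    // feature setup, and C (a.selectors) holds the filter/pseudo tables.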
}), function (e, t) {
function n(e) {
return ht.test(e + "")
}
function r() {
var e, t = [];
return e = function (n, r) {
return t.push(n += " ") > C.cacheLength && delete e[t.shift()], e[n] = r
}
}
function i(e) {
return e[P] = !0, e
}
function o(e) {
var t = L.createElement("div");
try {
return e(t)
} catch (n) {
return !1
} finally {
t = null
}
}
function a(e, t, n, r) {
var i, o, a, s, u, l, c, d, h, g;
if ((t ? t.ownerDocument || t : R) !== L && D(t), t = t || L, n = n || [], !e || "string" != typeof e)return n;
if (1 !== (s = t.nodeType) && 9 !== s)return [];
if (!M && !r) {
if (i = gt.exec(e))if (a = i[1]) {
if (9 === s) {
if (o = t.getElementById(a), !o || !o.parentNode)return n;
if (o.id === a)return n.push(o), n
} else if (t.ownerDocument && (o = t.ownerDocument.getElementById(a)) && O(t, o) && o.id === a)return n.push(o), n
} else {
if (i[2])return Q.apply(n, K.call(t.getElementsByTagName(e), 0)), n;
if ((a = i[3]) && W.getByClassName && t.getElementsByClassName)return Q.apply(n, K.call(t.getElementsByClassName(a), 0)), n
}
if (W.qsa && !q.test(e)) {
if (c = !0, d = P, h = t, g = 9 === s && e, 1 === s && "object" !== t.nodeName.toLowerCase()) {
for (l = f(e), (c = t.getAttribute("id")) ? d = c.replace(vt, "\\$&") : t.setAttribute("id", d), d = "[id='" + d + "'] ", u = l.length; u--;)l[u] = d + p(l[u]);
h = dt.test(e) && t.parentNode || t, g = l.join(",")
}
if (g)try {
return Q.apply(n, K.call(h.querySelectorAll(g), 0)), n
} catch (m) {
} finally {
c || t.removeAttribute("id")
}
}
}
return x(e.replace(at, "$1"), t, n, r)
}
function s(e, t) {
for (var n = e && t && e.nextSibling; n; n = n.nextSibling)if (n === t)return -1;
return e ? 1 : -1
}
function u(e) {
return function (t) {
var n = t.nodeName.toLowerCase();
return "input" === n && t.type === e
}
}
function l(e) {
return function (t) {
var n = t.nodeName.toLowerCase();
return ("input" === n || "button" === n) && t.type === e
}
}
function c(e) {
return i(function (t) {
return t = +t, i(function (n, r) {
for (var i, o = e([], n.length, t), a = o.length; a--;)n[i = o[a]] && (n[i] = !(r[i] = n[i]))
})
})
}
function f(e, t) {
var n, r, i, o, s, u, l, c = X[e + " "];
if (c)return t ? 0 : c.slice(0);
for (s = e, u = [], l = C.preFilter; s;) {
(!n || (r = ut.exec(s))) && (r && (s = s.slice(r[0].length) || s), u.push(i = [])), n = !1, (r = lt.exec(s)) && (n = r.shift(), i.push({
value: n,
type: r[0].replace(at, " ")
}), s = s.slice(n.length));
for (o in C.filter)!(r = pt[o].exec(s)) || l[o] && !(r = l[o](r)) || (n = r.shift(), i.push({
value: n,
type: o,
matches: r
}), s = s.slice(n.length));
if (!n)break
}
return t ? s.length : s ? a.error(e) : X(e, u).slice(0)
}
function p(e) {
for (var t = 0, n = e.length, r = ""; n > t; t++)r += e[t].value;
return r
}
function d(e, t, n) {
var r = t.dir, i = n && "parentNode" === t.dir, o = I++;
return t.first ? function (t, n, o) {
for (; t = t[r];)if (1 === t.nodeType || i)return e(t, n, o)
} : function (t, n, a) {
var s, u, l, c = $ + " " + o;
if (a) {
for (; t = t[r];)if ((1 === t.nodeType || i) && e(t, n, a))return !0
} else for (; t = t[r];)if (1 === t.nodeType || i)if (l = t[P] || (t[P] = {}), (u = l[r]) && u[0] === c) {
if ((s = u[1]) === !0 || s === N)return s === !0
} else if (u = l[r] = [c], u[1] = e(t, n, a) || N, u[1] === !0)return !0
}
}
function h(e) {
return e.length > 1 ? function (t, n, r) {
for (var i = e.length; i--;)if (!e[i](t, n, r))return !1;
return !0
} : e[0]
}
function g(e, t, n, r, i) {
for (var o, a = [], s = 0, u = e.length, l = null != t; u > s; s++)(o = e[s]) && (!n || n(o, r, i)) && (a.push(o), l && t.push(s));
return a
}
function m(e, t, n, r, o, a) {
return r && !r[P] && (r = m(r)), o && !o[P] && (o = m(o, a)), i(function (i, a, s, u) {
var l, c, f, p = [], d = [], h = a.length, m = i || b(t || "*", s.nodeType ? [s] : s, []), y = !e || !i && t ? m : g(m, p, e, s, u), v = n ? o || (i ? e : h || r) ? [] : a : y;
if (n && n(y, v, s, u), r)for (l = g(v, d), r(l, [], s, u), c = l.length; c--;)(f = l[c]) && (v[d[c]] = !(y[d[c]] = f));
if (i) {
if (o || e) {
if (o) {
for (l = [], c = v.length; c--;)(f = v[c]) && l.push(y[c] = f);
o(null, v = [], l, u)
}
for (c = v.length; c--;)(f = v[c]) && (l = o ? Z.call(i, f) : p[c]) > -1 && (i[l] = !(a[l] = f))
}
} else v = g(v === a ? v.splice(h, v.length) : v), o ? o(null, a, v, u) : Q.apply(a, v)
})
}
function y(e) {
for (var t, n, r, i = e.length, o = C.relative[e[0].type], a = o || C.relative[" "], s = o ? 1 : 0, u = d(function (e) {
return e === t
}, a, !0), l = d(function (e) {
return Z.call(t, e) > -1
}, a, !0), c = [function (e, n, r) {
return !o && (r || n !== j) || ((t = n).nodeType ? u(e, n, r) : l(e, n, r))
}]; i > s; s++)if (n = C.relative[e[s].type])c = [d(h(c), n)]; else {
if (n = C.filter[e[s].type].apply(null, e[s].matches), n[P]) {
for (r = ++s; i > r && !C.relative[e[r].type]; r++);
return m(s > 1 && h(c), s > 1 && p(e.slice(0, s - 1)).replace(at, "$1"), n, r > s && y(e.slice(s, r)), i > r && y(e = e.slice(r)), i > r && p(e))
}
c.push(n)
}
return h(c)
}
function v(e, t) {
var n = 0, r = t.length > 0, o = e.length > 0, s = function (i, s, u, l, c) {
var f, p, d, h = [], m = 0, y = "0", v = i && [], b = null != c, x = j, T = i || o && C.find.TAG("*", c && s.parentNode || s), w = $ += null == x ? 1 : Math.E;
for (b && (j = s !== L && s, N = n); null != (f = T[y]); y++) {
if (o && f) {
for (p = 0; d = e[p]; p++)if (d(f, s, u)) {
l.push(f);
break
}
b && ($ = w, N = ++n)
}
r && ((f = !d && f) && m--, i && v.push(f))
}
if (m += y, r && y !== m) {
for (p = 0; d = t[p]; p++)d(v, h, s, u);
if (i) {
if (m > 0)for (; y--;)v[y] || h[y] || (h[y] = G.call(l));
h = g(h)
}
Q.apply(l, h), b && !i && h.length > 0 && m + t.length > 1 && a.uniqueSort(l)
}
return b && ($ = w, j = x), v
};
return r ? i(s) : s
}
function b(e, t, n) {
for (var r = 0, i = t.length; i > r; r++)a(e, t[r], n);
return n
}
function x(e, t, n, r) {
var i, o, a, s, u, l = f(e);
if (!r && 1 === l.length) {
if (o = l[0] = l[0].slice(0), o.length > 2 && "ID" === (a = o[0]).type && 9 === t.nodeType && !M && C.relative[o[1].type]) {
if (t = C.find.ID(a.matches[0].replace(xt, Tt), t)[0], !t)return n;
e = e.slice(o.shift().value.length)
}
for (i = pt.needsContext.test(e) ? -1 : o.length - 1; i >= 0 && (a = o[i], !C.relative[s = a.type]); i--)if ((u = C.find[s]) && (r = u(a.matches[0].replace(xt, Tt), dt.test(o[0].type) && t.parentNode || t))) {
if (o.splice(i, 1), e = r.length && p(o), !e)return Q.apply(n, K.call(r, 0)), n;
break
}
}
return S(e, l)(r, t, M, n, dt.test(e)), n
}
function T() {
}
var w, N, C, k, E, S, A, j, D, L, H, M, q, _, F, O, B, P = "sizzle" + -new Date, R = e.document, W = {}, $ = 0, I = 0, z = r(), X = r(), U = r(), V = typeof t, Y = 1 << 31, J = [], G = J.pop, Q = J.push, K = J.slice, Z = J.indexOf || function (e) {
for (var t = 0, n = this.length; n > t; t++)if (this[t] === e)return t;
return -1
}, et = "[\\x20\\t\\r\\n\\f]", tt = "(?:\\\\.|[\\w-]|[^\\x00-\\xa0])+", nt = tt.replace("w", "w#"), rt = "([*^$|!~]?=)", it = "\\[" + et + "*(" + tt + ")" + et + "*(?:" + rt + et + "*(?:(['\"])((?:\\\\.|[^\\\\])*?)\\3|(" + nt + ")|)|)" + et + "*\\]", ot = ":(" + tt + ")(?:\\(((['\"])((?:\\\\.|[^\\\\])*?)\\3|((?:\\\\.|[^\\\\()[\\]]|" + it.replace(3, 8) + ")*)|.*)\\)|)", at = RegExp("^" + et + "+|((?:^|[^\\\\])(?:\\\\.)*)" + et + "+$", "g"), ut = RegExp("^" + et + "*," + et + "*"), lt = RegExp("^" + et + "*([\\x20\\t\\r\\n\\f>+~])" + et + "*"), ct = RegExp(ot), ft = RegExp("^" + nt + "$"), pt = {
ID: RegExp("^#(" + tt + ")"),
CLASS: RegExp("^\\.(" + tt + ")"),
NAME: RegExp("^\\[name=['\"]?(" + tt + ")['\"]?\\]"),
TAG: RegExp("^(" + tt.replace("w", "w*") + ")"),
ATTR: RegExp("^" + it),
PSEUDO: RegExp("^" + ot),
CHILD: RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\(" + et + "*(even|odd|(([+-]|)(\\d*)n|)" + et + "*(?:([+-]|)" + et + "*(\\d+)|))" + et + "*\\)|)", "i"),
needsContext: RegExp("^" + et + "*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\(" + et + "*((?:-\\d)?\\d*)" + et + "*\\)|)(?=[^-]|$)", "i")
}, dt = /[\x20\t\r\n\f]*[+~]/, ht = /\{\s*\[native code\]\s*\}/, gt = /^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/, mt = /^(?:input|select|textarea|button)$/i, yt = /^h\d$/i, vt = /'|\\/g, bt = /\=[\x20\t\r\n\f]*([^'"\]]*)[\x20\t\r\n\f]*\]/g, xt = /\\([\da-fA-F]{1,6}[\x20\t\r\n\f]?|.)/g, Tt = function (e, t) {
var n = "0x" + t - 65536;
return n !== n ? t : 0 > n ? String.fromCharCode(n + 65536) : String.fromCharCode(55296 | n >> 10, 56320 | 1023 & n)
};
try {
K.call(H.childNodes, 0)[0].nodeType
} catch (wt) {
K = function (e) {
for (var t, n = []; t = this[e]; e++)n.push(t);
return n
}
}
E = a.isXML = function (e) {
var t = e && (e.ownerDocument || e).documentElement;
return t ? "HTML" !== t.nodeName : !1
}, D = a.setDocument = function (e) {
var r = e ? e.ownerDocument || e : R;
return r !== L && 9 === r.nodeType && r.documentElement ? (L = r, H = r.documentElement, M = E(r), W.tagNameNoComments = o(function (e) {
return e.appendChild(r.createComment("")), !e.getElementsByTagName("*").length
}), W.attributes = o(function (e) {
e.innerHTML = "<select></select>";
var t = typeof e.lastChild.getAttribute("multiple");
return "boolean" !== t && "string" !== t
}), W.getByClassName = o(function (e) {
return e.innerHTML = "<div class='hidden e'></div><div class='hidden'></div>", e.getElementsByClassName && e.getElementsByClassName("e").length ? (e.lastChild.className = "e", 2 === e.getElementsByClassName("e").length) : !1
}), W.getByName = o(function (e) {
e.id = P + 0, e.innerHTML = "<a name='" + P + "'></a><div name='" + P + "'></div>", H.insertBefore(e, H.firstChild);
var t = r.getElementsByName && r.getElementsByName(P).length === 2 + r.getElementsByName(P + 0).length;
return W.getIdNotName = !r.getElementById(P), H.removeChild(e), t
}), C.attrHandle = o(function (e) {
return e.innerHTML = "<a href='#'></a>", e.firstChild && typeof e.firstChild.getAttribute !== V && "#" === e.firstChild.getAttribute("href")
}) ? {} : {
href: function (e) {
return e.getAttribute("href", 2)
}, type: function (e) {
return e.getAttribute("type")
}
}, W.getIdNotName ? (C.find.ID = function (e, t) {
if (typeof t.getElementById !== V && !M) {
var n = t.getElementById(e);
return n && n.parentNode ? [n] : []
}
}, C.filter.ID = function (e) {
var t = e.replace(xt, Tt);
return function (e) {
return e.getAttribute("id") === t
}
}) : (C.find.ID = function (e, n) {
if (typeof n.getElementById !== V && !M) {
var r = n.getElementById(e);
return r ? r.id === e || typeof r.getAttributeNode !== V && r.getAttributeNode("id").value === e ? [r] : t : []
}
}, C.filter.ID = function (e) {
var t = e.replace(xt, Tt);
return function (e) {
var n = typeof e.getAttributeNode !== V && e.getAttributeNode("id");
return n && n.value === t
}
}), C.find.TAG = W.tagNameNoComments ? function (e, n) {
return typeof n.getElementsByTagName !== V ? n.getElementsByTagName(e) : t
} : function (e, t) {
var n, r = [], i = 0, o = t.getElementsByTagName(e);
if ("*" === e) {
for (; n = o[i]; i++)1 === n.nodeType && r.push(n);
return r
}
return o
}, C.find.NAME = W.getByName && function (e, n) {
return typeof n.getElementsByName !== V ? n.getElementsByName(name) : t
}, C.find.CLASS = W.getByClassName && function (e, n) {
return typeof n.getElementsByClassName === V || M ? t : n.getElementsByClassName(e)
}, _ = [], q = [":focus"], (W.qsa = n(r.querySelectorAll)) && (o(function (e) {
e.innerHTML = "<select><option selected=''></option></select>", e.querySelectorAll("[selected]").length || q.push("\\[" + et + "*(?:checked|disabled|ismap|multiple|readonly|selected|value)"), e.querySelectorAll(":checked").length || q.push(":checked")
}), o(function (e) {
e.innerHTML = "<input type='hidden' i=''/>", e.querySelectorAll("[i^='']").length && q.push("[*^$]=" + et + "*(?:\"\"|'')"), e.querySelectorAll(":enabled").length || q.push(":enabled", ":disabled"), e.querySelectorAll("*,:x"), q.push(",.*:")
})), (W.matchesSelector = n(F = H.matchesSelector || H.mozMatchesSelector || H.webkitMatchesSelector || H.oMatchesSelector || H.msMatchesSelector)) && o(function (e) {
W.disconnectedMatch = F.call(e, "div"), F.call(e, "[s!='']:x"), _.push("!=", ot)
}), q = RegExp(q.join("|")), _ = RegExp(_.join("|")), O = n(H.contains) || H.compareDocumentPosition ? function (e, t) {
var n = 9 === e.nodeType ? e.documentElement : e, r = t && t.parentNode;
return e === r || !(!r || 1 !== r.nodeType || !(n.contains ? n.contains(r) : e.compareDocumentPosition && 16 & e.compareDocumentPosition(r)))
} : function (e, t) {
if (t)for (; t = t.parentNode;)if (t === e)return !0;
return !1
}, B = H.compareDocumentPosition ? function (e, t) {
var n;
return e === t ? (A = !0, 0) : (n = t.compareDocumentPosition && e.compareDocumentPosition && e.compareDocumentPosition(t)) ? 1 & n || e.parentNode && 11 === e.parentNode.nodeType ? e === r || O(R, e) ? -1 : t === r || O(R, t) ? 1 : 0 : 4 & n ? -1 : 1 : e.compareDocumentPosition ? -1 : 1
} : function (e, t) {
var n, i = 0, o = e.parentNode, a = t.parentNode, u = [e], l = [t];
if (e === t)return A = !0, 0;
if (e.sourceIndex && t.sourceIndex)return (~t.sourceIndex || Y) - (O(R, e) && ~e.sourceIndex || Y);
if (!o || !a)return e === r ? -1 : t === r ? 1 : o ? -1 : a ? 1 : 0;
if (o === a)return s(e, t);
for (n = e; n = n.parentNode;)u.unshift(n);
for (n = t; n = n.parentNode;)l.unshift(n);
for (; u[i] === l[i];)i++;
return i ? s(u[i], l[i]) : u[i] === R ? -1 : l[i] === R ? 1 : 0
}, A = !1, [0, 0].sort(B), W.detectDuplicates = A, L) : L
}, a.matches = function (e, t) {
return a(e, null, null, t)
}, a.matchesSelector = function (e, t) {
if ((e.ownerDocument || e) !== L && D(e), t = t.replace(bt, "='$1']"), !(!W.matchesSelector || M || _ && _.test(t) || q.test(t)))try {
var n = F.call(e, t);
if (n || W.disconnectedMatch || e.document && 11 !== e.document.nodeType)return n
} catch (r) {
}
return a(t, L, null, [e]).length > 0
}, a.contains = function (e, t) {
return (e.ownerDocument || e) !== L && D(e), O(e, t)
}, a.attr = function (e, t) {
var n;
return (e.ownerDocument || e) !== L && D(e), M || (t = t.toLowerCase()), (n = C.attrHandle[t]) ? n(e) : M || W.attributes ? e.getAttribute(t) : ((n = e.getAttributeNode(t)) || e.getAttribute(t)) && e[t] === !0 ? t : n && n.specified ? n.value : null
}, a.error = function (e) {
throw Error("Syntax error, unrecognized expression: " + e)
}, a.uniqueSort = function (e) {
var t, n = [], r = 1, i = 0;
if (A = !W.detectDuplicates, e.sort(B), A) {
for (; t = e[r]; r++)t === e[r - 1] && (i = n.push(r));
for (; i--;)e.splice(n[i], 1)
}
return e
}, k = a.getText = function (e) {
var t, n = "", r = 0, i = e.nodeType;
if (i) {
if (1 === i || 9 === i || 11 === i) {
if ("string" == typeof e.textContent)return e.textContent;
for (e = e.firstChild; e; e = e.nextSibling)n += k(e)
} else if (3 === i || 4 === i)return e.nodeValue
} else for (; t = e[r]; r++)n += k(t);
return n
}, C = a.selectors = {
cacheLength: 50,
createPseudo: i,
match: pt,
find: {},
relative: {
">": {dir: "parentNode", first: !0},
" ": {dir: "parentNode"},
"+": {dir: "previousSibling", first: !0},
"~": {dir: "previousSibling"}
},
preFilter: {
ATTR: function (e) {
return e[1] = e[1].replace(xt, Tt), e[3] = (e[4] || e[5] || "").replace(xt, Tt), "~=" === e[2] && (e[3] = " " + e[3] + " "), e.slice(0, 4)
}, CHILD: function (e) {
return e[1] = e[1].toLowerCase(), "nth" === e[1].slice(0, 3) ? (e[3] || a.error(e[0]), e[4] = +(e[4] ? e[5] + (e[6] || 1) : 2 * ("even" === e[3] || "odd" === e[3])), e[5] = +(e[7] + e[8] || "odd" === e[3])) : e[3] && a.error(e[0]), e
}, PSEUDO: function (e) {
var t, n = !e[5] && e[2];
return pt.CHILD.test(e[0]) ? null : (e[4] ? e[2] = e[4] : n && ct.test(n) && (t = f(n, !0)) && (t = n.indexOf(")", n.length - t) - n.length) && (e[0] = e[0].slice(0, t), e[2] = n.slice(0, t)), e.slice(0, 3))
}
},
filter: {
TAG: function (e) {
return "*" === e ? function () {
return !0
} : (e = e.replace(xt, Tt).toLowerCase(), function (t) {
return t.nodeName && t.nodeName.toLowerCase() === e
})
}, CLASS: function (e) {
var t = z[e + " "];
return t || (t = RegExp("(^|" + et + ")" + e + "(" + et + "|$)")) && z(e, function (e) {
return t.test(e.className || typeof e.getAttribute !== V && e.getAttribute("class") || "")
})
}, ATTR: function (e, t, n) {
return function (r) {
var i = a.attr(r, e);
return null == i ? "!=" === t : t ? (i += "", "=" === t ? i === n : "!=" === t ? i !== n : "^=" === t ? n && 0 === i.indexOf(n) : "*=" === t ? n && i.indexOf(n) > -1 : "$=" === t ? n && i.substr(i.length - n.length) === n : "~=" === t ? (" " + i + " ").indexOf(n) > -1 : "|=" === t ? i === n || i.substr(0, n.length + 1) === n + "-" : !1) : !0
}
}, CHILD: function (e, t, n, r, i) {
var o = "nth" !== e.slice(0, 3), a = "last" !== e.slice(-4), s = "of-type" === t;
return 1 === r && 0 === i ? function (e) {
return !!e.parentNode
} : function (t, n, u) {
var l, c, f, p, d, h, g = o !== a ? "nextSibling" : "previousSibling", m = t.parentNode, y = s && t.nodeName.toLowerCase(), v = !u && !s;
if (m) {
if (o) {
for (; g;) {
for (f = t; f = f[g];)if (s ? f.nodeName.toLowerCase() === y : 1 === f.nodeType)return !1;
h = g = "only" === e && !h && "nextSibling"
}
return !0
}
if (h = [a ? m.firstChild : m.lastChild], a && v) {
for (c = m[P] || (m[P] = {}), l = c[e] || [], d = l[0] === $ && l[1], p = l[0] === $ && l[2], f = d && m.childNodes[d]; f = ++d && f && f[g] || (p = d = 0) || h.pop();)if (1 === f.nodeType && ++p && f === t) {
c[e] = [$, d, p];
break
}
} else if (v && (l = (t[P] || (t[P] = {}))[e]) && l[0] === $)p = l[1]; else for (; (f = ++d && f && f[g] || (p = d = 0) || h.pop()) && ((s ? f.nodeName.toLowerCase() !== y : 1 !== f.nodeType) || !++p || (v && ((f[P] || (f[P] = {}))[e] = [$, p]), f !== t)););
return p -= i, p === r || 0 === p % r && p / r >= 0
}
}
}, PSEUDO: function (e, t) {
var n, r = C.pseudos[e] || C.setFilters[e.toLowerCase()] || a.error("unsupported pseudo: " + e);
return r[P] ? r(t) : r.length > 1 ? (n = [e, e, "", t], C.setFilters.hasOwnProperty(e.toLowerCase()) ? i(function (e, n) {
for (var i, o = r(e, t), a = o.length; a--;)i = Z.call(e, o[a]), e[i] = !(n[i] = o[a])
}) : function (e) {
return r(e, 0, n)
}) : r
}
},
pseudos: {
not: i(function (e) {
var t = [], n = [], r = S(e.replace(at, "$1"));
return r[P] ? i(function (e, t, n, i) {
for (var o, a = r(e, null, i, []), s = e.length; s--;)(o = a[s]) && (e[s] = !(t[s] = o))
}) : function (e, i, o) {
return t[0] = e, r(t, null, o, n), !n.pop()
}
}), has: i(function (e) {
return function (t) {
return a(e, t).length > 0
}
}), contains: i(function (e) {
return function (t) {
return (t.textContent || t.innerText || k(t)).indexOf(e) > -1
}
}), lang: i(function (e) {
return ft.test(e || "") || a.error("unsupported lang: " + e), e = e.replace(xt, Tt).toLowerCase(), function (t) {
var n;
do if (n = M ? t.getAttribute("xml:lang") || t.getAttribute("lang") : t.lang)return n = n.toLowerCase(), n === e || 0 === n.indexOf(e + "-"); while ((t = t.parentNode) && 1 === t.nodeType);
return !1
}
}), target: function (t) {
var n = e.location && e.location.hash;
return n && n.slice(1) === t.id
}, root: function (e) {
return e === H
}, focus: function (e) {
return e === L.activeElement && (!L.hasFocus || L.hasFocus()) && !!(e.type || e.href || ~e.tabIndex)
}, enabled: function (e) {
return e.disabled === !1
}, disabled: function (e) {
return e.disabled === !0
}, checked: function (e) {
var t = e.nodeName.toLowerCase();
return "input" === t && !!e.checked || "option" === t && !!e.selected
}, selected: function (e) {
return e.parentNode && e.parentNode.selectedIndex, e.selected === !0
}, empty: function (e) {
for (e = e.firstChild; e; e = e.nextSibling)if (e.nodeName > "@" || 3 === e.nodeType || 4 === e.nodeType)return !1;
return !0
}, parent: function (e) {
return !C.pseudos.empty(e)
}, header: function (e) {
return yt.test(e.nodeName)
}, input: function (e) {
return mt.test(e.nodeName)
}, button: function (e) {
var t = e.nodeName.toLowerCase();
return "input" === t && "button" === e.type || "button" === t
}, text: function (e) {
var t;
return "input" === e.nodeName.toLowerCase() && "text" === e.type && (null == (t = e.getAttribute("type")) || t.toLowerCase() === e.type)
}, first: c(function () {
return [0]
}), last: c(function (e, t) {
return [t - 1]
}), eq: c(function (e, t, n) {
return [0 > n ? n + t : n]
}), even: c(function (e, t) {
for (var n = 0; t > n; n += 2)e.push(n);
return e
}), odd: c(function (e, t) {
for (var n = 1; t > n; n += 2)e.push(n);
return e
}), lt: c(function (e, t, n) {
for (var r = 0 > n ? n + t : n; --r >= 0;)e.push(r);
return e
}), gt: c(function (e, t, n) {
for (var r = 0 > n ? n + t : n; t > ++r;)e.push(r);
return e
})
}
};
for (w in{radio: !0, checkbox: !0, file: !0, password: !0, image: !0})C.pseudos[w] = u(w);
for (w in{submit: !0, reset: !0})C.pseudos[w] = l(w);
S = a.compile = function (e, t) {
var n, r = [], i = [], o = U[e + " "];
if (!o) {
for (t || (t = f(e)), n = t.length; n--;)o = y(t[n]), o[P] ? r.push(o) : i.push(o);
o = U(e, v(i, r))
}
return o
}, C.pseudos.nth = C.pseudos.eq, C.filters = T.prototype = C.pseudos, C.setFilters = new T, D(), a.attr = st.attr, st.find = a, st.expr = a.selectors, st.expr[":"] = st.expr.pseudos, st.unique = a.uniqueSort, st.text = a.getText, st.isXMLDoc = a.isXML, st.contains = a.contains
}(e);
var Pt = /Until$/, Rt = /^(?:parents|prev(?:Until|All))/, Wt = /^.[^:#\[\.,]*$/, $t = st.expr.match.needsContext, It = {
children: !0,
contents: !0,
next: !0,
prev: !0
};
st.fn.extend({
find: function (e) {
var t, n, r;
if ("string" != typeof e)return r = this, this.pushStack(st(e).filter(function () {
for (t = 0; r.length > t; t++)if (st.contains(r[t], this))return !0
}));
for (n = [], t = 0; this.length > t; t++)st.find(e, this[t], n);
return n = this.pushStack(st.unique(n)), n.selector = (this.selector ? this.selector + " " : "") + e, n
}, has: function (e) {
var t, n = st(e, this), r = n.length;
return this.filter(function () {
for (t = 0; r > t; t++)if (st.contains(this, n[t]))return !0
})
}, not: function (e) {
return this.pushStack(f(this, e, !1))
}, filter: function (e) {
return this.pushStack(f(this, e, !0))
}, is: function (e) {
return !!e && ("string" == typeof e ? $t.test(e) ? st(e, this.context).index(this[0]) >= 0 : st.filter(e, this).length > 0 : this.filter(e).length > 0)
}, closest: function (e, t) {
for (var n, r = 0, i = this.length, o = [], a = $t.test(e) || "string" != typeof e ? st(e, t || this.context) : 0; i > r; r++)for (n = this[r]; n && n.ownerDocument && n !== t && 11 !== n.nodeType;) {
if (a ? a.index(n) > -1 : st.find.matchesSelector(n, e)) {
o.push(n);
break
}
n = n.parentNode
}
return this.pushStack(o.length > 1 ? st.unique(o) : o)
}, index: function (e) {
return e ? "string" == typeof e ? st.inArray(this[0], st(e)) : st.inArray(e.jquery ? e[0] : e, this) : this[0] && this[0].parentNode ? this.first().prevAll().length : -1
}, add: function (e, t) {
var n = "string" == typeof e ? st(e, t) : st.makeArray(e && e.nodeType ? [e] : e), r = st.merge(this.get(), n);
return this.pushStack(st.unique(r))
}, addBack: function (e) {
return this.add(null == e ? this.prevObject : this.prevObject.filter(e))
}
}), st.fn.andSelf = st.fn.addBack, st.each({
parent: function (e) {
var t = e.parentNode;
return t && 11 !== t.nodeType ? t : null
}, parents: function (e) {
return st.dir(e, "parentNode")
}, parentsUntil: function (e, t, n) {
return st.dir(e, "parentNode", n)
}, next: function (e) {
return c(e, "nextSibling")
}, prev: function (e) {
return c(e, "previousSibling")
}, nextAll: function (e) {
return st.dir(e, "nextSibling")
}, prevAll: function (e) {
return st.dir(e, "previousSibling")
}, nextUntil: function (e, t, n) {
return st.dir(e, "nextSibling", n)
}, prevUntil: function (e, t, n) {
return st.dir(e, "previousSibling", n)
}, siblings: function (e) {
return st.sibling((e.parentNode || {}).firstChild, e)
}, children: function (e) {
return st.sibling(e.firstChild)
}, contents: function (e) {
return st.nodeName(e, "iframe") ? e.contentDocument || e.contentWindow.document : st.merge([], e.childNodes)
}
}, function (e, t) {
st.fn[e] = function (n, r) {
var i = st.map(this, t, n);
return Pt.test(e) || (r = n), r && "string" == typeof r && (i = st.filter(r, i)), i = this.length > 1 && !It[e] ? st.unique(i) : i, this.length > 1 && Rt.test(e) && (i = i.reverse()), this.pushStack(i)
}
}), st.extend({
filter: function (e, t, n) {
return n && (e = ":not(" + e + ")"), 1 === t.length ? st.find.matchesSelector(t[0], e) ? [t[0]] : [] : st.find.matches(e, t)
}, dir: function (e, n, r) {
for (var i = [], o = e[n]; o && 9 !== o.nodeType && (r === t || 1 !== o.nodeType || !st(o).is(r));)1 === o.nodeType && i.push(o), o = o[n];
return i
}, sibling: function (e, t) {
for (var n = []; e; e = e.nextSibling)1 === e.nodeType && e !== t && n.push(e);
return n
}
});
var zt = "abbr|article|aside|audio|bdi|canvas|data|datalist|details|figcaption|figure|footer|header|hgroup|mark|meter|nav|output|progress|section|summary|time|video", Xt = / jQuery\d+="(?:null|\d+)"/g, Ut = RegExp("<(?:" + zt + ")[\\s/>]", "i"), Vt = /^\s+/, Yt = /<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi, Jt = /<([\w:]+)/, Gt = /<tbody/i, Qt = /<|&#?\w+;/, Kt = /<(?:script|style|link)/i, Zt = /^(?:checkbox|radio)$/i, en = /checked\s*(?:[^=]|=\s*.checked.)/i, tn = /^$|\/(?:java|ecma)script/i, nn = /^true\/(.*)/, rn = /^\s*<!(?:\[CDATA\[|--)|(?:\]\]|--)>\s*$/g, on = {
option: [1, "<select multiple='multiple'>", "</select>"],
legend: [1, "<fieldset>", "</fieldset>"],
area: [1, "<map>", "</map>"],
param: [1, "<object>", "</object>"],
thead: [1, "<table>", "</table>"],
tr: [2, "<table><tbody>", "</tbody></table>"],
col: [2, "<table><tbody></tbody><colgroup>", "</colgroup></table>"],
td: [3, "<table><tbody><tr>", "</tr></tbody></table>"],
_default: st.support.htmlSerialize ? [0, "", ""] : [1, "X<div>", "</div>"]
}, an = p(V), sn = an.appendChild(V.createElement("div"));
on.optgroup = on.option, on.tbody = on.tfoot = on.colgroup = on.caption = on.thead, on.th = on.td, st.fn.extend({
text: function (e) {
return st.access(this, function (e) {
return e === t ? st.text(this) : this.empty().append((this[0] && this[0].ownerDocument || V).createTextNode(e))
}, null, e, arguments.length)
}, wrapAll: function (e) {
if (st.isFunction(e))return this.each(function (t) {
st(this).wrapAll(e.call(this, t))
});
if (this[0]) {
var t = st(e, this[0].ownerDocument).eq(0).clone(!0);
this[0].parentNode && t.insertBefore(this[0]), t.map(function () {
for (var e = this; e.firstChild && 1 === e.firstChild.nodeType;)e = e.firstChild;
return e
}).append(this)
}
return this
}, wrapInner: function (e) {
return st.isFunction(e) ? this.each(function (t) {
st(this).wrapInner(e.call(this, t))
}) : this.each(function () {
var t = st(this), n = t.contents();
n.length ? n.wrapAll(e) : t.append(e)
})
}, wrap: function (e) {
var t = st.isFunction(e);
return this.each(function (n) {
st(this).wrapAll(t ? e.call(this, n) : e)
})
}, unwrap: function () {
return this.parent().each(function () {
st.nodeName(this, "body") || st(this).replaceWith(this.childNodes)
}).end()
}, append: function () {
return this.domManip(arguments, !0, function (e) {
(1 === this.nodeType || 11 === this.nodeType || 9 === this.nodeType) && this.appendChild(e)
})
}, prepend: function () {
return this.domManip(arguments, !0, function (e) {
(1 === this.nodeType || 11 === this.nodeType || 9 === this.nodeType) && this.insertBefore(e, this.firstChild)
})
}, before: function () {
return this.domManip(arguments, !1, function (e) {
this.parentNode && this.parentNode.insertBefore(e, this)
})
}, after: function () {
return this.domManip(arguments, !1, function (e) {
this.parentNode && this.parentNode.insertBefore(e, this.nextSibling)
})
}, remove: function (e, t) {
for (var n, r = 0; null != (n = this[r]); r++)(!e || st.filter(e, [n]).length > 0) && (t || 1 !== n.nodeType || st.cleanData(b(n)), n.parentNode && (t && st.contains(n.ownerDocument, n) && m(b(n, "script")), n.parentNode.removeChild(n)));
return this
}, empty: function () {
for (var e, t = 0; null != (e = this[t]); t++) {
for (1 === e.nodeType && st.cleanData(b(e, !1)); e.firstChild;)e.removeChild(e.firstChild);
e.options && st.nodeName(e, "select") && (e.options.length = 0)
}
return this
}, clone: function (e, t) {
return e = null == e ? !1 : e, t = null == t ? e : t, this.map(function () {
return st.clone(this, e, t)
})
}, html: function (e) {
return st.access(this, function (e) {
var n = this[0] || {}, r = 0, i = this.length;
if (e === t)return 1 === n.nodeType ? n.innerHTML.replace(Xt, "") : t;
if (!("string" != typeof e || Kt.test(e) || !st.support.htmlSerialize && Ut.test(e) || !st.support.leadingWhitespace && Vt.test(e) || on[(Jt.exec(e) || ["", ""])[1].toLowerCase()])) {
e = e.replace(Yt, "<$1></$2>");
try {
for (; i > r; r++)n = this[r] || {}, 1 === n.nodeType && (st.cleanData(b(n, !1)), n.innerHTML = e);
n = 0
} catch (o) {
}
}
n && this.empty().append(e)
}, null, e, arguments.length)
}, replaceWith: function (e) {
var t = st.isFunction(e);
return t || "string" == typeof e || (e = st(e).not(this).detach()), this.domManip([e], !0, function (e) {
var t = this.nextSibling, n = this.parentNode;
(n && 1 === this.nodeType || 11 === this.nodeType) && (st(this).remove(), t ? t.parentNode.insertBefore(e, t) : n.appendChild(e))
})
}, detach: function (e) {
return this.remove(e, !0)
}, domManip: function (e, n, r) {
e = et.apply([], e);
var i, o, a, s, u, l, c = 0, f = this.length, p = this, m = f - 1, y = e[0], v = st.isFunction(y);
if (v || !(1 >= f || "string" != typeof y || st.support.checkClone) && en.test(y))return this.each(function (i) {
var o = p.eq(i);
v && (e[0] = y.call(this, i, n ? o.html() : t)), o.domManip(e, n, r)
});
if (f && (i = st.buildFragment(e, this[0].ownerDocument, !1, this), o = i.firstChild, 1 === i.childNodes.length && (i = o), o)) {
for (n = n && st.nodeName(o, "tr"), a = st.map(b(i, "script"), h), s = a.length; f > c; c++)u = i, c !== m && (u = st.clone(u, !0, !0), s && st.merge(a, b(u, "script"))), r.call(n && st.nodeName(this[c], "table") ? d(this[c], "tbody") : this[c], u, c);
if (s)for (l = a[a.length - 1].ownerDocument, st.map(a, g), c = 0; s > c; c++)u = a[c], tn.test(u.type || "") && !st._data(u, "globalEval") && st.contains(l, u) && (u.src ? st.ajax({
url: u.src,
type: "GET",
dataType: "script",
async: !1,
global: !1,
"throws": !0
}) : st.globalEval((u.text || u.textContent || u.innerHTML || "").replace(rn, "")));
i = o = null
}
return this
}
}), st.each({
appendTo: "append",
prependTo: "prepend",
insertBefore: "before",
insertAfter: "after",
replaceAll: "replaceWith"
}, function (e, t) {
st.fn[e] = function (e) {
for (var n, r = 0, i = [], o = st(e), a = o.length - 1; a >= r; r++)n = r === a ? this : this.clone(!0), st(o[r])[t](n), tt.apply(i, n.get());
return this.pushStack(i)
}
}), st.extend({
clone: function (e, t, n) {
var r, i, o, a, s, u = st.contains(e.ownerDocument, e);
if (st.support.html5Clone || st.isXMLDoc(e) || !Ut.test("<" + e.nodeName + ">") ? s = e.cloneNode(!0) : (sn.innerHTML = e.outerHTML, sn.removeChild(s = sn.firstChild)), !(st.support.noCloneEvent && st.support.noCloneChecked || 1 !== e.nodeType && 11 !== e.nodeType || st.isXMLDoc(e)))for (r = b(s), i = b(e), a = 0; null != (o = i[a]); ++a)r[a] && v(o, r[a]);
if (t)if (n)for (i = i || b(e), r = r || b(s), a = 0; null != (o = i[a]); a++)y(o, r[a]); else y(e, s);
return r = b(s, "script"), r.length > 0 && m(r, !u && b(e, "script")), r = i = o = null, s
}, buildFragment: function (e, t, n, r) {
for (var i, o, a, s, u, l, c, f = e.length, d = p(t), h = [], g = 0; f > g; g++)if (o = e[g], o || 0 === o)if ("object" === st.type(o))st.merge(h, o.nodeType ? [o] : o); else if (Qt.test(o)) {
for (s = s || d.appendChild(t.createElement("div")), a = (Jt.exec(o) || ["", ""])[1].toLowerCase(), u = on[a] || on._default, s.innerHTML = u[1] + o.replace(Yt, "<$1></$2>") + u[2], c = u[0]; c--;)s = s.lastChild;
if (!st.support.leadingWhitespace && Vt.test(o) && h.push(t.createTextNode(Vt.exec(o)[0])), !st.support.tbody)for (o = "table" !== a || Gt.test(o) ? "<table>" !== u[1] || Gt.test(o) ? 0 : s : s.firstChild, c = o && o.childNodes.length; c--;)st.nodeName(l = o.childNodes[c], "tbody") && !l.childNodes.length && o.removeChild(l);
for (st.merge(h, s.childNodes), s.textContent = ""; s.firstChild;)s.removeChild(s.firstChild);
s = d.lastChild
} else h.push(t.createTextNode(o));
for (s && d.removeChild(s), st.support.appendChecked || st.grep(b(h, "input"), x), g = 0; o = h[g++];)if ((!r || -1 === st.inArray(o, r)) && (i = st.contains(o.ownerDocument, o), s = b(d.appendChild(o), "script"), i && m(s), n))for (c = 0; o = s[c++];)tn.test(o.type || "") && n.push(o);
return s = null, d
}, cleanData: function (e, n) {
for (var r, i, o, a, s = 0, u = st.expando, l = st.cache, c = st.support.deleteExpando, f = st.event.special; null != (o = e[s]); s++)if ((n || st.acceptData(o)) && (i = o[u], r = i && l[i])) {
if (r.events)for (a in r.events)f[a] ? st.event.remove(o, a) : st.removeEvent(o, a, r.handle);
l[i] && (delete l[i], c ? delete o[u] : o.removeAttribute !== t ? o.removeAttribute(u) : o[u] = null, K.push(i))
}
}
});
var un, ln, cn, fn = /alpha\([^)]*\)/i, pn = /opacity\s*=\s*([^)]*)/, dn = /^(top|right|bottom|left)$/, hn = /^(none|table(?!-c[ea]).+)/, gn = /^margin/, mn = RegExp("^(" + ut + ")(.*)$", "i"), yn = RegExp("^(" + ut + ")(?!px)[a-z%]+$", "i"), vn = RegExp("^([+-])=(" + ut + ")", "i"), bn = {BODY: "block"}, xn = {
position: "absolute",
visibility: "hidden",
display: "block"
}, Tn = {
letterSpacing: 0,
fontWeight: 400
}, wn = ["Top", "Right", "Bottom", "Left"], Nn = ["Webkit", "O", "Moz", "ms"];
st.fn.extend({
css: function (e, n) {
return st.access(this, function (e, n, r) {
var i, o, a = {}, s = 0;
if (st.isArray(n)) {
for (i = ln(e), o = n.length; o > s; s++)a[n[s]] = st.css(e, n[s], !1, i);
return a
}
return r !== t ? st.style(e, n, r) : st.css(e, n)
}, e, n, arguments.length > 1)
}, show: function () {
return N(this, !0)
}, hide: function () {
return N(this)
}, toggle: function (e) {
var t = "boolean" == typeof e;
return this.each(function () {
(t ? e : w(this)) ? st(this).show() : st(this).hide()
})
}
}), st.extend({
cssHooks: {
opacity: {
get: function (e, t) {
if (t) {
var n = un(e, "opacity");
return "" === n ? "1" : n
}
}
}
},
cssNumber: {
columnCount: !0,
fillOpacity: !0,
fontWeight: !0,
lineHeight: !0,
opacity: !0,
orphans: !0,
widows: !0,
zIndex: !0,
zoom: !0
},
cssProps: {"float": st.support.cssFloat ? "cssFloat" : "styleFloat"},
style: function (e, n, r, i) {
if (e && 3 !== e.nodeType && 8 !== e.nodeType && e.style) {
var o, a, s, u = st.camelCase(n), l = e.style;
if (n = st.cssProps[u] || (st.cssProps[u] = T(l, u)), s = st.cssHooks[n] || st.cssHooks[u], r === t)return s && "get"in s && (o = s.get(e, !1, i)) !== t ? o : l[n];
if (a = typeof r, "string" === a && (o = vn.exec(r)) && (r = (o[1] + 1) * o[2] + parseFloat(st.css(e, n)), a = "number"), !(null == r || "number" === a && isNaN(r) || ("number" !== a || st.cssNumber[u] || (r += "px"), st.support.clearCloneStyle || "" !== r || 0 !== n.indexOf("background") || (l[n] = "inherit"), s && "set"in s && (r = s.set(e, r, i)) === t)))try {
l[n] = r
} catch (c) {
}
}
},
css: function (e, n, r, i) {
var o, a, s, u = st.camelCase(n);
return n = st.cssProps[u] || (st.cssProps[u] = T(e.style, u)), s = st.cssHooks[n] || st.cssHooks[u], s && "get"in s && (o = s.get(e, !0, r)), o === t && (o = un(e, n, i)), "normal" === o && n in Tn && (o = Tn[n]), r ? (a = parseFloat(o), r === !0 || st.isNumeric(a) ? a || 0 : o) : o
},
swap: function (e, t, n, r) {
var i, o, a = {};
for (o in t)a[o] = e.style[o], e.style[o] = t[o];
i = n.apply(e, r || []);
for (o in t)e.style[o] = a[o];
return i
}
}), e.getComputedStyle ? (ln = function (t) {
return e.getComputedStyle(t, null)
}, un = function (e, n, r) {
var i, o, a, s = r || ln(e), u = s ? s.getPropertyValue(n) || s[n] : t, l = e.style;
return s && ("" !== u || st.contains(e.ownerDocument, e) || (u = st.style(e, n)), yn.test(u) && gn.test(n) && (i = l.width, o = l.minWidth, a = l.maxWidth, l.minWidth = l.maxWidth = l.width = u, u = s.width, l.width = i, l.minWidth = o, l.maxWidth = a)), u
}) : V.documentElement.currentStyle && (ln = function (e) {
return e.currentStyle
}, un = function (e, n, r) {
var i, o, a, s = r || ln(e), u = s ? s[n] : t, l = e.style;
return null == u && l && l[n] && (u = l[n]), yn.test(u) && !dn.test(n) && (i = l.left, o = e.runtimeStyle, a = o && o.left, a && (o.left = e.currentStyle.left), l.left = "fontSize" === n ? "1em" : u, u = l.pixelLeft + "px", l.left = i, a && (o.left = a)), "" === u ? "auto" : u
}), st.each(["height", "width"], function (e, n) {
st.cssHooks[n] = {
get: function (e, r, i) {
return r ? 0 === e.offsetWidth && hn.test(st.css(e, "display")) ? st.swap(e, xn, function () {
return E(e, n, i)
}) : E(e, n, i) : t
}, set: function (e, t, r) {
var i = r && ln(e);
return C(e, t, r ? k(e, n, r, st.support.boxSizing && "border-box" === st.css(e, "boxSizing", !1, i), i) : 0)
}
}
}), st.support.opacity || (st.cssHooks.opacity = {
get: function (e, t) {
return pn.test((t && e.currentStyle ? e.currentStyle.filter : e.style.filter) || "") ? .01 * parseFloat(RegExp.$1) + "" : t ? "1" : ""
}, set: function (e, t) {
var n = e.style, r = e.currentStyle, i = st.isNumeric(t) ? "alpha(opacity=" + 100 * t + ")" : "", o = r && r.filter || n.filter || "";
n.zoom = 1, (t >= 1 || "" === t) && "" === st.trim(o.replace(fn, "")) && n.removeAttribute && (n.removeAttribute("filter"), "" === t || r && !r.filter) || (n.filter = fn.test(o) ? o.replace(fn, i) : o + " " + i)
}
}), st(function () {
st.support.reliableMarginRight || (st.cssHooks.marginRight = {
get: function (e, n) {
return n ? st.swap(e, {display: "inline-block"}, un, [e, "marginRight"]) : t
}
}), !st.support.pixelPosition && st.fn.position && st.each(["top", "left"], function (e, n) {
st.cssHooks[n] = {
get: function (e, r) {
return r ? (r = un(e, n), yn.test(r) ? st(e).position()[n] + "px" : r) : t
}
}
})
}), st.expr && st.expr.filters && (st.expr.filters.hidden = function (e) {
return 0 === e.offsetWidth && 0 === e.offsetHeight || !st.support.reliableHiddenOffsets && "none" === (e.style && e.style.display || st.css(e, "display"))
}, st.expr.filters.visible = function (e) {
return !st.expr.filters.hidden(e)
}), st.each({margin: "", padding: "", border: "Width"}, function (e, t) {
st.cssHooks[e + t] = {
expand: function (n) {
for (var r = 0, i = {}, o = "string" == typeof n ? n.split(" ") : [n]; 4 > r; r++)i[e + wn[r] + t] = o[r] || o[r - 2] || o[0];
return i
}
}, gn.test(e) || (st.cssHooks[e + t].set = C)
});
var Cn = /%20/g, kn = /\[\]$/, En = /\r?\n/g, Sn = /^(?:submit|button|image|reset)$/i, An = /^(?:input|select|textarea|keygen)/i;
st.fn.extend({
serialize: function () {
return st.param(this.serializeArray())
}, serializeArray: function () {
return this.map(function () {
var e = st.prop(this, "elements");
return e ? st.makeArray(e) : this
}).filter(function () {
var e = this.type;
return this.name && !st(this).is(":disabled") && An.test(this.nodeName) && !Sn.test(e) && (this.checked || !Zt.test(e))
}).map(function (e, t) {
var n = st(this).val();
return null == n ? null : st.isArray(n) ? st.map(n, function (e) {
return {name: t.name, value: e.replace(En, "\r\n")}
}) : {name: t.name, value: n.replace(En, "\r\n")}
}).get()
}
}), st.param = function (e, n) {
var r, i = [], o = function (e, t) {
t = st.isFunction(t) ? t() : null == t ? "" : t, i[i.length] = encodeURIComponent(e) + "=" + encodeURIComponent(t)
};
if (n === t && (n = st.ajaxSettings && st.ajaxSettings.traditional), st.isArray(e) || e.jquery && !st.isPlainObject(e))st.each(e, function () {
o(this.name, this.value)
}); else for (r in e)j(r, e[r], n, o);
return i.join("&").replace(Cn, "+")
};
var jn, Dn, Ln = st.now(), Hn = /\?/, Mn = /#.*$/, qn = /([?&])_=[^&]*/, _n = /^(.*?):[ \t]*([^\r\n]*)\r?$/gm, Fn = /^(?:about|app|app-storage|.+-extension|file|res|widget):$/, On = /^(?:GET|HEAD)$/, Bn = /^\/\//, Pn = /^([\w.+-]+:)(?:\/\/([^\/?#:]*)(?::(\d+)|)|)/, Rn = st.fn.load, Wn = {}, $n = {}, In = "*/".concat("*");
try {
Dn = Y.href
} catch (zn) {
Dn = V.createElement("a"), Dn.href = "", Dn = Dn.href
}
jn = Pn.exec(Dn.toLowerCase()) || [], st.fn.load = function (e, n, r) {
if ("string" != typeof e && Rn)return Rn.apply(this, arguments);
var i, o, a, s = this, u = e.indexOf(" ");
return u >= 0 && (i = e.slice(u, e.length), e = e.slice(0, u)), st.isFunction(n) ? (r = n, n = t) : n && "object" == typeof n && (o = "POST"), s.length > 0 && st.ajax({
url: e,
type: o,
dataType: "html",
data: n
}).done(function (e) {
a = arguments, s.html(i ? st("<div>").append(st.parseHTML(e)).find(i) : e)
}).complete(r && function (e, t) {
s.each(r, a || [e.responseText, t, e])
}), this
}, st.each(["ajaxStart", "ajaxStop", "ajaxComplete", "ajaxError", "ajaxSuccess", "ajaxSend"], function (e, t) {
st.fn[t] = function (e) {
return this.on(t, e)
}
}), st.each(["get", "post"], function (e, n) {
st[n] = function (e, r, i, o) {
return st.isFunction(r) && (o = o || i, i = r, r = t), st.ajax({
url: e,
type: n,
dataType: o,
data: r,
success: i
})
}
}), st.extend({
active: 0,
lastModified: {},
etag: {},
ajaxSettings: {
url: Dn,
type: "GET",
isLocal: Fn.test(jn[1]),
global: !0,
processData: !0,
async: !0,
contentType: "application/x-www-form-urlencoded; charset=UTF-8",
accepts: {
"*": In,
text: "text/plain",
html: "text/html",
xml: "application/xml, text/xml",
json: "application/json, text/javascript"
},
contents: {xml: /xml/, html: /html/, json: /json/},
responseFields: {xml: "responseXML", text: "responseText"},
converters: {"* text": e.String, "text html": !0, "text json": st.parseJSON, "text xml": st.parseXML},
flatOptions: {url: !0, context: !0}
},
ajaxSetup: function (e, t) {
return t ? H(H(e, st.ajaxSettings), t) : H(st.ajaxSettings, e)
},
ajaxPrefilter: D(Wn),
ajaxTransport: D($n),
ajax: function (e, n) {
function r(e, n, r, s) {
var l, f, v, b, T, N = n;
2 !== x && (x = 2, u && clearTimeout(u), i = t, a = s || "", w.readyState = e > 0 ? 4 : 0, r && (b = M(p, w, r)), e >= 200 && 300 > e || 304 === e ? (p.ifModified && (T = w.getResponseHeader("Last-Modified"), T && (st.lastModified[o] = T), T = w.getResponseHeader("etag"), T && (st.etag[o] = T)), 304 === e ? (l = !0, N = "notmodified") : (l = q(p, b), N = l.state, f = l.data, v = l.error, l = !v)) : (v = N, (e || !N) && (N = "error", 0 > e && (e = 0))), w.status = e, w.statusText = (n || N) + "", l ? g.resolveWith(d, [f, N, w]) : g.rejectWith(d, [w, N, v]), w.statusCode(y), y = t, c && h.trigger(l ? "ajaxSuccess" : "ajaxError", [w, p, l ? f : v]), m.fireWith(d, [w, N]), c && (h.trigger("ajaxComplete", [w, p]), --st.active || st.event.trigger("ajaxStop")))
}
"object" == typeof e && (n = e, e = t), n = n || {};
var i, o, a, s, u, l, c, f, p = st.ajaxSetup({}, n), d = p.context || p, h = p.context && (d.nodeType || d.jquery) ? st(d) : st.event, g = st.Deferred(), m = st.Callbacks("once memory"), y = p.statusCode || {}, v = {}, b = {}, x = 0, T = "canceled", w = {
readyState: 0,
getResponseHeader: function (e) {
var t;
if (2 === x) {
if (!s)for (s = {}; t = _n.exec(a);)s[t[1].toLowerCase()] = t[2];
t = s[e.toLowerCase()]
}
return null == t ? null : t
},
getAllResponseHeaders: function () {
return 2 === x ? a : null
},
setRequestHeader: function (e, t) {
var n = e.toLowerCase();
return x || (e = b[n] = b[n] || e, v[e] = t), this
},
overrideMimeType: function (e) {
return x || (p.mimeType = e), this
},
statusCode: function (e) {
var t;
if (e)if (2 > x)for (t in e)y[t] = [y[t], e[t]]; else w.always(e[w.status]);
return this
},
abort: function (e) {
var t = e || T;
return i && i.abort(t), r(0, t), this
}
};
if (g.promise(w).complete = m.add, w.success = w.done, w.error = w.fail, p.url = ((e || p.url || Dn) + "").replace(Mn, "").replace(Bn, jn[1] + "//"), p.type = n.method || n.type || p.method || p.type, p.dataTypes = st.trim(p.dataType || "*").toLowerCase().match(lt) || [""], null == p.crossDomain && (l = Pn.exec(p.url.toLowerCase()), p.crossDomain = !(!l || l[1] === jn[1] && l[2] === jn[2] && (l[3] || ("http:" === l[1] ? 80 : 443)) == (jn[3] || ("http:" === jn[1] ? 80 : 443)))), p.data && p.processData && "string" != typeof p.data && (p.data = st.param(p.data, p.traditional)), L(Wn, p, n, w), 2 === x)return w;
c = p.global, c && 0 === st.active++ && st.event.trigger("ajaxStart"), p.type = p.type.toUpperCase(), p.hasContent = !On.test(p.type), o = p.url, p.hasContent || (p.data && (o = p.url += (Hn.test(o) ? "&" : "?") + p.data, delete p.data), p.cache === !1 && (p.url = qn.test(o) ? o.replace(qn, "$1_=" + Ln++) : o + (Hn.test(o) ? "&" : "?") + "_=" + Ln++)), p.ifModified && (st.lastModified[o] && w.setRequestHeader("If-Modified-Since", st.lastModified[o]), st.etag[o] && w.setRequestHeader("If-None-Match", st.etag[o])), (p.data && p.hasContent && p.contentType !== !1 || n.contentType) && w.setRequestHeader("Content-Type", p.contentType), w.setRequestHeader("Accept", p.dataTypes[0] && p.accepts[p.dataTypes[0]] ? p.accepts[p.dataTypes[0]] + ("*" !== p.dataTypes[0] ? ", " + In + "; q=0.01" : "") : p.accepts["*"]);
for (f in p.headers)w.setRequestHeader(f, p.headers[f]);
if (p.beforeSend && (p.beforeSend.call(d, w, p) === !1 || 2 === x))return w.abort();
T = "abort";
for (f in{success: 1, error: 1, complete: 1})w[f](p[f]);
if (i = L($n, p, n, w)) {
w.readyState = 1, c && h.trigger("ajaxSend", [w, p]), p.async && p.timeout > 0 && (u = setTimeout(function () {
w.abort("timeout")
}, p.timeout));
try {
x = 1, i.send(v, r)
} catch (N) {
if (!(2 > x))throw N;
r(-1, N)
}
} else r(-1, "No Transport");
return w
},
getScript: function (e, n) {
return st.get(e, t, n, "script")
},
getJSON: function (e, t, n) {
return st.get(e, t, n, "json")
}
}), st.ajaxSetup({
accepts: {script: "text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},
contents: {script: /(?:java|ecma)script/},
converters: {
"text script": function (e) {
return st.globalEval(e), e
}
}
}), st.ajaxPrefilter("script", function (e) {
e.cache === t && (e.cache = !1), e.crossDomain && (e.type = "GET", e.global = !1)
}), st.ajaxTransport("script", function (e) {
if (e.crossDomain) {
var n, r = V.head || st("head")[0] || V.documentElement;
return {
send: function (t, i) {
n = V.createElement("script"), n.async = !0, e.scriptCharset && (n.charset = e.scriptCharset), n.src = e.url, n.onload = n.onreadystatechange = function (e, t) {
(t || !n.readyState || /loaded|complete/.test(n.readyState)) && (n.onload = n.onreadystatechange = null, n.parentNode && n.parentNode.removeChild(n), n = null, t || i(200, "success"))
}, r.insertBefore(n, r.firstChild)
}, abort: function () {
n && n.onload(t, !0)
}
}
}
});
var Xn = [], Un = /(=)\?(?=&|$)|\?\?/;
st.ajaxSetup({
jsonp: "callback", jsonpCallback: function () {
var e = Xn.pop() || st.expando + "_" + Ln++;
return this[e] = !0, e
}
}), st.ajaxPrefilter("json jsonp", function (n, r, i) {
var o, a, s, u = n.jsonp !== !1 && (Un.test(n.url) ? "url" : "string" == typeof n.data && !(n.contentType || "").indexOf("application/x-www-form-urlencoded") && Un.test(n.data) && "data");
return u || "jsonp" === n.dataTypes[0] ? (o = n.jsonpCallback = st.isFunction(n.jsonpCallback) ? n.jsonpCallback() : n.jsonpCallback, u ? n[u] = n[u].replace(Un, "$1" + o) : n.jsonp !== !1 && (n.url += (Hn.test(n.url) ? "&" : "?") + n.jsonp + "=" + o), n.converters["script json"] = function () {
return s || st.error(o + " was not called"), s[0]
}, n.dataTypes[0] = "json", a = e[o], e[o] = function () {
s = arguments
}, i.always(function () {
e[o] = a, n[o] && (n.jsonpCallback = r.jsonpCallback, Xn.push(o)), s && st.isFunction(a) && a(s[0]), s = a = t
}), "script") : t
});
var Vn, Yn, Jn = 0, Gn = e.ActiveXObject && function () {
var e;
for (e in Vn)Vn[e](t, !0)
};
st.ajaxSettings.xhr = e.ActiveXObject ? function () {
return !this.isLocal && _() || F()
} : _, Yn = st.ajaxSettings.xhr(), st.support.cors = !!Yn && "withCredentials"in Yn, Yn = st.support.ajax = !!Yn, Yn && st.ajaxTransport(function (n) {
if (!n.crossDomain || st.support.cors) {
var r;
return {
send: function (i, o) {
var a, s, u = n.xhr();
if (n.username ? u.open(n.type, n.url, n.async, n.username, n.password) : u.open(n.type, n.url, n.async), n.xhrFields)for (s in n.xhrFields)u[s] = n.xhrFields[s];
n.mimeType && u.overrideMimeType && u.overrideMimeType(n.mimeType), n.crossDomain || i["X-Requested-With"] || (i["X-Requested-With"] = "XMLHttpRequest");
try {
for (s in i)u.setRequestHeader(s, i[s])
} catch (l) {
}
u.send(n.hasContent && n.data || null), r = function (e, i) {
var s, l, c, f, p;
try {
if (r && (i || 4 === u.readyState))if (r = t, a && (u.onreadystatechange = st.noop, Gn && delete Vn[a]), i)4 !== u.readyState && u.abort(); else {
f = {}, s = u.status, p = u.responseXML, c = u.getAllResponseHeaders(), p && p.documentElement && (f.xml = p), "string" == typeof u.responseText && (f.text = u.responseText);
try {
l = u.statusText
} catch (d) {
l = ""
}
s || !n.isLocal || n.crossDomain ? 1223 === s && (s = 204) : s = f.text ? 200 : 404
}
} catch (h) {
i || o(-1, h)
}
f && o(s, l, f, c)
}, n.async ? 4 === u.readyState ? setTimeout(r) : (a = ++Jn, Gn && (Vn || (Vn = {}, st(e).unload(Gn)), Vn[a] = r), u.onreadystatechange = r) : r()
}, abort: function () {
r && r(t, !0)
}
}
}
});
var Qn, Kn, Zn = /^(?:toggle|show|hide)$/, er = RegExp("^(?:([+-])=|)(" + ut + ")([a-z%]*)$", "i"), tr = /queueHooks$/, nr = [W], rr = {
"*": [function (e, t) {
var n, r, i = this.createTween(e, t), o = er.exec(t), a = i.cur(), s = +a || 0, u = 1, l = 20;
if (o) {
if (n = +o[2], r = o[3] || (st.cssNumber[e] ? "" : "px"), "px" !== r && s) {
s = st.css(i.elem, e, !0) || n || 1;
do u = u || ".5", s /= u, st.style(i.elem, e, s + r); while (u !== (u = i.cur() / a) && 1 !== u && --l)
}
i.unit = r, i.start = s, i.end = o[1] ? s + (o[1] + 1) * n : n
}
return i
}]
};
st.Animation = st.extend(P, {
tweener: function (e, t) {
st.isFunction(e) ? (t = e, e = ["*"]) : e = e.split(" ");
for (var n, r = 0, i = e.length; i > r; r++)n = e[r], rr[n] = rr[n] || [], rr[n].unshift(t)
}, prefilter: function (e, t) {
t ? nr.unshift(e) : nr.push(e)
}
}), st.Tween = $, $.prototype = {
constructor: $, init: function (e, t, n, r, i, o) {
this.elem = e, this.prop = n, this.easing = i || "swing", this.options = t, this.start = this.now = this.cur(), this.end = r, this.unit = o || (st.cssNumber[n] ? "" : "px")
}, cur: function () {
var e = $.propHooks[this.prop];
return e && e.get ? e.get(this) : $.propHooks._default.get(this)
}, run: function (e) {
var t, n = $.propHooks[this.prop];
return this.pos = t = this.options.duration ? st.easing[this.easing](e, this.options.duration * e, 0, 1, this.options.duration) : e, this.now = (this.end - this.start) * t + this.start, this.options.step && this.options.step.call(this.elem, this.now, this), n && n.set ? n.set(this) : $.propHooks._default.set(this), this
}
}, $.prototype.init.prototype = $.prototype, $.propHooks = {
_default: {
get: function (e) {
var t;
return null == e.elem[e.prop] || e.elem.style && null != e.elem.style[e.prop] ? (t = st.css(e.elem, e.prop, "auto"), t && "auto" !== t ? t : 0) : e.elem[e.prop]
}, set: function (e) {
st.fx.step[e.prop] ? st.fx.step[e.prop](e) : e.elem.style && (null != e.elem.style[st.cssProps[e.prop]] || st.cssHooks[e.prop]) ? st.style(e.elem, e.prop, e.now + e.unit) : e.elem[e.prop] = e.now
}
}
}, $.propHooks.scrollTop = $.propHooks.scrollLeft = {
set: function (e) {
e.elem.nodeType && e.elem.parentNode && (e.elem[e.prop] = e.now)
}
}, st.each(["toggle", "show", "hide"], function (e, t) {
var n = st.fn[t];
st.fn[t] = function (e, r, i) {
return null == e || "boolean" == typeof e ? n.apply(this, arguments) : this.animate(I(t, !0), e, r, i)
}
}), st.fn.extend({
fadeTo: function (e, t, n, r) {
return this.filter(w).css("opacity", 0).show().end().animate({opacity: t}, e, n, r)
}, animate: function (e, t, n, r) {
var i = st.isEmptyObject(e), o = st.speed(t, n, r), a = function () {
var t = P(this, st.extend({}, e), o);
a.finish = function () {
t.stop(!0)
}, (i || st._data(this, "finish")) && t.stop(!0)
};
return a.finish = a, i || o.queue === !1 ? this.each(a) : this.queue(o.queue, a)
}, stop: function (e, n, r) {
var i = function (e) {
var t = e.stop;
delete e.stop, t(r)
};
return "string" != typeof e && (r = n, n = e, e = t), n && e !== !1 && this.queue(e || "fx", []), this.each(function () {
var t = !0, n = null != e && e + "queueHooks", o = st.timers, a = st._data(this);
if (n)a[n] && a[n].stop && i(a[n]); else for (n in a)a[n] && a[n].stop && tr.test(n) && i(a[n]);
for (n = o.length; n--;)o[n].elem !== this || null != e && o[n].queue !== e || (o[n].anim.stop(r), t = !1, o.splice(n, 1));
(t || !r) && st.dequeue(this, e)
})
}, finish: function (e) {
return e !== !1 && (e = e || "fx"), this.each(function () {
var t, n = st._data(this), r = n[e + "queue"], i = n[e + "queueHooks"], o = st.timers, a = r ? r.length : 0;
for (n.finish = !0, st.queue(this, e, []), i && i.cur && i.cur.finish && i.cur.finish.call(this), t = o.length; t--;)o[t].elem === this && o[t].queue === e && (o[t].anim.stop(!0), o.splice(t, 1));
for (t = 0; a > t; t++)r[t] && r[t].finish && r[t].finish.call(this);
delete n.finish
})
}
}), st.each({
slideDown: I("show"),
slideUp: I("hide"),
slideToggle: I("toggle"),
fadeIn: {opacity: "show"},
fadeOut: {opacity: "hide"},
fadeToggle: {opacity: "toggle"}
}, function (e, t) {
st.fn[e] = function (e, n, r) {
return this.animate(t, e, n, r)
}
}), st.speed = function (e, t, n) {
var r = e && "object" == typeof e ? st.extend({}, e) : {
complete: n || !n && t || st.isFunction(e) && e,
duration: e,
easing: n && t || t && !st.isFunction(t) && t
};
return r.duration = st.fx.off ? 0 : "number" == typeof r.duration ? r.duration : r.duration in st.fx.speeds ? st.fx.speeds[r.duration] : st.fx.speeds._default, (null == r.queue || r.queue === !0) && (r.queue = "fx"), r.old = r.complete, r.complete = function () {
st.isFunction(r.old) && r.old.call(this), r.queue && st.dequeue(this, r.queue)
}, r
}, st.easing = {
linear: function (e) {
return e
}, swing: function (e) {
return .5 - Math.cos(e * Math.PI) / 2
}
}, st.timers = [], st.fx = $.prototype.init, st.fx.tick = function () {
var e, n = st.timers, r = 0;
for (Qn = st.now(); n.length > r; r++)e = n[r], e() || n[r] !== e || n.splice(r--, 1);
n.length || st.fx.stop(), Qn = t
}, st.fx.timer = function (e) {
e() && st.timers.push(e) && st.fx.start()
}, st.fx.interval = 13, st.fx.start = function () {
Kn || (Kn = setInterval(st.fx.tick, st.fx.interval))
}, st.fx.stop = function () {
clearInterval(Kn), Kn = null
}, st.fx.speeds = {
slow: 600,
fast: 200,
_default: 400
}, st.fx.step = {}, st.expr && st.expr.filters && (st.expr.filters.animated = function (e) {
return st.grep(st.timers, function (t) {
return e === t.elem
}).length
}), st.fn.offset = function (e) {
if (arguments.length)return e === t ? this : this.each(function (t) {
st.offset.setOffset(this, e, t)
});
var n, r, i = {top: 0, left: 0}, o = this[0], a = o && o.ownerDocument;
if (a)return n = a.documentElement, st.contains(n, o) ? (o.getBoundingClientRect !== t && (i = o.getBoundingClientRect()), r = z(a), {
top: i.top + (r.pageYOffset || n.scrollTop) - (n.clientTop || 0),
left: i.left + (r.pageXOffset || n.scrollLeft) - (n.clientLeft || 0)
}) : i
}, st.offset = {
setOffset: function (e, t, n) {
var r = st.css(e, "position");
"static" === r && (e.style.position = "relative");
var i, o, a = st(e), s = a.offset(), u = st.css(e, "top"), l = st.css(e, "left"), c = ("absolute" === r || "fixed" === r) && st.inArray("auto", [u, l]) > -1, f = {}, p = {};
c ? (p = a.position(), i = p.top, o = p.left) : (i = parseFloat(u) || 0, o = parseFloat(l) || 0), st.isFunction(t) && (t = t.call(e, n, s)), null != t.top && (f.top = t.top - s.top + i), null != t.left && (f.left = t.left - s.left + o), "using"in t ? t.using.call(e, f) : a.css(f)
}
}, st.fn.extend({
position: function () {
if (this[0]) {
var e, t, n = {top: 0, left: 0}, r = this[0];
return "fixed" === st.css(r, "position") ? t = r.getBoundingClientRect() : (e = this.offsetParent(), t = this.offset(), st.nodeName(e[0], "html") || (n = e.offset()), n.top += st.css(e[0], "borderTopWidth", !0), n.left += st.css(e[0], "borderLeftWidth", !0)), {
top: t.top - n.top - st.css(r, "marginTop", !0),
left: t.left - n.left - st.css(r, "marginLeft", !0)
}
}
}, offsetParent: function () {
return this.map(function () {
for (var e = this.offsetParent || V.documentElement; e && !st.nodeName(e, "html") && "static" === st.css(e, "position");)e = e.offsetParent;
return e || V.documentElement
})
}
}), st.each({scrollLeft: "pageXOffset", scrollTop: "pageYOffset"}, function (e, n) {
var r = /Y/.test(n);
st.fn[e] = function (i) {
return st.access(this, function (e, i, o) {
var a = z(e);
return o === t ? a ? n in a ? a[n] : a.document.documentElement[i] : e[i] : (a ? a.scrollTo(r ? st(a).scrollLeft() : o, r ? o : st(a).scrollTop()) : e[i] = o, t)
}, e, i, arguments.length, null)
}
}), st.each({Height: "height", Width: "width"}, function (e, n) {
st.each({padding: "inner" + e, content: n, "": "outer" + e}, function (r, i) {
st.fn[i] = function (i, o) {
var a = arguments.length && (r || "boolean" != typeof i), s = r || (i === !0 || o === !0 ? "margin" : "border");
return st.access(this, function (n, r, i) {
var o;
return st.isWindow(n) ? n.document.documentElement["client" + e] : 9 === n.nodeType ? (o = n.documentElement, Math.max(n.body["scroll" + e], o["scroll" + e], n.body["offset" + e], o["offset" + e], o["client" + e])) : i === t ? st.css(n, r, s) : st.style(n, r, i, s)
}, n, a ? i : t, a, null)
}
})
}), e.jQuery = e.$ = st, "function" == typeof define && define.amd && define.amd.jQuery && define("jquery", [], function () {
return st
})
})(window);
//@ sourceMappingURL=jquery.min.map | |
subnet.py | # -*- coding: utf-8 -*-
"""Utilities for working with VPC subnets."""
from . import client as boto3client
def create(profile, cidr_block, vpc, availability_zone=None):
"""Create a subnet in a VPC.
Args:
profile
A profile to connect to AWS with.
cidr_block
The network range for the subnet, in CIDR notation.
For instance, "10.0.0.0/24".
vpc
The ID of the VPC you want to create the subnet in.
availability_zone
The name of the availability zone to create the subnet in.
If None, Amazon will pick one for you.
Returns:
The JSON response returned by boto3.
"""
client = boto3client.get("ec2", profile)
params = {}
params["CidrBlock"] = cidr_block
params["VpcId"] = vpc
if availability_zone:
params["AvailabilityZone"] = availability_zone
return client.create_subnet(**params)
def | (profile, subnet):
"""Delete a subnet from a VPC.
Args:
profile
A profile to connect to AWS with.
subnet
The ID of the subnet you want to delete.
"""
client = boto3client.get("ec2", profile)
params = {}
params["SubnetId"] = subnet
return client.delete_subnet(**params)
def get(profile, filters=None):
"""Get a list of all subnets.
Args:
profile
A profile to connect to AWS with.
filters
Filters to apply to the request.
Returns:
The JSON response returned by boto3.
"""
client = boto3client.get("ec2", profile)
params = {}
if filters:
params["Filters"] = filters
return client.describe_subnets(**params)
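# Illustrative sketch only: "filters" takes the standard boto3 describe_*
# filter shape; the profile name and VPC ID below are assumptions, not values
# defined by this module.
#
# response = get("default", filters=[{"Name": "vpc-id", "Values": ["vpc-1234abcd"]}])
# subnet_ids = [s["SubnetId"] for s in response["Subnets"]]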
def enable_public_ips(profile, subnet):
"""Set the subnet to give instances public IPs by default.
Args:
profile
A profile to connect to AWS with.
subnet
The ID of the subnet.
Returns:
The JSON response returned by boto3.
"""
client = boto3client.get("ec2", profile)
params = {}
params["SubnetId"] = subnet
params["MapPublicIpOnLaunch"] = {"Value": True}
return client.modify_subnet_attribute(**params)
def disable_public_ips(profile, subnet):
"""Set the subnet not to give instances public IPs by default.
Args:
profile
A profile to connect to AWS with.
subnet
The ID of the subnet.
Returns:
The JSON response returned by boto3.
"""
client = boto3client.get("ec2", profile)
params = {}
params["SubnetId"] = subnet
params["MapPublicIpOnLaunch"] = {"Value": False}
return client.modify_subnet_attribute(**params)
def tag(profile, subnet, key, value):
"""Add a tag to a subnet.
Args:
profile
A profile to connect to AWS with.
subnet
The ID of the subnet you want to tag.
key
The key/name of the tag.
value
The value of the tag.
Returns:
The JSON response returned by boto3.
"""
client = boto3client.get("ec2", profile)
params = {}
params["Resources"] = [subnet]
params["Tags"] = [{"Key": key, "Value": value}]
return client.create_tags(**params)
| delete |
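A minimal end-to-end sketch of how the helpers in subnet.py compose, assuming the module is importable as subnet and using illustrative profile, VPC, and CIDR values; the response shape follows the standard boto3 create_subnet reply:
import subnet
response = subnet.create("default", "10.0.0.0/24", "vpc-1234abcd", availability_zone="us-east-1a")
new_subnet = response["Subnet"]["SubnetId"]  # boto3 returns the new subnet under "Subnet"
subnet.tag("default", new_subnet, "Name", "public-a")  # tag it for identification
subnet.enable_public_ips("default", new_subnet)  # instances launched here get public IPs by default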
footer.js | $(".footer").load("./footer.html");
}); | $(document).ready(function(){ |
|
NumericUpDown.py | class NumericUpDown(UpDownBase,IComponent,IDisposable,IOleControl,IOleObject,IOleInPlaceObject,IOleInPlaceActiveObject,IOleWindow,IViewObject,IViewObject2,IPersist,IPersistStreamInit,IPersistPropertyBag,IPersistStorage,IQuickActivate,ISupportOleDropSource,IDropTarget,ISynchronizeInvoke,IWin32Window,IArrangedElement,IBindableComponent,IContainerControl,ISupportInitialize):
"""
Represents a Windows spin box (also known as an up-down control) that displays numeric values.
NumericUpDown()
"""
def AccessibilityNotifyClients(self,*args):
"""
AccessibilityNotifyClients(self: Control,accEvent: AccessibleEvents,objectID: int,childID: int)
Notifies the accessibility client applications of the specified
System.Windows.Forms.AccessibleEvents for the specified child control .
accEvent: The System.Windows.Forms.AccessibleEvents to notify the accessibility client applications of.
objectID: The identifier of the System.Windows.Forms.AccessibleObject.
childID: The child System.Windows.Forms.Control to notify of the accessible event.
AccessibilityNotifyClients(self: Control,accEvent: AccessibleEvents,childID: int)
Notifies the accessibility client applications of the specified
System.Windows.Forms.AccessibleEvents for the specified child control.
accEvent: The System.Windows.Forms.AccessibleEvents to notify the accessibility client applications of.
childID: The child System.Windows.Forms.Control to notify of the accessible event.
"""
pass
def AdjustFormScrollbars(self,*args):
"""
AdjustFormScrollbars(self: ContainerControl,displayScrollbars: bool)
displayScrollbars: true to show the scroll bars; otherwise,false.
"""
pass
def BeginInit(self):
"""
BeginInit(self: NumericUpDown)
Begins the initialization of a System.Windows.Forms.NumericUpDown control that is used on a form
or used by another component. The initialization occurs at run time.
"""
pass
def CreateAccessibilityInstance(self,*args):
"""
CreateAccessibilityInstance(self: NumericUpDown) -> AccessibleObject
Returns: A new System.Windows.Forms.AccessibleObject for the control.
"""
pass
def CreateControlsInstance(self,*args):
"""
CreateControlsInstance(self: Control) -> ControlCollection
Creates a new instance of the control collection for the control.
Returns: A new instance of System.Windows.Forms.Control.ControlCollection assigned to the control.
"""
pass
def CreateHandle(self,*args):
"""
CreateHandle(self: Control)
Creates a handle for the control.
"""
pass
def DefWndProc(self,*args):
"""
DefWndProc(self: Control,m: Message) -> Message
Sends the specified message to the default window procedure.
m: The Windows System.Windows.Forms.Message to process.
"""
pass
def DestroyHandle(self,*args):
"""
DestroyHandle(self: Control)
Destroys the handle associated with the control.
"""
pass
def Dispose(self):
"""
Dispose(self: ContainerControl,disposing: bool)
disposing: true to release both managed and unmanaged resources; false to release only unmanaged resources.
"""
pass
def DownButton(self):
"""
DownButton(self: NumericUpDown)
Decrements the value of the spin box (also known as an up-down control).
"""
pass
def EndInit(self):
"""
EndInit(self: NumericUpDown)
Ends the initialization of a System.Windows.Forms.NumericUpDown control that is used on a form
or used by another component. The initialization occurs at run time.
"""
pass
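# Illustrative usage sketch, assuming an IronPython host with
# System.Windows.Forms loaded: BeginInit/EndInit bracket batched property
# assignments so the control defers validation until initialization
# completes, and DownButton steps the value down by Increment.
#
# updown = NumericUpDown()
# updown.BeginInit()
# updown.Minimum = 0
# updown.Maximum = 100
# updown.Value = 42
# updown.EndInit()
# updown.DownButton()  # Value becomes 41 with the default Increment of 1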
def GetAccessibilityObjectById(self,*args):
"""
GetAccessibilityObjectById(self: Control,objectId: int) -> AccessibleObject
Retrieves the specified System.Windows.Forms.AccessibleObject.
objectId: An Int32 that identifies the System.Windows.Forms.AccessibleObject to retrieve.
Returns: An System.Windows.Forms.AccessibleObject.
"""
pass
def GetAutoSizeMode(self,*args):
"""
GetAutoSizeMode(self: Control) -> AutoSizeMode
Retrieves a value indicating how a control will behave when its
System.Windows.Forms.Control.AutoSize property is enabled.
Returns: One of the System.Windows.Forms.AutoSizeMode values.
"""
pass
def GetScaledBounds(self,*args):
"""
GetScaledBounds(self: Control,bounds: Rectangle,factor: SizeF,specified: BoundsSpecified) -> Rectangle
Retrieves the bounds within which the control is scaled.
bounds: A System.Drawing.Rectangle that specifies the area for which to retrieve the display bounds.
factor: The height and width of the control's bounds.
specified: One of the values of System.Windows.Forms.BoundsSpecified that specifies the bounds of the
control to use when defining its size and position.
Returns: A System.Drawing.Rectangle representing the bounds within which the control is scaled.
"""
pass
def GetScrollState(self,*args):
"""
GetScrollState(self: ScrollableControl,bit: int) -> bool
Determines whether the specified flag has been set.
bit: The flag to check.
Returns: true if the specified flag has been set; otherwise,false.
"""
pass
def GetService(self,*args):
"""
GetService(self: Component,service: Type) -> object
Returns an object that represents a service provided by the System.ComponentModel.Component or
by its System.ComponentModel.Container.
service: A service provided by the System.ComponentModel.Component.
Returns: An System.Object that represents a service provided by the System.ComponentModel.Component,or
null if the System.ComponentModel.Component does not provide the specified service.
"""
pass
def GetStyle(self,*args):
"""
GetStyle(self: Control,flag: ControlStyles) -> bool
Retrieves the value of the specified control style bit for the control.
flag: The System.Windows.Forms.ControlStyles bit to return the value from.
Returns: true if the specified control style bit is set to true; otherwise,false.
"""
pass
def GetTopLevel(self,*args):
"""
GetTopLevel(self: Control) -> bool
Determines if the control is a top-level control.
Returns: true if the control is a top-level control; otherwise,false.
"""
pass
def InitLayout(self,*args):
"""
InitLayout(self: Control)
Called after the control has been added to another container.
"""
pass
def InvokeGotFocus(self,*args):
"""
InvokeGotFocus(self: Control,toInvoke: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.GotFocus event for the specified control.
toInvoke: The System.Windows.Forms.Control to assign the event to.
e: An System.EventArgs that contains the event data.
"""
pass
def InvokeLostFocus(self,*args):
"""
InvokeLostFocus(self: Control,toInvoke: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.LostFocus event for the specified control.
toInvoke: The System.Windows.Forms.Control to assign the event to.
e: An System.EventArgs that contains the event data.
"""
pass
def InvokeOnClick(self,*args):
"""
InvokeOnClick(self: Control,toInvoke: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.Click event for the specified control.
toInvoke: The System.Windows.Forms.Control to assign the System.Windows.Forms.Control.Click event to.
e: An System.EventArgs that contains the event data.
"""
pass
def InvokePaint(self,*args):
"""
InvokePaint(self: Control,c: Control,e: PaintEventArgs)
Raises the System.Windows.Forms.Control.Paint event for the specified control.
c: The System.Windows.Forms.Control to assign the System.Windows.Forms.Control.Paint event to.
e: An System.Windows.Forms.PaintEventArgs that contains the event data.
"""
pass
def InvokePaintBackground(self,*args):
"""
InvokePaintBackground(self: Control,c: Control,e: PaintEventArgs)
Raises the PaintBackground event for the specified control.
c: The System.Windows.Forms.Control to assign the System.Windows.Forms.Control.Paint event to.
e: An System.Windows.Forms.PaintEventArgs that contains the event data.
"""
pass
def IsInputChar(self,*args):
"""
IsInputChar(self: Control,charCode: Char) -> bool
Determines if a character is an input character that the control recognizes.
charCode: The character to test.
Returns: true if the character should be sent directly to the control and not preprocessed; otherwise,
false.
"""
pass
def IsInputKey(self,*args):
"""
IsInputKey(self: Control,keyData: Keys) -> bool
Determines whether the specified key is a regular input key or a special key that requires
preprocessing.
keyData: One of the System.Windows.Forms.Keys values.
Returns: true if the specified key is a regular input key; otherwise,false.
"""
pass
def MemberwiseClone(self,*args):
"""
MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject
Creates a shallow copy of the current System.MarshalByRefObject object.
cloneIdentity: false to delete the current System.MarshalByRefObject object's identity,which will cause the
object to be assigned a new identity when it is marshaled across a remoting boundary. A value of
false is usually appropriate. true to copy the current System.MarshalByRefObject object's
identity to its clone,which will cause remoting client calls to be routed to the remote server
object.
Returns: A shallow copy of the current System.MarshalByRefObject object.
MemberwiseClone(self: object) -> object
Creates a shallow copy of the current System.Object.
Returns: A shallow copy of the current System.Object.
"""
pass
def NotifyInvalidate(self,*args):
"""
NotifyInvalidate(self: Control,invalidatedArea: Rectangle)
Raises the System.Windows.Forms.Control.Invalidated event with a specified region of the control
to invalidate.
invalidatedArea: A System.Drawing.Rectangle representing the area to invalidate.
"""
pass
def OnAutoSizeChanged(self,*args):
"""
OnAutoSizeChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.AutoSizeChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnAutoValidateChanged(self,*args):
"""
OnAutoValidateChanged(self: ContainerControl,e: EventArgs)
Raises the System.Windows.Forms.ContainerControl.AutoValidateChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnBackColorChanged(self,*args):
"""
OnBackColorChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.BackColorChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnBackgroundImageChanged(self,*args):
"""
OnBackgroundImageChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.BackgroundImageChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnBackgroundImageLayoutChanged(self,*args):
"""
OnBackgroundImageLayoutChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.BackgroundImageLayoutChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnBindingContextChanged(self,*args):
"""
OnBindingContextChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.BindingContextChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnCausesValidationChanged(self,*args):
"""
OnCausesValidationChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.CausesValidationChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnChanged(self,*args):
"""
OnChanged(self: UpDownBase,source: object,e: EventArgs)
When overridden in a derived class,raises the Changed event.
source: The source of the event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnChangeUICues(self,*args):
"""
OnChangeUICues(self: Control,e: UICuesEventArgs)
Raises the System.Windows.Forms.Control.ChangeUICues event.
e: A System.Windows.Forms.UICuesEventArgs that contains the event data.
"""
pass
def OnClick(self,*args):
"""
OnClick(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.Click event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnClientSizeChanged(self,*args):
"""
OnClientSizeChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.ClientSizeChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnContextMenuChanged(self,*args):
"""
OnContextMenuChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.ContextMenuChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnContextMenuStripChanged(self,*args):
"""
OnContextMenuStripChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.ContextMenuStripChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnControlAdded(self,*args):
"""
OnControlAdded(self: Control,e: ControlEventArgs)
Raises the System.Windows.Forms.Control.ControlAdded event.
e: A System.Windows.Forms.ControlEventArgs that contains the event data.
"""
pass
def OnControlRemoved(self,*args):
"""
OnControlRemoved(self: Control,e: ControlEventArgs)
Raises the System.Windows.Forms.Control.ControlRemoved event.
e: A System.Windows.Forms.ControlEventArgs that contains the event data.
"""
pass
def OnCreateControl(self,*args):
""" OnCreateControl(self: ContainerControl) """
pass
def OnCursorChanged(self,*args):
"""
OnCursorChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.CursorChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnDockChanged(self,*args):
"""
OnDockChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.DockChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnDoubleClick(self,*args):
"""
OnDoubleClick(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.DoubleClick event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnDpiChangedAfterParent(self,*args):
""" OnDpiChangedAfterParent(self: Control,e: EventArgs) """
pass
def OnDpiChangedBeforeParent(self,*args):
""" OnDpiChangedBeforeParent(self: Control,e: EventArgs) """
pass
def OnDragDrop(self,*args):
"""
OnDragDrop(self: Control,drgevent: DragEventArgs)
Raises the System.Windows.Forms.Control.DragDrop event.
drgevent: A System.Windows.Forms.DragEventArgs that contains the event data.
"""
pass
def OnDragEnter(self,*args):
"""
OnDragEnter(self: Control,drgevent: DragEventArgs)
Raises the System.Windows.Forms.Control.DragEnter event.
drgevent: A System.Windows.Forms.DragEventArgs that contains the event data.
"""
pass
def OnDragLeave(self,*args):
"""
OnDragLeave(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.DragLeave event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnDragOver(self,*args):
"""
OnDragOver(self: Control,drgevent: DragEventArgs)
Raises the System.Windows.Forms.Control.DragOver event.
drgevent: A System.Windows.Forms.DragEventArgs that contains the event data.
"""
pass
def OnEnabledChanged(self,*args):
"""
OnEnabledChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.EnabledChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnEnter(self,*args):
"""
OnEnter(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.Enter event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnFontChanged(self,*args):
"""
OnFontChanged(self: UpDownBase,e: EventArgs)
Raises the System.Windows.Forms.Control.FontChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnForeColorChanged(self,*args):
"""
OnForeColorChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.ForeColorChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnGiveFeedback(self,*args):
"""
OnGiveFeedback(self: Control,gfbevent: GiveFeedbackEventArgs)
Raises the System.Windows.Forms.Control.GiveFeedback event.
gfbevent: A System.Windows.Forms.GiveFeedbackEventArgs that contains the event data.
"""
pass
def OnGotFocus(self,*args):
"""
OnGotFocus(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.GotFocus event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnHandleCreated(self,*args):
"""
OnHandleCreated(self: UpDownBase,e: EventArgs)
Raises the System.Windows.Forms.Control.HandleCreated event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnHandleDestroyed(self,*args):
"""
OnHandleDestroyed(self: UpDownBase,e: EventArgs)
Raises the System.Windows.Forms.Control.HandleDestroyed event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnHelpRequested(self,*args):
"""
OnHelpRequested(self: Control,hevent: HelpEventArgs)
Raises the System.Windows.Forms.Control.HelpRequested event.
hevent: A System.Windows.Forms.HelpEventArgs that contains the event data.
"""
pass
def OnImeModeChanged(self,*args):
|
def OnInvalidated(self,*args):
"""
OnInvalidated(self: Control,e: InvalidateEventArgs)
Raises the System.Windows.Forms.Control.Invalidated event.
e: An System.Windows.Forms.InvalidateEventArgs that contains the event data.
"""
pass
def OnKeyDown(self,*args):
"""
OnKeyDown(self: NumericUpDown,e: KeyEventArgs)
Raises the System.Windows.Forms.Control.KeyDown event.
e: A System.Windows.Forms.KeyEventArgs that contains the event data.
"""
pass
def OnKeyPress(self,*args):
"""
OnKeyPress(self: Control,e: KeyPressEventArgs)
Raises the System.Windows.Forms.Control.KeyPress event.
e: A System.Windows.Forms.KeyPressEventArgs that contains the event data.
"""
pass
def OnKeyUp(self,*args):
"""
OnKeyUp(self: NumericUpDown,e: KeyEventArgs)
Raises the System.Windows.Forms.Control.KeyUp event.
e: A System.Windows.Forms.KeyEventArgs that contains the event data.
"""
pass
def OnLayout(self,*args):
"""
OnLayout(self: UpDownBase,e: LayoutEventArgs)
Raises the System.Windows.Forms.Control.Layout event.
e: A System.Windows.Forms.LayoutEventArgs that contains the event data.
"""
pass
def OnLeave(self,*args):
"""
OnLeave(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.Leave event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnLocationChanged(self,*args):
"""
OnLocationChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.LocationChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnLostFocus(self,*args):
"""
OnLostFocus(self: NumericUpDown,e: EventArgs)
Raises the System.Windows.Forms.Control.LostFocus event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnMarginChanged(self,*args):
"""
OnMarginChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.MarginChanged event.
e: A System.EventArgs that contains the event data.
"""
pass
def OnMouseCaptureChanged(self,*args):
"""
OnMouseCaptureChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.MouseCaptureChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnMouseClick(self,*args):
"""
OnMouseClick(self: Control,e: MouseEventArgs)
Raises the System.Windows.Forms.Control.MouseClick event.
e: An System.Windows.Forms.MouseEventArgs that contains the event data.
"""
pass
def OnMouseDoubleClick(self,*args):
"""
OnMouseDoubleClick(self: Control,e: MouseEventArgs)
Raises the System.Windows.Forms.Control.MouseDoubleClick event.
e: An System.Windows.Forms.MouseEventArgs that contains the event data.
"""
pass
def OnMouseDown(self,*args):
"""
OnMouseDown(self: UpDownBase,e: MouseEventArgs)
Raises the System.Windows.Forms.Control.MouseDown event.
e: A System.Windows.Forms.MouseEventArgs that contains the event data.
"""
pass
def OnMouseEnter(self,*args):
"""
OnMouseEnter(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.MouseEnter event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnMouseHover(self,*args):
"""
OnMouseHover(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.MouseHover event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnMouseLeave(self,*args):
"""
OnMouseLeave(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.MouseLeave event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnMouseMove(self,*args):
"""
OnMouseMove(self: Control,e: MouseEventArgs)
Raises the System.Windows.Forms.Control.MouseMove event.
e: A System.Windows.Forms.MouseEventArgs that contains the event data.
"""
pass
def OnMouseUp(self,*args):
"""
OnMouseUp(self: UpDownBase,mevent: MouseEventArgs)
Raises the System.Windows.Forms.Control.MouseUp event.
mevent: A System.Windows.Forms.MouseEventArgs that contains the event data.
"""
pass
def OnMouseWheel(self,*args):
"""
OnMouseWheel(self: UpDownBase,e: MouseEventArgs)
Raises the System.Windows.Forms.Control.MouseWheel event.
e: A System.Windows.Forms.MouseEventArgs that contains the event data.
"""
pass
def OnMove(self,*args):
"""
OnMove(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.Move event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnNotifyMessage(self,*args):
"""
OnNotifyMessage(self: Control,m: Message)
Notifies the control of Windows messages.
m: A System.Windows.Forms.Message that represents the Windows message.
"""
pass
def OnPaddingChanged(self,*args):
"""
OnPaddingChanged(self: ScrollableControl,e: EventArgs)
Raises the System.Windows.Forms.Control.PaddingChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnPaint(self,*args):
"""
OnPaint(self: UpDownBase,e: PaintEventArgs)
Raises the System.Windows.Forms.Control.Paint event.
e: A System.Windows.Forms.PaintEventArgs that contains the event data.
"""
pass
def OnPaintBackground(self,*args):
"""
OnPaintBackground(self: ScrollableControl,e: PaintEventArgs)
Paints the background of the control.
e: A System.Windows.Forms.PaintEventArgs that contains the event data.
"""
pass
def OnParentBackColorChanged(self,*args):
"""
OnParentBackColorChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.BackColorChanged event when the
System.Windows.Forms.Control.BackColor property value of the control's container changes.
e: An System.EventArgs that contains the event data.
"""
pass
def OnParentBackgroundImageChanged(self,*args):
"""
OnParentBackgroundImageChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.BackgroundImageChanged event when the
System.Windows.Forms.Control.BackgroundImage property value of the control's container changes.
e: An System.EventArgs that contains the event data.
"""
pass
def OnParentBindingContextChanged(self,*args):
"""
OnParentBindingContextChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.BindingContextChanged event when the
System.Windows.Forms.Control.BindingContext property value of the control's container changes.
e: An System.EventArgs that contains the event data.
"""
pass
def OnParentChanged(self,*args):
"""
OnParentChanged(self: ContainerControl,e: EventArgs)
e: An System.EventArgs that contains the event data.
"""
pass
def OnParentCursorChanged(self,*args):
"""
OnParentCursorChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.CursorChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnParentEnabledChanged(self,*args):
"""
OnParentEnabledChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.EnabledChanged event when the
System.Windows.Forms.Control.Enabled property value of the control's container changes.
e: An System.EventArgs that contains the event data.
"""
pass
def OnParentFontChanged(self,*args):
"""
OnParentFontChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.FontChanged event when the
System.Windows.Forms.Control.Font property value of the control's container changes.
e: An System.EventArgs that contains the event data.
"""
pass
def OnParentForeColorChanged(self,*args):
"""
OnParentForeColorChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.ForeColorChanged event when the
System.Windows.Forms.Control.ForeColor property value of the control's container changes.
e: An System.EventArgs that contains the event data.
"""
pass
def OnParentRightToLeftChanged(self,*args):
"""
OnParentRightToLeftChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.RightToLeftChanged event when the
System.Windows.Forms.Control.RightToLeft property value of the control's container changes.
e: An System.EventArgs that contains the event data.
"""
pass
def OnParentVisibleChanged(self,*args):
"""
OnParentVisibleChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.VisibleChanged event when the
System.Windows.Forms.Control.Visible property value of the control's container changes.
e: An System.EventArgs that contains the event data.
"""
pass
def OnPreviewKeyDown(self,*args):
"""
OnPreviewKeyDown(self: Control,e: PreviewKeyDownEventArgs)
Raises the System.Windows.Forms.Control.PreviewKeyDown event.
e: A System.Windows.Forms.PreviewKeyDownEventArgs that contains the event data.
"""
pass
def OnPrint(self,*args):
"""
OnPrint(self: Control,e: PaintEventArgs)
Raises the System.Windows.Forms.Control.Paint event.
e: A System.Windows.Forms.PaintEventArgs that contains the event data.
"""
pass
def OnQueryContinueDrag(self,*args):
"""
OnQueryContinueDrag(self: Control,qcdevent: QueryContinueDragEventArgs)
Raises the System.Windows.Forms.Control.QueryContinueDrag event.
qcdevent: A System.Windows.Forms.QueryContinueDragEventArgs that contains the event data.
"""
pass
def OnRegionChanged(self,*args):
"""
OnRegionChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.RegionChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnResize(self,*args):
"""
OnResize(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.Resize event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnRightToLeftChanged(self,*args):
"""
OnRightToLeftChanged(self: ScrollableControl,e: EventArgs)
e: An System.EventArgs that contains the event data.
"""
pass
def OnScroll(self,*args):
"""
OnScroll(self: ScrollableControl,se: ScrollEventArgs)
Raises the System.Windows.Forms.ScrollableControl.Scroll event.
se: A System.Windows.Forms.ScrollEventArgs that contains the event data.
"""
pass
def OnSizeChanged(self,*args):
"""
OnSizeChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.SizeChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnStyleChanged(self,*args):
"""
OnStyleChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.StyleChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnSystemColorsChanged(self,*args):
"""
OnSystemColorsChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.SystemColorsChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnTabIndexChanged(self,*args):
"""
OnTabIndexChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.TabIndexChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnTabStopChanged(self,*args):
"""
OnTabStopChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.TabStopChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnTextBoxKeyDown(self,*args):
"""
OnTextBoxKeyDown(self: UpDownBase,source: object,e: KeyEventArgs)
Raises the System.Windows.Forms.Control.KeyDown event.
source: The source of the event.
e: A System.Windows.Forms.KeyEventArgs that contains the event data.
"""
pass
def OnTextBoxKeyPress(self,*args):
"""
OnTextBoxKeyPress(self: NumericUpDown,source: object,e: KeyPressEventArgs)
Raises the System.Windows.Forms.Control.KeyPress event.
source: The source of the event.
e: A System.Windows.Forms.KeyPressEventArgs that contains the event data.
"""
pass
def OnTextBoxLostFocus(self,*args):
"""
OnTextBoxLostFocus(self: UpDownBase,source: object,e: EventArgs)
Raises the System.Windows.Forms.Control.LostFocus event.
source: The source of the event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnTextBoxResize(self,*args):
"""
OnTextBoxResize(self: UpDownBase,source: object,e: EventArgs)
Raises the System.Windows.Forms.Control.Resize event.
source: The source of the event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnTextBoxTextChanged(self,*args):
"""
OnTextBoxTextChanged(self: UpDownBase,source: object,e: EventArgs)
Raises the System.Windows.Forms.Control.TextChanged event.
source: The source of the event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnTextChanged(self,*args):
"""
OnTextChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.TextChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnValidated(self,*args):
"""
OnValidated(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.Validated event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnValidating(self,*args):
"""
OnValidating(self: Control,e: CancelEventArgs)
Raises the System.Windows.Forms.Control.Validating event.
e: A System.ComponentModel.CancelEventArgs that contains the event data.
"""
pass
def OnValueChanged(self,*args):
"""
OnValueChanged(self: NumericUpDown,e: EventArgs)
Raises the System.Windows.Forms.NumericUpDown.ValueChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnVisibleChanged(self,*args):
"""
OnVisibleChanged(self: ScrollableControl,e: EventArgs)
e: An System.EventArgs that contains the event data.
"""
pass
def ParseEditText(self,*args):
"""
ParseEditText(self: NumericUpDown)
Converts the text displayed in the spin box (also known as an up-down control) to a numeric
value and evaluates it.
"""
pass
def ProcessCmdKey(self,*args):
"""
ProcessCmdKey(self: ContainerControl,msg: Message,keyData: Keys) -> (bool,Message)
msg: A System.Windows.Forms.Message,passed by reference,that represents the window message to
process.
keyData: One of the System.Windows.Forms.Keys values that represents the key to process.
Returns: true if the character was processed by the control; otherwise,false.
"""
pass
def ProcessDialogChar(self,*args):
"""
ProcessDialogChar(self: ContainerControl,charCode: Char) -> bool
charCode: The character to process.
Returns: true if the character was processed by the control; otherwise,false.
"""
pass
def ProcessDialogKey(self,*args):
"""
ProcessDialogKey(self: ContainerControl,keyData: Keys) -> bool
keyData: One of the System.Windows.Forms.Keys values that represents the key to process.
Returns: true if the key was processed by the control; otherwise,false.
"""
pass
def ProcessKeyEventArgs(self,*args):
"""
ProcessKeyEventArgs(self: Control,m: Message) -> (bool,Message)
Processes a key message and generates the appropriate control events.
m: A System.Windows.Forms.Message,passed by reference,that represents the window message to
process.
Returns: true if the message was processed by the control; otherwise,false.
"""
pass
def ProcessKeyMessage(self,*args):
"""
ProcessKeyMessage(self: Control,m: Message) -> (bool,Message)
Processes a keyboard message.
m: A System.Windows.Forms.Message,passed by reference,that represents the window message to
process.
Returns: true if the message was processed by the control; otherwise,false.
"""
pass
def ProcessKeyPreview(self,*args):
"""
ProcessKeyPreview(self: Control,m: Message) -> (bool,Message)
Previews a keyboard message.
m: A System.Windows.Forms.Message,passed by reference,that represents the window message to
process.
Returns: true if the message was processed by the control; otherwise,false.
"""
pass
def ProcessMnemonic(self,*args):
"""
ProcessMnemonic(self: ContainerControl,charCode: Char) -> bool
charCode: The character to process.
Returns: true if the character was processed as a mnemonic by the control; otherwise,false.
"""
pass
def ProcessTabKey(self,*args):
"""
ProcessTabKey(self: ContainerControl,forward: bool) -> bool
Selects the next available control and makes it the active control.
forward: true to cycle forward through the controls in the System.Windows.Forms.ContainerControl;
otherwise,false.
Returns: true if a control is selected; otherwise,false.
"""
pass
def RaiseDragEvent(self,*args):
"""
RaiseDragEvent(self: Control,key: object,e: DragEventArgs)
Raises the appropriate drag event.
key: The event to raise.
e: A System.Windows.Forms.DragEventArgs that contains the event data.
"""
pass
def RaiseKeyEvent(self,*args):
"""
RaiseKeyEvent(self: Control,key: object,e: KeyEventArgs)
Raises the appropriate key event.
key: The event to raise.
e: A System.Windows.Forms.KeyEventArgs that contains the event data.
"""
pass
def RaiseMouseEvent(self,*args):
"""
RaiseMouseEvent(self: Control,key: object,e: MouseEventArgs)
Raises the appropriate mouse event.
key: The event to raise.
e: A System.Windows.Forms.MouseEventArgs that contains the event data.
"""
pass
def RaisePaintEvent(self,*args):
"""
RaisePaintEvent(self: Control,key: object,e: PaintEventArgs)
Raises the appropriate paint event.
key: The event to raise.
e: A System.Windows.Forms.PaintEventArgs that contains the event data.
"""
pass
def RecreateHandle(self,*args):
"""
RecreateHandle(self: Control)
Forces the re-creation of the handle for the control.
"""
pass
def RescaleConstantsForDpi(self,*args):
""" RescaleConstantsForDpi(self: UpDownBase,deviceDpiOld: int,deviceDpiNew: int) """
pass
def ResetMouseEventArgs(self,*args):
"""
ResetMouseEventArgs(self: Control)
Resets the control to handle the System.Windows.Forms.Control.MouseLeave event.
"""
pass
def RtlTranslateAlignment(self,*args):
"""
RtlTranslateAlignment(self: Control,align: ContentAlignment) -> ContentAlignment
Converts the specified System.Drawing.ContentAlignment to the appropriate
System.Drawing.ContentAlignment to support right-to-left text.
align: One of the System.Drawing.ContentAlignment values.
Returns: One of the System.Drawing.ContentAlignment values.
RtlTranslateAlignment(self: Control,align: LeftRightAlignment) -> LeftRightAlignment
Converts the specified System.Windows.Forms.LeftRightAlignment to the appropriate
System.Windows.Forms.LeftRightAlignment to support right-to-left text.
align: One of the System.Windows.Forms.LeftRightAlignment values.
Returns: One of the System.Windows.Forms.LeftRightAlignment values.
RtlTranslateAlignment(self: Control,align: HorizontalAlignment) -> HorizontalAlignment
Converts the specified System.Windows.Forms.HorizontalAlignment to the appropriate
System.Windows.Forms.HorizontalAlignment to support right-to-left text.
align: One of the System.Windows.Forms.HorizontalAlignment values.
Returns: One of the System.Windows.Forms.HorizontalAlignment values.
"""
pass
def RtlTranslateContent(self,*args):
"""
RtlTranslateContent(self: Control,align: ContentAlignment) -> ContentAlignment
Converts the specified System.Drawing.ContentAlignment to the appropriate
System.Drawing.ContentAlignment to support right-to-left text.
align: One of the System.Drawing.ContentAlignment values.
Returns: One of the System.Drawing.ContentAlignment values.
"""
pass
def RtlTranslateHorizontal(self,*args):
"""
RtlTranslateHorizontal(self: Control,align: HorizontalAlignment) -> HorizontalAlignment
Converts the specified System.Windows.Forms.HorizontalAlignment to the appropriate
System.Windows.Forms.HorizontalAlignment to support right-to-left text.
align: One of the System.Windows.Forms.HorizontalAlignment values.
Returns: One of the System.Windows.Forms.HorizontalAlignment values.
"""
pass
def RtlTranslateLeftRight(self,*args):
"""
RtlTranslateLeftRight(self: Control,align: LeftRightAlignment) -> LeftRightAlignment
Converts the specified System.Windows.Forms.LeftRightAlignment to the appropriate
System.Windows.Forms.LeftRightAlignment to support right-to-left text.
align: One of the System.Windows.Forms.LeftRightAlignment values.
Returns: One of the System.Windows.Forms.LeftRightAlignment values.
"""
pass
def ScaleControl(self,*args):
"""
ScaleControl(self: ScrollableControl,factor: SizeF,specified: BoundsSpecified)
factor: The factor by which the height and width of the control will be scaled.
specified: A System.Windows.Forms.BoundsSpecified value that specifies the bounds of the control to use
when defining its size and position.
"""
pass
def ScaleCore(self,*args):
"""
ScaleCore(self: ScrollableControl,dx: Single,dy: Single)
dx: The horizontal scaling factor.
dy: The vertical scaling factor.
"""
pass
def ScrollToControl(self,*args):
"""
ScrollToControl(self: ScrollableControl,activeControl: Control) -> Point
Calculates the scroll offset to the specified child control.
activeControl: The child control to scroll into view.
Returns: The upper-left hand System.Drawing.Point of the display area relative to the client area
required to scroll the control into view.
"""
pass
def Select(self,start=None,length=None):
"""
Select(self: ContainerControl,directed: bool,forward: bool)
directed: true to specify the direction of the control to select; otherwise,false.
forward: true to move forward in the tab order; false to move backward in the tab order.
"""
pass
def SetAutoSizeMode(self,*args):
"""
SetAutoSizeMode(self: Control,mode: AutoSizeMode)
Sets a value indicating how a control will behave when its System.Windows.Forms.Control.AutoSize
property is enabled.
mode: One of the System.Windows.Forms.AutoSizeMode values.
"""
pass
def SetBoundsCore(self,*args):
"""
SetBoundsCore(self: Control,x: int,y: int,width: int,height: int,specified: BoundsSpecified)
Performs the work of setting the specified bounds of this control.
x: The new System.Windows.Forms.Control.Left property value of the control.
y: The new System.Windows.Forms.Control.Top property value of the control.
width: The new System.Windows.Forms.Control.Width property value of the control.
height: The new System.Windows.Forms.Control.Height property value of the control.
specified: A bitwise combination of the System.Windows.Forms.BoundsSpecified values.
"""
pass
def SetClientSizeCore(self,*args):
"""
SetClientSizeCore(self: Control,x: int,y: int)
Sets the size of the client area of the control.
x: The client area width,in pixels.
y: The client area height,in pixels.
"""
pass
def SetDisplayRectLocation(self,*args):
"""
SetDisplayRectLocation(self: ScrollableControl,x: int,y: int)
Positions the display window to the specified value.
x: The horizontal offset at which to position the System.Windows.Forms.ScrollableControl.
y: The vertical offset at which to position the System.Windows.Forms.ScrollableControl.
"""
pass
def SetScrollState(self,*args):
"""
SetScrollState(self: ScrollableControl,bit: int,value: bool)
Sets the specified scroll state flag.
bit: The scroll state flag to set.
value: The value to set the flag.
"""
pass
def SetStyle(self,*args):
"""
SetStyle(self: Control,flag: ControlStyles,value: bool)
Sets a specified System.Windows.Forms.ControlStyles flag to either true or false.
flag: The System.Windows.Forms.ControlStyles bit to set.
value: true to apply the specified style to the control; otherwise,false.
"""
pass
def SetTopLevel(self,*args):
"""
SetTopLevel(self: Control,value: bool)
Sets the control as the top-level control.
value: true to set the control as the top-level control; otherwise,false.
"""
pass
def SetVisibleCore(self,*args):
"""
SetVisibleCore(self: Control,value: bool)
Sets the control to the specified visible state.
value: true to make the control visible; otherwise,false.
"""
pass
def SizeFromClientSize(self,*args):
"""
SizeFromClientSize(self: Control,clientSize: Size) -> Size
Determines the size of the entire control from the height and width of its client area.
clientSize: A System.Drawing.Size value representing the height and width of the control's client area.
Returns: A System.Drawing.Size value representing the height and width of the entire control.
"""
pass
def ToString(self):
"""
ToString(self: NumericUpDown) -> str
Returns a string that represents the System.Windows.Forms.NumericUpDown control.
Returns: A string that represents the current System.Windows.Forms.NumericUpDown.
"""
pass
def UpButton(self):
"""
UpButton(self: NumericUpDown)
Increments the value of the spin box (also known as an up-down control).
"""
pass
def UpdateBounds(self,*args):
"""
UpdateBounds(self: Control,x: int,y: int,width: int,height: int,clientWidth: int,clientHeight: int)
Updates the bounds of the control with the specified size,location,and client size.
x: The System.Drawing.Point.X coordinate of the control.
y: The System.Drawing.Point.Y coordinate of the control.
width: The System.Drawing.Size.Width of the control.
height: The System.Drawing.Size.Height of the control.
clientWidth: The client System.Drawing.Size.Width of the control.
clientHeight: The client System.Drawing.Size.Height of the control.
UpdateBounds(self: Control,x: int,y: int,width: int,height: int)
Updates the bounds of the control with the specified size and location.
x: The System.Drawing.Point.X coordinate of the control.
y: The System.Drawing.Point.Y coordinate of the control.
width: The System.Drawing.Size.Width of the control.
height: The System.Drawing.Size.Height of the control.
UpdateBounds(self: Control)
Updates the bounds of the control with the current size and location.
"""
pass
def UpdateDefaultButton(self,*args):
"""
UpdateDefaultButton(self: ContainerControl)
When overridden by a derived class,updates which button is the default button.
"""
pass
def UpdateEditText(self,*args):
"""
UpdateEditText(self: NumericUpDown)
Displays the current value of the spin box (also known as an up-down control) in the appropriate
format.
"""
pass
def UpdateStyles(self,*args):
"""
UpdateStyles(self: Control)
Forces the assigned styles to be reapplied to the control.
"""
pass
def UpdateZOrder(self,*args):
"""
UpdateZOrder(self: Control)
Updates the control in its parent's z-order.
"""
pass
def ValidateEditText(self,*args):
"""
ValidateEditText(self: NumericUpDown)
Validates and updates the text displayed in the spin box (also known as an up-down control).
"""
pass
def WndProc(self,*args):
"""
WndProc(self: UpDownBase,m: Message) -> Message
Processes Windows messages.
m: The Windows System.Windows.Forms.Message to process.
"""
pass
def __enter__(self,*args):
"""
__enter__(self: IDisposable) -> object
Provides the implementation of __enter__ for objects which implement IDisposable.
"""
pass
def __exit__(self,*args):
"""
__exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object)
Provides the implementation of __exit__ for objects which implement IDisposable.
"""
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __str__(self,*args):
pass
Accelerations=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a collection of sorted acceleration objects for the System.Windows.Forms.NumericUpDown control.
Get: Accelerations(self: NumericUpDown) -> NumericUpDownAccelerationCollection
"""
AutoScaleFactor=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the scaling factor between the current and design-time automatic scaling dimensions.
"""
CanEnableIme=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value indicating whether the System.Windows.Forms.Control.ImeMode property can be set to an active value,to enable IME support.
"""
CanRaiseEvents=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Determines if events can be raised on the control.
"""
ChangingText=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value indicating whether the text property is being changed internally by its parent class.
"""
CreateParams=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the required creation parameters when the control handle is created.
"""
DecimalPlaces=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the number of decimal places to display in the spin box (also known as an up-down control).
Get: DecimalPlaces(self: NumericUpDown) -> int
Set: DecimalPlaces(self: NumericUpDown)=value
"""
DefaultCursor=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the default cursor for the control.
"""
DefaultImeMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the default Input Method Editor (IME) mode supported by the control.
"""
DefaultMargin=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the space,in pixels,that is specified by default between controls.
"""
DefaultMaximumSize=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the length and height,in pixels,that is specified as the default maximum size of a control.
"""
DefaultMinimumSize=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the length and height,in pixels,that is specified as the default minimum size of a control.
"""
DefaultPadding=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the internal spacing,in pixels,of the contents of a control.
"""
DefaultSize=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the default size of the control.
"""
DesignMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value that indicates whether the System.ComponentModel.Component is currently in design mode.
"""
DoubleBuffered=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value indicating whether this control should redraw its surface using a secondary buffer to reduce or prevent flicker.
"""
Events=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the list of event handlers that are attached to this System.ComponentModel.Component.
"""
FontHeight=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the height of the font of the control.
"""
Hexadecimal=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value indicating whether the spin box (also known as an up-down control) should display the value it contains in hexadecimal format.
Get: Hexadecimal(self: NumericUpDown) -> bool
Set: Hexadecimal(self: NumericUpDown)=value
"""
HScroll=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value indicating whether the horizontal scroll bar is visible.
"""
ImeModeBase=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the IME mode of a control.
"""
Increment=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the value to increment or decrement the spin box (also known as an up-down control) when the up or down buttons are clicked.
Get: Increment(self: NumericUpDown) -> Decimal
Set: Increment(self: NumericUpDown)=value
"""
Maximum=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the maximum value for the spin box (also known as an up-down control).
Get: Maximum(self: NumericUpDown) -> Decimal
Set: Maximum(self: NumericUpDown)=value
"""
Minimum=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the minimum allowed value for the spin box (also known as an up-down control).
Get: Minimum(self: NumericUpDown) -> Decimal
Set: Minimum(self: NumericUpDown)=value
"""
Padding=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the space between the edges of a System.Windows.Forms.NumericUpDown control and its contents.
Get: Padding(self: NumericUpDown) -> Padding
Set: Padding(self: NumericUpDown)=value
"""
RenderRightToLeft=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""This property is now obsolete.
"""
ResizeRedraw=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value indicating whether the control redraws itself when resized.
"""
ScaleChildren=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value that determines the scaling of child controls.
"""
ShowFocusCues=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value indicating whether the control should display focus rectangles.
"""
ShowKeyboardCues=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value indicating whether the user interface is in the appropriate state to show or hide keyboard accelerators.
"""
Text=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the text to be displayed in the System.Windows.Forms.NumericUpDown control.
Get: Text(self: NumericUpDown) -> str
Set: Text(self: NumericUpDown)=value
"""
ThousandsSeparator=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value indicating whether a thousands separator is displayed in the spin box (also known as an up-down control) when appropriate.
Get: ThousandsSeparator(self: NumericUpDown) -> bool
Set: ThousandsSeparator(self: NumericUpDown)=value
"""
UserEdit=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value indicating whether a value has been entered by the user.
"""
Value=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the value assigned to the spin box (also known as an up-down control).
Get: Value(self: NumericUpDown) -> Decimal
Set: Value(self: NumericUpDown)=value
"""
VScroll=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value indicating whether the vertical scroll bar is visible.
"""
PaddingChanged=None
TextChanged=None
ValueChanged=None
| """
OnImeModeChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.ImeModeChanged event.
e: An System.EventArgs that contains the event data.
"""
pass |
scheduler.py | # -*- coding: utf-8 -*-
"""Implementation of the Scheduler interface. This implementation only
supports sending yos"""
# Pylint rules regarding variable names that are not in PEP8.
# https://www.python.org/dev/peps/pep-0008/#global-variable-names
# pylint: disable=invalid-name
# Scheduled task manager
import sys
from flask import current_app
from ..extensions.scheduler import Scheduler
from yoapi.contacts import get_contact_usernames, get_subscriptions
from ..yos.helpers import construct_auto_follow_yo, construct_first_yo, construct_yo
from ..yos.queries import clear_get_yo_cache
from ..helpers import get_usec_timestamp, make_json_response
from ..models import Yo, Header
GRACE_PERIOD = 3e8  # 300 seconds, expressed in microseconds
YO_JOB_TYPE = 'yo'
yo_scheduler = Scheduler('scheduled_for', grace_period=GRACE_PERIOD)
@yo_scheduler.execute_job_handler(job_type=YO_JOB_TYPE)
def send_scheduled_yo(yo):
"""Send a scheduled yo"""
from ..yos.send import _send_yo
yo.status = 'started'
yo.save()
clear_get_yo_cache(yo.yo_id)
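# Hedged note: '55c1035f6461740061000027' appears to be the header used for
# the "no contacts" yo (see schedule_no_contacts_yo below); it is only sent
# while the recipient still has no contacts or subscriptions.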
if yo.header and str(yo.header.id) == '55c1035f6461740061000027':
if len(get_contact_usernames(yo.recipient)) + len(get_subscriptions(yo.recipient)) == 0:
yo_scheduler.become(yo.sender)
_send_yo.delay(yo_id=yo.yo_id)
else:
pass
else:
yo_scheduler.become(yo.sender)
_send_yo.delay(yo_id=yo.yo_id)
@yo_scheduler.get_scheduled_jobs_handler(job_type=YO_JOB_TYPE)
def get_scheduled_jobs():
"""Gets yos scheduled between grace period start and now"""
schedule_name = yo_scheduler.app.config.get('SCHEDULE_NAME')
usec_now = get_usec_timestamp()
cutoff_usec = usec_now - GRACE_PERIOD
query = Yo.objects(scheduled_for__exists=True,
scheduled_for__lte=usec_now,
scheduled_for__gte=cutoff_usec,
schedule_name=schedule_name,
status='scheduled')
return query.order_by('scheduled_for')
@yo_scheduler.get_execute_delay_handler(job_type=YO_JOB_TYPE)
def get_time_until_next_jobs():
"""Returns the number of microseconds until next Yo"""
schedule_name = yo_scheduler.app.config.get('SCHEDULE_NAME')
usec_now = get_usec_timestamp()
cutoff_usec = usec_now - GRACE_PERIOD
query = Yo.objects(scheduled_for__exists=True,
scheduled_for__gte=cutoff_usec,
schedule_name=schedule_name,
status='scheduled')
return query.order_by('scheduled_for')
@yo_scheduler.failed_job_handler(job_type=YO_JOB_TYPE)
def handle_failed_job(yo):
yo.status='failed'
yo.save()
clear_get_yo_cache(yo.yo_id)
@yo_scheduler.new_job_handler(job_type=YO_JOB_TYPE)
def handle_new_scheduled_yo(yo):
"""log new yos as they are announced"""
with yo_scheduler._make_context():
yo_scheduler.app.process_response(
make_json_response(**yo))
def schedule_yo(yo, scheduled_for=None, schedule_name=None):
"""schedule a yo for a specific time"""
if not schedule_name:
schedule_name = current_app.config.get('SCHEDULE_NAME')
if not scheduled_for:
scheduled_for = get_usec_timestamp()
yo.reload()
yo.status = 'scheduled'
yo.scheduled_for = scheduled_for
yo.schedule_name = schedule_name
yo.save()
yo_scheduler.announce_new(yo, YO_JOB_TYPE)
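# Example (sketch): schedule an existing yo one minute from now.
#   schedule_yo(yo, scheduled_for=get_usec_timestamp() + int(60 * 1e6))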
def schedule_auto_follow_yo(user, auto_follow_user):
auto_follow_yo = construct_auto_follow_yo(user, auto_follow_user)
if auto_follow_yo:
# AUTO_FOLLOW_DELAY seconds from now, converted to usec
auto_follow_delay = current_app.config.get('AUTO_FOLLOW_DELAY')
auto_follow_delay = auto_follow_delay*1e6 + get_usec_timestamp()
schedule_yo(auto_follow_yo, auto_follow_delay)
def schedule_first_yo(user, first_yo_from):
yo_link, yo_location = construct_first_yo(user, first_yo_from)
first_yo_delay = current_app.config.get('FIRST_YO_DELAY')
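# FIRST_YO_DELAY is a comma-separated pair of seconds, link delay first and
# location delay second -- e.g. "30, 60" (illustrative values only).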
first_yo_delay = first_yo_delay.replace(' ', '').split(',')
first_yo_link_delay = int(first_yo_delay[0])*1e6
first_yo_location_delay = int(first_yo_delay[1])*1e6
# If either delay equals 0, assume that yo is disabled
if first_yo_location_delay:
first_yo_location_delay += get_usec_timestamp()
schedule_yo(yo_location, first_yo_location_delay)
if first_yo_link_delay:
first_yo_link_delay += get_usec_timestamp()
schedule_yo(yo_link, first_yo_link_delay)
def schedule_no_contacts_yo(user, first_yo_from):
try: |
yo_link = construct_yo(sender=first_yo_from, recipients=[user],
link='https://index.justyo.co', ignore_permission=True,
header=header, link_content_type='text/html')
hour = int(60 * 60)*1e6
delay = hour + get_usec_timestamp()
schedule_yo(yo_link, delay)
except Exception:
current_app.log_exception(sys.exc_info()) | header = Header.objects.get(id='55c1035f6461740061000027') |
componentstatus.go | /*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// This file was automatically generated by informer-gen with arguments: --input-dirs=[k8s.io/kubernetes/pkg/api,k8s.io/kubernetes/pkg/api/v1,k8s.io/kubernetes/pkg/apis/abac,k8s.io/kubernetes/pkg/apis/abac/v0,k8s.io/kubernetes/pkg/apis/abac/v1beta1,k8s.io/kubernetes/pkg/apis/apps,k8s.io/kubernetes/pkg/apis/apps/v1beta1,k8s.io/kubernetes/pkg/apis/authentication,k8s.io/kubernetes/pkg/apis/authentication/v1beta1,k8s.io/kubernetes/pkg/apis/authorization,k8s.io/kubernetes/pkg/apis/authorization/v1beta1,k8s.io/kubernetes/pkg/apis/autoscaling,k8s.io/kubernetes/pkg/apis/autoscaling/v1,k8s.io/kubernetes/pkg/apis/batch,k8s.io/kubernetes/pkg/apis/batch/v1,k8s.io/kubernetes/pkg/apis/batch/v2alpha1,k8s.io/kubernetes/pkg/apis/certificates,k8s.io/kubernetes/pkg/apis/certificates/v1alpha1,k8s.io/kubernetes/pkg/apis/componentconfig,k8s.io/kubernetes/pkg/apis/componentconfig/v1alpha1,k8s.io/kubernetes/pkg/apis/extensions,k8s.io/kubernetes/pkg/apis/extensions/v1beta1,k8s.io/kubernetes/pkg/apis/imagepolicy,k8s.io/kubernetes/pkg/apis/imagepolicy/v1alpha1,k8s.io/kubernetes/pkg/apis/meta/v1,k8s.io/kubernetes/pkg/apis/policy,k8s.io/kubernetes/pkg/apis/policy/v1beta1,k8s.io/kubernetes/pkg/apis/rbac,k8s.io/kubernetes/pkg/apis/rbac/v1alpha1,k8s.io/kubernetes/pkg/apis/storage,k8s.io/kubernetes/pkg/apis/storage/v1beta1] --internal-clientset-package=k8s.io/kubernetes/pkg/client/clientset_generated/internalclientset --listers-package=k8s.io/kubernetes/pkg/client/listers --versioned-clientset-package=k8s.io/kubernetes/pkg/client/clientset_generated/release_1_5
package v1
import (
api_v1 "k8s.io/kubernetes/pkg/api/v1"
cache "k8s.io/kubernetes/pkg/client/cache"
release_1_5 "k8s.io/kubernetes/pkg/client/clientset_generated/release_1_5"
interfaces "k8s.io/kubernetes/pkg/client/informers/interfaces"
v1 "k8s.io/kubernetes/pkg/client/listers/core/v1"
runtime "k8s.io/kubernetes/pkg/runtime"
watch "k8s.io/kubernetes/pkg/watch"
time "time"
)
// ComponentStatusInformer provides access to a shared informer and lister for
// ComponentStatuses.
type ComponentStatusInformer interface {
Informer() cache.SharedIndexInformer
Lister() v1.ComponentStatusLister
}
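// Usage sketch (hypothetical; the exact accessor depends on how the shared
// factory exposes this informer):
//   informer := factory.ComponentStatuses().Informer()
//   cs, err := factory.ComponentStatuses().Lister().Get("scheduler")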
type componentStatusInformer struct {
factory interfaces.SharedInformerFactory
}
func | (client release_1_5.Interface, resyncPeriod time.Duration) cache.SharedIndexInformer {
sharedIndexInformer := cache.NewSharedIndexInformer(
&cache.ListWatch{
ListFunc: func(options api_v1.ListOptions) (runtime.Object, error) {
return client.CoreV1().ComponentStatuses().List(options)
},
WatchFunc: func(options api_v1.ListOptions) (watch.Interface, error) {
return client.CoreV1().ComponentStatuses().Watch(options)
},
},
&api_v1.ComponentStatus{},
resyncPeriod,
cache.Indexers{cache.NamespaceIndex: cache.MetaNamespaceIndexFunc},
)
return sharedIndexInformer
}
func (f *componentStatusInformer) Informer() cache.SharedIndexInformer {
return f.factory.VersionedInformerFor(&api_v1.ComponentStatus{}, newComponentStatusInformer)
}
func (f *componentStatusInformer) Lister() v1.ComponentStatusLister {
return v1.NewComponentStatusLister(f.Informer().GetIndexer())
}
| newComponentStatusInformer |
echo.go | package server
// EchoCmd - echo back to the client
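// A client sending "ECHO hello" receives "OK hello\r\n": handle slices
// ctx.cmd[5:] to drop the "ECHO " prefix that init registers below.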
type EchoCmd struct{} | }
func (cmd *EchoCmd) destroy() {}
func (cmd *EchoCmd) handle(ctx CmdContext) error {
_, err := ctx.write([]byte("OK " + ctx.cmd[5:] + "\r\n"))
return err
} |
func init() {
addHandler("ECHO ", &EchoCmd{}) |
getImage.go | // *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package digitalocean
import (
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
// Get information on an image for use in other resources (e.g. creating a Droplet
// based on a snapshot). This data source provides all of the image properties as
// configured on your DigitalOcean account. This is useful if the image in question
// is not managed by the provider or you need to utilize any of the image's data.
//
// An error is triggered if zero or more than one result is returned by the query.
//
// ## Example Usage
//
// Get the data about a snapshot:
//
// ```go
// package main
//
// import (
// "github.com/pulumi/pulumi-digitalocean/sdk/v4/go/digitalocean"
// "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
// )
//
// func main() {
// pulumi.Run(func(ctx *pulumi.Context) error {
// opt0 := "example-1.0.0"
// _, err := digitalocean.GetImage(ctx, &digitalocean.GetImageArgs{
// Name: &opt0,
// }, nil)
// if err != nil {
// return err | // }
// return nil
// })
// }
// ```
//
// Reuse the data about a snapshot to create a Droplet:
//
// ```go
// package main
//
// import (
// "github.com/pulumi/pulumi-digitalocean/sdk/v4/go/digitalocean"
// "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
// )
//
// func main() {
// pulumi.Run(func(ctx *pulumi.Context) error {
// opt0 := "example-1.0.0"
// exampleImage, err := digitalocean.GetImage(ctx, &digitalocean.GetImageArgs{
// Name: &opt0,
// }, nil)
// if err != nil {
// return err
// }
// _, err = digitalocean.NewDroplet(ctx, "exampleDroplet", &digitalocean.DropletArgs{
// Image: pulumi.Int(exampleImage.Id),
// Region: pulumi.String("nyc2"),
// Size: pulumi.String("s-1vcpu-1gb"),
// })
// if err != nil {
// return err
// }
// return nil
// })
// }
// ```
//
// Get the data about an official image:
//
// ```go
// package main
//
// import (
// "github.com/pulumi/pulumi-digitalocean/sdk/v4/go/digitalocean"
// "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
// )
//
// func main() {
// pulumi.Run(func(ctx *pulumi.Context) error {
// opt0 := "ubuntu-18-04-x64"
// _, err := digitalocean.GetImage(ctx, &digitalocean.GetImageArgs{
// Slug: &opt0,
// }, nil)
// if err != nil {
// return err
// }
// return nil
// })
// }
// ```
func GetImage(ctx *pulumi.Context, args *GetImageArgs, opts ...pulumi.InvokeOption) (*GetImageResult, error) {
var rv GetImageResult
err := ctx.Invoke("digitalocean:index/getImage:getImage", args, &rv, opts...)
if err != nil {
return nil, err
}
return &rv, nil
}
// A collection of arguments for invoking getImage.
type GetImageArgs struct {
// The id of the image
Id *int `pulumi:"id"`
// The name of the image.
Name *string `pulumi:"name"`
// The slug of the official image.
Slug *string `pulumi:"slug"`
// Restrict the search to one of the following categories of images:
Source *string `pulumi:"source"`
}
// A collection of values returned by getImage.
type GetImageResult struct {
Created string `pulumi:"created"`
Description string `pulumi:"description"`
// The name of the distribution of the OS of the image.
// * `minDiskSize`: The minimum 'disk' required for the image.
// * `sizeGigabytes`: The size of the image in GB.
Distribution string `pulumi:"distribution"`
ErrorMessage string `pulumi:"errorMessage"`
Id int `pulumi:"id"`
// The id of the image (legacy parameter).
Image string `pulumi:"image"`
MinDiskSize int `pulumi:"minDiskSize"`
Name string `pulumi:"name"`
// Whether the image is a public image or not. Public images represent
// Linux distributions or One-Click Applications, while non-public images represent
// snapshots and backups and are only available within your account.
// * `regions`: A set of the regions that the image is available in.
// * `tags`: A set of tags applied to the image
// * `created`: When the image was created
// * `status`: Current status of the image
// * `errorMessage`: Any applicable error message pertaining to the image
Private bool `pulumi:"private"`
Regions []string `pulumi:"regions"`
SizeGigabytes float64 `pulumi:"sizeGigabytes"`
Slug string `pulumi:"slug"`
Source *string `pulumi:"source"`
Status string `pulumi:"status"`
Tags []string `pulumi:"tags"`
Type string `pulumi:"type"`
} | |
JsonReact.test.tsx | import JsonReact from '../src';
import * as ReactDOMServer from 'react-dom/server';
import * as trenderer from 'react-test-renderer';
import * as A from '../examples/components/A';
import * as B from '../examples/components/B';
JsonReact.RegisterComponent(A);
JsonReact.RegisterComponent(B);
describe('success render with json', () => {
it('success render empty output when no json given', () => {
const renderer = new JsonReact();
const el = renderer.render();
expect(
ReactDOMServer.renderToString(el)
).toEqual('');
});
it('render an array type of json', () => {
const renderer = new JsonReact();
const el = renderer.render([{
type: 'div',
props: { style: { left: 20 } }
}, {
type: 'div',
props: { style: { left: 50 } }
}])
const tree = trenderer.create(el).toJSON();
expect(tree).toMatchSnapshot();
})
it('render div success', () => {
const renderer = new JsonReact();
const el = renderer.render({
type: 'div',
props: { style: { left: 20 } }
})
const tree = trenderer.create(el).toJSON();
expect(tree).toMatchSnapshot();
})
})
describe('render custom component', () => {
const renderer = new JsonReact();
const el = renderer.render([{
type: 'div',
props: { style: { left: 20 } },
children: [
'WelCome ',
'To ', | 'JsonReact'
]
}, {
type: 'A',
props: { style: { color: 'white', background: 'red' } },
events: [{
event: A.eventKeys.ACLICK.key,
reducer: B.actionKeys.BCLICKED.key,
}]
}, {
type: 'B',
}]);
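// A's ACLICK event is mapped to B's BCLICKED reducer above, so clicking A
// (or dispatching BCLICKED directly, as the second test does) should change
// B's rendered output.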
it('success render custom component', () => {
const tree = trenderer.create(el).toJSON();
expect(tree).toMatchSnapshot();
})
it('changes rendered result when dispatching event', () => {
renderer.store.dispatch({
type: B.actionKeys.BCLICKED.key
})
const tree = trenderer.create(el).toJSON();
expect(tree).toMatchSnapshot();
})
}) | |
utxo_list.py | #!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2015 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from .util import *
from electroncash.i18n import _
class UTXOList(MyTreeWidget):
filter_columns = [0, 2] # Address, Label
def __init__(self, parent=None):
MyTreeWidget.__init__(self, parent, self.create_menu, [ _('Address'), _('Label'), _('Amount'), _('Height'), _('Output point')], 1)
self.setSelectionMode(QAbstractItemView.ExtendedSelection)
self.setSortingEnabled(True)
# force attributes to always be defined, even if None, at construction.
self.wallet = self.parent.wallet if hasattr(self.parent, 'wallet') else None
self.utxos = list()
def get_name(self, x):
return x.get('prevout_hash') + ":%d"%x.get('prevout_n')
@rate_limited(1.0) # performance tweak -- limit updates to no more than once per second
def update(self):
if self.wallet and self.wallet.thread and not self.wallet.thread.isRunning():
# short-cut return if window was closed and wallet is stopped
return
super().update()
def on_update(self):
prev_selection = self.get_selected() # cache previous selection, if any
self.clear()
self.wallet = self.parent.wallet
if not self.wallet: return
self.utxos = self.wallet.get_utxos()
for x in self.utxos:
address = x['address']
address_text = address.to_ui_string()
height = x['height']
name = self.get_name(x)
label = self.wallet.get_label(x['prevout_hash'])
amount = self.parent.format_amount(x['value'])
utxo_item = SortableTreeWidgetItem([address_text, label, amount,
str(height),
name[0:10] + '...' + name[-2:]])
utxo_item.DataRole = Qt.UserRole+100 # set this here to avoid sorting based on Qt.UserRole+1
utxo_item.setFont(0, QFont(MONOSPACE_FONT))
utxo_item.setFont(4, QFont(MONOSPACE_FONT))
utxo_item.setData(0, Qt.UserRole, name)
a_frozen = self.wallet.is_frozen(address)
c_frozen = x['is_frozen_coin']
if a_frozen and not c_frozen:
# address is frozen, coin is not frozen
# emulate the "Look" off the address_list .py's frozen entry
utxo_item.setBackground(0, QColor('lightblue'))
elif c_frozen and not a_frozen:
# coin is frozen, address is not frozen
utxo_item.setBackground(0, ColorScheme.BLUE.as_color(True))
elif c_frozen and a_frozen:
# both coin and address are frozen so color-code it to indicate that.
utxo_item.setBackground(0, QColor('lightblue'))
utxo_item.setForeground(0, QColor('#3399ff'))
# save the address-level-frozen and coin-level-frozen flags to the data item for retrieval later in create_menu() below.
utxo_item.setData(0, Qt.UserRole+1, "{}{}".format(("a" if a_frozen else ""), ("c" if c_frozen else "")))
self.addChild(utxo_item)
if name in prev_selection:
# NB: This needs to be here after the item is added to the widget. See #979.
utxo_item.setSelected(True) # restore previous selection
def get_selected(self):
return { x.data(0, Qt.UserRole) : x.data(0, Qt.UserRole+1) # dict of "name" -> frozen flags string (eg: "ac")
for x in self.selectedItems() }
def create_menu(self, position):
selected = self.get_selected()
if not selected:
return
menu = QMenu()
coins = filter(lambda x: self.get_name(x) in selected, self.utxos)
spendable_coins = list(filter(lambda x: not selected.get(self.get_name(x), ''), coins))
# Unconditionally add the "Spend" option but leave it disabled if there are no spendable_coins
menu.addAction(_("Spend"), lambda: self.parent.spend_coins(spendable_coins)).setEnabled(bool(spendable_coins))
if len(selected) == 1:
# single selection, offer them the "Details" option and also coin/address "freeze" status, if any
txid = list(selected.keys())[0].split(':')[0]
frozen_flags = list(selected.values())[0]
tx = self.wallet.transactions.get(txid)
if tx:
label = self.wallet.get_label(txid) or None
menu.addAction(_("Details"), lambda: self.parent.show_transaction(tx, label))
act = None
needsep = True
if 'c' in frozen_flags:
menu.addSeparator()
menu.addAction(_("Coin is frozen"), lambda: None).setEnabled(False)
menu.addAction(_("Unfreeze Coin"), lambda: self.set_frozen_coins(list(selected.keys()), False))
menu.addSeparator()
needsep = False
else:
menu.addAction(_("Freeze Coin"), lambda: self.set_frozen_coins(list(selected.keys()), True))
if 'a' in frozen_flags:
if needsep: menu.addSeparator()
menu.addAction(_("Address is frozen"), lambda: None).setEnabled(False)
menu.addAction(_("Unfreeze Address"), lambda: self.set_frozen_addresses_for_coins(list(selected.keys()), False))
else:
menu.addAction(_("Freeze Address"), lambda: self.set_frozen_addresses_for_coins(list(selected.keys()), True))
else:
# multi-selection
menu.addSeparator()
if any(['c' not in flags for flags in selected.values()]):
# they have some coin-level non-frozen in the selection, so add the menu action "Freeze coins"
menu.addAction(_("Freeze Coins"), lambda: self.set_frozen_coins(list(selected.keys()), True))
if any(['c' in flags for flags in selected.values()]):
# they have some coin-level frozen in the selection, so add the menu action "Unfreeze coins"
menu.addAction(_("Unfreeze Coins"), lambda: self.set_frozen_coins(list(selected.keys()), False))
if any(['a' not in flags for flags in selected.values()]):
# they have some address-level non-frozen in the selection, so add the menu action "Freeze addresses"
menu.addAction(_("Freeze Addresses"), lambda: self.set_frozen_addresses_for_coins(list(selected.keys()), True))
if any(['a' in flags for flags in selected.values()]):
# they have some address-level frozen in the selection, so add the menu action "Unfreeze addresses"
menu.addAction(_("Unfreeze Addresses"), lambda: self.set_frozen_addresses_for_coins(list(selected.keys()), False))
menu.exec_(self.viewport().mapToGlobal(position))
def on_permit_edit(self, item, column):
# disable editing fields in this tab (labels)
return False
def set_frozen_coins(self, coins, b):
if self.parent:
self.parent.set_frozen_coin_state(coins, b)
def set_frozen_addresses_for_coins(self, coins, b):
if not self.parent: return
addrs = set()
for utxo in self.utxos:
name = self.get_name(utxo)
if name in coins:
addrs.add(utxo['address'])
if addrs:
self.parent.set_frozen_state(list(addrs), b)
introOutro.js | import { Vector3 } from 'three';
var titleHTML = "\
<span style='position: absolute; top: "+80+"px; left: "+55+"px; display: block;'>\
<center><h1 id=title style='color: silver; font-size: 70px; opacity: 0;'>LABYRINTH</h1></center>\
<center id=entertostart style='color: silver; position: relative; top: "+(-30)+"px; opacity: 0;'>Press Enter to Start</center>\
</span>\
";
var instructionsHTML = "\
<span id=pregame style='position: absolute; left: "+0+"px; top: "+0+"px; padding: 25px;\
height: "+300+"px; width: "+500+"px; background: linear-gradient(to bottom right, #d9d9d9, #737373); border-radius: 5px; display: block; opacity: 0.85;'>\
<center><h1>LABYRINTH</h1></center>\
<p><strong>Instructions:</strong> You will be placed in a random location of a multi-level maze. Your objective is to reach the goal hidden in the maze (tap <strong>S</strong> a few times for a view of the goal on this title screen).</p>\
<p>Use the <strong>Up</strong>, <strong>Down</strong>, <strong>Left</strong>, and <strong>Right</strong> arrow keys to orient yourself, and use <strong>WASD</strong> to move around. Use <strong>Space</strong> to go up, and <strong>Shift</strong> to go down (try them out now). At any point in the game, press <strong>ESC</strong> to pause. Good luck!</p>\
<span style='position: relative; top: "+(0*300/2-0)+"px;'>\
<center style='position: relative; top: "+0+"px;'>Enter Difficulty (1-5)</center>\
<center><input id=lvlInpPre value='' style='position: relative; top: "+10+"px;'></input><center>\
<center id=entertostart style='position: relative; top: "+20+"px;'>Press Enter to Start</center>\
</span>\
</span>\
";
var pausedHTML = "\
<span id=pregame style='position: absolute; left: "+0+"px; top: "+0+"px; padding: 25px;\
height: "+300+"px; width: "+500+"px; background: linear-gradient(to bottom right, #d9d9d9, #737373); border-radius: 5px; display: block; opacity: 0.85;'>\
<center><h1>LABYRINTH</h1></center>\
<p><strong>Instructions:</strong> You will be placed in a random location of a multi-level maze. Your objective is to reach the goal hidden in the maze.</p>\
<p>Use the <strong>Up</strong>, <strong>Down</strong>, <strong>Left</strong>, and <strong>Right</strong> arrow keys to orient yourself, and use <strong>WASD</strong> to move around. Use <strong>Space</strong> to go up, and <strong>Shift</strong> to go down (try them out now). At any point in the game, press <strong>ESC</strong> to pause. Good luck!</p>\
<span style='position: relative; top: "+(0*300/2-0)+"px;'>\
<center style='position: relative; top: "+0+"px;'>Restart? Enter Difficulty (1-5)</center>\
<center><input id=lvlInpPre value='' style='position: relative; top: "+10+"px;'></input><center>\
<center id=entertostart style='position: relative; top: "+20+"px;'>Press Enter to Start, ESC to Resume, or Q to Quit</center>\
</span>\
</span>\
";
var startingHTML = "\
<span id=start style='position: absolute; top: "+80+"px; left: "+55+"px; display: none; opacity: 1.0'>\
<center><h1 style='color: silver; font-size: 70px;'>START!</h1></center>\
</span>\
";
var replayHTML = "\
<span id=endgame style='position: absolute; left: "+0+"px; top: "+0+"px; \
height: "+250+"px; width: "+400+"px; background: linear-gradient(to bottom right, #d9d9d9, #737373); border-radius: 5px; display: none; opacity: 0.85;'>\
<span style='position: relative; top: "+0+"px;'>\
<center style='position: relative; top: "+0+"px;'><h1 style='font-size: 40px;'>You won in <span id=finishtime></span>!</h1><p>Play again?</p></center>\
<center style='position: relative; top: "+0+"px;'>Enter Difficulty (1-5)</center>\
<center><input id=lvlInpEnd value='' style='position: relative; top: "+10+"px;'></input><center>\
<center id=etsReplay style='position: relative; top: "+20+"px;'><strong>Press Enter to Start</strong></center>\
</span>\
</span>\
";
var timedistHTML = "\
<span id=timedist style='position: absolute; top: "+80+"px; left: "+55+"px; display: none;'>\
<h1 style='color: silver; font-size: 30px;'>Time: <span id=time></span></h1>\
<h1 style='color: silver; font-size: 30px; position: relative; top: "+(-25)+"px;'>Distance: <span id=dist></span></h1>\
</span>\
";
function getChildNamed(parent, name) {
for (const child of parent.children) {
if (child.name == name) {
return child;
}
}
}
// pregame prompt. Shows once per page load. Has intro + instructions. Initializes as hidden
const pregamePromptHTML = () => {
let elem = document.createElement("SPAN");
let w = 500;
let h = 300;
let x = window.innerWidth / 2 - w / 2;
let y = window.innerHeight / 2 - h / 2;
let intro = "<span id=intro style='position: absolute; display: block;'></span>";
elem.innerHTML = intro;
document.body.appendChild(elem);
document.getElementById('intro').innerHTML = titleHTML;
}
// endgame prompt. Shows between games. Congratulates the player and prompts them to play again.
const endgamePromptHTML = () => {
let elem = document.createElement("SPAN");
let w = 500;
let h = 300;
let x = window.innerWidth / 2 - w / 2;
let y = window.innerHeight / 2 - h / 2;
elem.innerHTML = replayHTML;
document.body.appendChild(elem);
}
// shows "START!" once the round starts
const startHTML = () => {
let elem = document.createElement("SPAN");
elem.innerHTML = startingHTML;
document.body.appendChild(elem);
}
const timeAndDistHTML = () => {
let elem = document.createElement("SPAN");
elem.innerHTML = timedistHTML;
document.body.appendChild(elem);
}
function handleResizeHTML() {
let w = 500 + 50;
let h = 300 + 50;
let x = window.innerWidth / 2 - w / 2;
let y = window.innerHeight / 2 - h / 2;
let pre = document.getElementById('intro'); // used to be pregame
pre.style.left = x + 'px';
pre.style.top = y + 'px';
let str = document.getElementById('start');
str.style.left = window.innerWidth / 2 - 125 + 'px';
str.style.top = window.innerHeight / 2 - 95 + 'px';
w = 400;
h = 250;
x = window.innerWidth / 2 - w / 2;
y = window.innerHeight / 2 - h / 2;
let end = document.getElementById('endgame');
end.style.left = x + 'px';
end.style.top = y + 'px';
let td = document.getElementById('timedist');
td.style.left = (window.innerWidth - 220) + 'px';
td.style.top = '0px';
}
function showPregamePrompt() {
document.getElementById("intro").innerHTML = instructionsHTML;
//document.getElementById("lvlInpPre").focus();
document.getElementById("lvlInpPre").value = '';
}
function hidePregamePrompt() {
document.getElementById("intro").innerHTML = instructionsHTML; //document.getElementById("intro").innerHTML.replace(' or Q to Quit', '');
document.getElementById("intro").style.display = 'none';
}
function showPause() {
// document.getElementById("intro").innerHTML = document.getElementById("intro").innerHTML.replace(' (tap <strong>S</strong> a few times for a view of the goal on this title screen)', '');
// document.getElementById("intro").innerHTML = document.getElementById("intro").innerHTML.replace('Press Enter to Start', 'Press Enter to Start or Q to Quit');
document.getElementById("intro").innerHTML = pausedHTML;
document.getElementById("intro").style.display = 'block';
document.getElementById("lvlInpPre").focus();
document.getElementById("lvlInpPre").value = '';
}
function hidePause() {
document.getElementById("intro").innerHTML = instructionsHTML;//document.getElementById("intro").innerHTML.replace(' or Q to Quit', '');
document.getElementById("intro").style.display = 'none';
}
const showEndgamePrompt = () => {
document.getElementById("endgame").style.display = 'block';
document.getElementById("lvlInpEnd").focus();
document.getElementById("lvlInpEnd").value = '';
}
const hideEndgamePrompt = () => {
document.getElementById("endgame").style.display = 'none';
}
const showStart = () => {
document.getElementById("start").style.opacity = 1.0;
document.getElementById("start").style.display = 'block';
}
const hideStart = () => {
document.getElementById("start").style.display = 'none';
}
function showTimeAndDist() {
document.getElementById("timedist").style.display = 'block';
}
function hideTimeAndDist() {
document.getElementById("timedist").style.display = 'none';
}
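// A difficulty entry is valid only if it parses to an integer between 0 and 5.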
function verifyInp(inp) {
console.log(parseInt(inp));
let num = parseInt(inp);
console.log(num);
console.log(Number.isInteger(num) && num >= 0 && num <= 5);
return Number.isInteger(num) && num >= 0 && num <= 5;
}
function toTitleScreen(scene, controls) {
scene.dimensions = 0;
hidePause();
hideEndgamePrompt();
resetScene(scene, 0, controls);
hideTimeAndDist();
document.getElementById("intro").style.display = 'block';
document.getElementById("intro").innerHTML = titleHTML;
renderedTitle = false;
}
// Set up camera
function setUpPlayer(controls) {
let scene = controls.scene;
let camera = controls.object;
let spotLight = controls.light;
let scale = scene.mazeScale;
let dimensions = scene.dimensions;
let x = Math.floor(Math.random() * dimensions) * scale - scale/2;
let y = Math.floor(Math.random() * dimensions) * scale;
let z = Math.floor(Math.random() * dimensions) * scale - scale/2;
camera.position.set(x, y, z);
let halfDist = (dimensions / 2) * scale - scale/2;
camera.lookAt(new Vector3(halfDist, y, halfDist));
// Set up controls
spotLight.position.set(x, y, z);
spotLight.target.position.set(halfDist, y, halfDist);
controls.mouseX = 0;
controls.mouseY = 0;
controls.object.lookAt(new Vector3(halfDist, y, halfDist));
}
function beginPlaying(scene, controls) {
let inp = document.getElementById("lvlInpPre").value;
if (!verifyInp(inp))
return 0;
let dimensions = parseInt(inp) + 1;
hidePregamePrompt();
resetScene(scene, dimensions, controls);
startTime = undefined;
return 1;
}
function playAgain(scene, controls) {
let inp = document.getElementById("lvlInpEnd").value;
if (!verifyInp(inp))
return 0;
let dimensions = parseInt(inp) + 1;
hideEndgamePrompt();
resetScene(scene, dimensions, controls);
startTime = undefined;
return 1;
}
function resetScene(scene, dimensions, controls) {
let scale = scene.mazeScale;
let maze = getChildNamed(scene, 'maze');
scene.dimensions = dimensions;
maze.children = [];
maze.maxDist = Math.sqrt(2*(dimensions*scale)*(dimensions*scale));
maze.buildRandomMaze(dimensions, scale);
let x = Math.floor(Math.random() * dimensions) * scale - scale/2;
let y = Math.floor(Math.random() * dimensions) * scale;
let z = Math.floor(Math.random() * dimensions) * scale - scale/2;
let endGoal = getChildNamed(scene, 'endgoal');
endGoal.position.set(x, y, z);
setUpPlayer(controls);
scene.destinationLoc.copy(endGoal.position);
showStart();
showTimeAndDist();
}
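// Render num in exactly l characters, appending zeros or truncating as needed.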
function fixToLen(num, l) {
let newNum = '' + num;
let len = newNum.length;
if (len < l) {
if (!newNum.includes('.')) {
newNum += '.';
len++;
}
for (let i = 0; i < l - len; i++) {
newNum += '0';
}
} else {
newNum = newNum.substring(0, l);
}
return newNum;
}
function frontPad(num, l) {
let newNum = '' + num;
let len = l - newNum.length;
if (len < l) {
for (let i = 0; i < len; i++) {
newNum = '0' + newNum;
}
}
return newNum;
}
function backPad(num, l) {
let newNum = '' + num;
let len = l - newNum.length;
if (len < l) {
for (let i = 0; i < len; i++) {
newNum = newNum + '0';
}
}
return newNum;
}
var startTime = undefined;
function timeToString(time) {
// let minutes = time.substring(0, time.indexOf(':'));
// let seconds = time.substring(time.indexOf(':') + 1, time.indexOf('.'));
// let milliseconds = time.substring(time.indexOf('.') + 1);
// let newTime = minutes * 60 + seconds + milliseconds / 1000;
//console.log(time);
let totTime = (time - startTime);
let seconds = Math.floor(totTime / 1000) % 60;
let minutes = (Math.floor(totTime / 1000) - seconds) / 60;
//console.log('millis:', totTime / 1000 - Math.floor(totTime / 1000));
let milliseconds = '' + Math.floor((totTime / 1000 - Math.floor(totTime / 1000)) * 1000);
return minutes + ':' + frontPad(seconds, 2) + '.' + backPad(milliseconds.substring(0, 4), 3);
}
var renderedTitle = false;
function animateHTML(timeStamp, gameState, dist) {
// fade the "Press Enter" text in and out while a prompt is on screen
if (gameState == 'intro' || gameState == 'paused') {
let opac = 0.4 * Math.sin(timeStamp / 500) + 0.6;
document.getElementById('entertostart').style.opacity = opac;
}
if (gameState == 'title') {
let titleOpac = parseFloat(document.getElementById('title').style.opacity);
if (titleOpac < 1) {
titleOpac += 0.002;
document.getElementById('title').style.opacity = titleOpac;
}
if (titleOpac > 0.55) {
let opac = 0.45 * Math.sin(timeStamp / 500) + 0.55;
if (opac < 0.11 || renderedTitle) {
document.getElementById('entertostart').style.opacity = opac;
renderedTitle = true;
}
}
}
if (gameState == 'playing') {
let opac = parseFloat(document.getElementById('start').style.opacity);
if (opac > 0.0) {
document.getElementById('start').style.opacity = opac - 0.01;
} else {
document.getElementById('start').style.display = 'none';
}
// update time & dist
//let time = timeToSeconds(document.getElementById('time').innerHTML);
if (startTime == undefined)
startTime = timeStamp;
document.getElementById('time').innerHTML = timeToString(timeStamp);
document.getElementById('dist').innerHTML = fixToLen(dist, 5);
} else {
document.getElementById('start').style.display = 'none';
}
if (gameState == 'outro') {
let opac = 0.4 * Math.sin(timeStamp / 500) + 0.6;
document.getElementById('etsReplay').style.opacity = opac;
document.getElementById('finishtime').innerHTML = document.getElementById('time').innerHTML;
document.getElementById('dist').innerHTML = '0.000';
}
}
export { getChildNamed, pregamePromptHTML, endgamePromptHTML, startHTML, timeAndDistHTML, handleResizeHTML, showPregamePrompt, hidePregamePrompt, toTitleScreen, showPause, hidePause, showEndgamePrompt, hideEndgamePrompt, setUpPlayer, beginPlaying, playAgain, resetScene, animateHTML };
ContractService.ts | import { ContractService as ContractServiceInterface } from '../services'
import { Network } from '../../contract/types'
import { NFTCategory } from '../../nft/types'
import { TransferType } from '../types'
const network = process.env.REACT_APP_NETWORK! as Network
// No Ropsten!
const contractAddresses = {
[Network.ROPSTEN]: {
DigitalAsset: '0xfbeef911dc5821886e1dda71586d90ed28174b7d',
BuyAdapter: '0xfbeef911dc5821886e1dda71586d90ed28174b7d',
MarketplaceAdapter: '0xd1e4e2880ff56cd0d5c68da9bed58bfbf0150948'
},
[Network.MAINNET]: {
DigitalAsset: '0xfbeef911dc5821886e1dda71586d90ed28174b7d',
BuyAdapter: '0xfbeef911dc5821886e1dda71586d90ed28174b7d',
MarketplaceAdapter: '0xf4fbd84193f9aaf9779dedbb415a806933eb1c95'
}
}[network]
const { DigitalAsset, MarketplaceAdapter } = contractAddresses
export type ContractName = keyof typeof contractAddresses
export class ContractService implements ContractServiceInterface {
static contractAddresses = contractAddresses
contractAddresses = contractAddresses
contractSymbols = {
[DigitalAsset]: 'KnownOriginDigitalAssetV2',
[MarketplaceAdapter]: 'Partner Marketplace'
} as const
contractNames = {
[DigitalAsset]: 'KnownOrigin',
[MarketplaceAdapter]: 'Partner Marketplace'
} as const
contractCategories = {
[DigitalAsset]: NFTCategory.ART
} as const
getTransferType(_address: string) {
return TransferType.SAFE_TRANSFER_FROM
}
} | |
http-module.interface.ts | import { ModuleMetadata, Provider, Type } from '@nestjs/common';
import { AxiosRequestConfig } from 'kickstand-axios';
export type HttpModuleOptions = AxiosRequestConfig;
export interface HttpModuleOptionsFactory {
createHttpOptions(): Promise<HttpModuleOptions> | HttpModuleOptions;
}
export interface HttpModuleAsyncOptions
extends Pick<ModuleMetadata, 'imports'> {
useExisting?: Type<HttpModuleOptionsFactory>;
useClass?: Type<HttpModuleOptionsFactory>;
useFactory?: (
...args: any[]
) => Promise<HttpModuleOptions> | HttpModuleOptions;
inject?: any[];
extraProviders?: Provider[];
}
listener_linux.go | //+build linux
package vsock
import (
"net"
"golang.org/x/sys/unix"
)
var _ net.Listener = &listener{}
// A listener is the net.Listener implementation for connection-oriented
// VM sockets.
type listener struct {
fd fd
addr *Addr
}
// Addr and Close implement the net.Listener interface for listener.
func (l *listener) Addr() net.Addr { return l.addr }
func (l *listener) Close() error { return l.fd.Close() }
// Accept accepts a single connection from the listener, and sets up
// a net.Conn backed by conn.
func (l *listener) Accept() (net.Conn, error) {
cfd, sa, err := l.fd.Accept4(0)
if err != nil {
return nil, err
}
savm := sa.(*unix.SockaddrVM)
remoteAddr := &Addr{
ContextID: savm.CID,
Port: savm.Port,
}
return newConn(cfd, l.addr.fileName(), l.addr, remoteAddr)
}
// listenStream is the entry point for ListenStream on Linux.
func listenStream(port uint32) (net.Listener, error) {
var cid uint32
if err := localContextID(sysFS{}, &cid); err != nil {
return nil, err
}
fd, err := unix.Socket(unix.AF_VSOCK, unix.SOCK_STREAM, 0)
if err != nil {
return nil, err
}
lfd := &sysFD{fd: fd}
return listenStreamLinuxHandleError(lfd, cid, port)
}
// listenStreamLinuxHandleError ensures that any errors from listenStreamLinux
// result in the socket being cleaned up properly.
func listenStreamLinuxHandleError(lfd fd, cid, port uint32) (net.Listener, error) {
l, err := listenStreamLinux(lfd, cid, port)
if err != nil {
// If any system calls fail during setup, the socket must be closed
// to avoid file descriptor leaks.
_ = lfd.Close()
return nil, err
}
return l, nil
}
// TODO(mdlayher): fine-tune this number instead of just picking one.
const listenBacklog = 32
// listenStreamLinux is the entry point for tests on Linux.
func listenStreamLinux(lfd fd, cid, port uint32) (net.Listener, error) {
// Zero-value for "any port" is friendlier in Go than a constant.
if port == 0 {
port = unix.VMADDR_PORT_ANY
}
sa := &unix.SockaddrVM{
CID: cid,
Port: port,
}
if err := lfd.SetNonblock(true); err != nil {
return nil, err
}
if err := lfd.Bind(sa); err != nil {
return nil, err
}
if err := lfd.Listen(listenBacklog); err != nil {
return nil, err
}
lsa, err := lfd.Getsockname()
if err != nil {
return nil, err
}
lsavm := lsa.(*unix.SockaddrVM)
addr := &Addr{
ContextID: lsavm.CID,
Port: lsavm.Port,
}
return &listener{
fd: lfd,
addr: addr,
}, nil
}
client.go | package algorand
import (
"fmt"
"github.com/Bundle-App/blockatlas/pkg/numbers"
)
type Client struct {
blockatlas.Request
}
func InitClient(baseUrl string) Client {
return Client{
Request: blockatlas.Request{
HttpClient: blockatlas.DefaultClient,
ErrorHandler: blockatlas.DefaultErrorHandler,
BaseUrl: baseUrl,
},
}
}
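// GetLatestBlock returns the round number of the most recent block.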
func (c *Client) GetLatestBlock() (int64, error) {
var status Status
err := c.Get(&status, "v1/status", nil)
if err != nil {
return 0, err
}
return status.LastRound, nil
}
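// GetBlock fetches a block by round and stamps its transactions with the block timestamp.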
func (c *Client) GetBlock(number int64) (BlockResponse, error) {
path := fmt.Sprintf("v1/block/%d", number)
var resp BlockResponse
err := c.Get(&resp, path, nil)
if err != nil {
return resp, err
}
normalizedTxs := make([]Transaction, 0)
//TODO: Read GetTxsOfAddress explanation
for _, t := range resp.Transactions.Transactions {
normalized := normalizeTx(&t, resp)
normalizedTxs = append(normalizedTxs, *normalized)
}
resp.Transactions.Transactions = normalizedTxs
return resp, nil
}
func (c *Client) GetTxsInBlock(number int64) ([]Transaction, error) {
block, err := c.GetBlock(number)
return block.Transactions.Transactions, err
}
func (c *Client) GetAccount(address string) (account *Account, err error) {
path := fmt.Sprintf("v1/account/%s", address)
err = c.Get(&account, path, nil)
return
}
func (c *Client) GetTxsOfAddress(address string) ([]Transaction, error) {
var response TransactionsResponse
path := fmt.Sprintf("v1/account/%s/transactions", address)
err := c.Get(&response, path, nil)
if err != nil {
return nil, blockatlas.ErrSourceConn
}
results := make([]Transaction, 0)
//FIXME. Currently fetching the last 6 transactions and get 6 blocks for each to retrieve timestamp.
//Algorand team promised to provide endpoint soon that will contain timestamp value inside TransactionsResponse response
//Get latest 6 transactions, which is enough until new endpoint fixes it.
txs := response.Transactions[:numbers.Min(6, len(response.Transactions))]
for _, t := range txs {
block, err := c.GetBlock(int64(t.Round))
if err == nil {
normalizeTx(&t, block)
results = append(results, t)
}
}
return results, err
}
func normalizeTx(transaction *Transaction, block BlockResponse) *Transaction {
transaction.Timestamp = block.Timestamp
return transaction
}
main.rs | use modiom::config::Config;
use modiom::CliResult;
mod command_prelude;
mod commands;
// mod progress;
use crate::command_prelude::*;
fn main() -> CliResult {
let args = App::new("modiom")
.settings(&[
AppSettings::UnifiedHelpMessage,
AppSettings::DeriveDisplayOrder,
AppSettings::SubcommandRequiredElseHelp,
AppSettings::VersionlessSubcommands,
])
.subcommands(commands::builtin())
.arg(opt("test-env", "Use the mod.io test environment").global(true))
.get_matches_safe()
.unwrap_or_else(|e| e.exit());
let mut config = Config::default()?;
config.configure(args.is_test_env())?;
match commands::exec(&config, &args) {
Err(e) => {
eprintln!("{}", e);
std::process::exit(1);
}
Ok(()) => Ok(()),
}
} | |
negative_http2_client.go | /*
*
* Copyright 2016 gRPC authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
// Binary http2 is used to test http2 error edge cases like GOAWAYs and
// RST_STREAMs
//
// Documentation:
// https://github.com/grpc/grpc/blob/master/doc/negative-http2-interop-test-descriptions.md
package main
import (
"context"
"flag"
"net"
"strconv"
"sync"
"time"
"github.com/seacomandor/grpc-go"
"github.com/seacomandor/grpc-go/codes"
"github.com/seacomandor/grpc-go/grpclog"
"github.com/seacomandor/grpc-go/interop"
testpb "github.com/seacomandor/grpc-go/interop/grpc_testing"
"github.com/seacomandor/grpc-go/status"
)
var (
serverHost = flag.String("server_host", "localhost", "The server host name")
serverPort = flag.Int("server_port", 8080, "The server port number")
testCase = flag.String("test_case", "goaway",
`Configure different test cases. Valid options are:
goaway : client sends two requests, the server will send a goaway in between;
rst_after_header : server will send rst_stream after it sends headers;
rst_during_data : server will send rst_stream while sending data;
rst_after_data : server will send rst_stream after sending data;
ping : server will send pings between each http2 frame;
max_streams : server will ensure that the max_concurrent_streams limit is upheld;`)
largeReqSize = 271828
largeRespSize = 314159
)
func largeSimpleRequest() *testpb.SimpleRequest {
pl := interop.ClientNewPayload(testpb.PayloadType_COMPRESSABLE, largeReqSize)
return &testpb.SimpleRequest{
ResponseType: testpb.PayloadType_COMPRESSABLE,
ResponseSize: int32(largeRespSize),
Payload: pl,
}
}
// sends two unary calls. The server asserts that the calls use different connections.
func goaway(tc testpb.TestServiceClient) {
interop.DoLargeUnaryCall(tc)
// sleep to ensure that the client has time to recv the GOAWAY.
// TODO(ncteisen): make this less hacky.
time.Sleep(1 * time.Second)
interop.DoLargeUnaryCall(tc)
}
func rstAfterHeader(tc testpb.TestServiceClient) {
req := largeSimpleRequest()
reply, err := tc.UnaryCall(context.Background(), req)
if reply != nil {
grpclog.Fatalf("Client received reply despite server sending rst stream after header")
}
if status.Code(err) != codes.Internal {
grpclog.Fatalf("%v.UnaryCall() = _, %v, want _, %v", tc, status.Code(err), codes.Internal)
}
}
func rstDuringData(tc testpb.TestServiceClient) {
req := largeSimpleRequest()
reply, err := tc.UnaryCall(context.Background(), req)
if reply != nil {
grpclog.Fatalf("Client received reply despite server sending rst stream during data")
}
if status.Code(err) != codes.Unknown {
grpclog.Fatalf("%v.UnaryCall() = _, %v, want _, %v", tc, status.Code(err), codes.Unknown)
}
}
func rstAfterData(tc testpb.TestServiceClient) {
req := largeSimpleRequest()
reply, err := tc.UnaryCall(context.Background(), req)
if reply != nil {
grpclog.Fatalf("Client received reply despite server sending rst stream after data")
}
if status.Code(err) != codes.Internal {
grpclog.Fatalf("%v.UnaryCall() = _, %v, want _, %v", tc, status.Code(err), codes.Internal)
}
}
func ping(tc testpb.TestServiceClient) {
// The server will assert that every ping it sends was ACK-ed by the client.
interop.DoLargeUnaryCall(tc)
}
func maxStreams(tc testpb.TestServiceClient) {
interop.DoLargeUnaryCall(tc)
var wg sync.WaitGroup
for i := 0; i < 15; i++ {
wg.Add(1)
go func() {
defer wg.Done()
interop.DoLargeUnaryCall(tc)
}()
}
wg.Wait()
}
func main() {
flag.Parse()
serverAddr := net.JoinHostPort(*serverHost, strconv.Itoa(*serverPort))
var opts []grpc.DialOption
opts = append(opts, grpc.WithInsecure())
conn, err := grpc.Dial(serverAddr, opts...)
if err != nil {
grpclog.Fatalf("Fail to dial: %v", err)
}
defer conn.Close()
tc := testpb.NewTestServiceClient(conn)
switch *testCase {
case "goaway":
goaway(tc)
grpclog.Infoln("goaway done")
case "rst_after_header":
rstAfterHeader(tc)
grpclog.Infoln("rst_after_header done")
case "rst_during_data":
rstDuringData(tc)
grpclog.Infoln("rst_during_data done")
case "rst_after_data":
rstAfterData(tc)
grpclog.Infoln("rst_after_data done")
case "ping":
ping(tc)
grpclog.Infoln("ping done")
case "max_streams":
maxStreams(tc)
grpclog.Infoln("max_streams done")
default:
grpclog.Fatal("Unsupported test case: ", *testCase)
}
}
authenticated.guard.ts | import { CanActivate, ExecutionContext, Injectable, UnauthorizedException } from '@nestjs/common';
import { PrismaClient } from '@prisma/client';
@Injectable()
export class AuthenticatedGuard implements CanActivate {
async canActivate(
context: ExecutionContext
): Promise<boolean> {
const prisma = new PrismaClient();
const request = context.switchToHttp().getRequest();
const token: string = request.headers['x-auth-token'];
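// 'Harap login terlebih dahulu' is Indonesian for 'Please log in first'.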
if (token === undefined){
throw new UnauthorizedException('Harap login terlebih dahulu');
}
const user = await prisma.users.findFirst({
select: {
id: true,
username: true
},
where: {
token: token
}
});
if (!user) {
throw new UnauthorizedException('Harap login terlebih dahulu');
}
return true;
}
}
channel_status_subscriber_test_helper.go | /*
Copyright 2020 The Knative Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package helpers
import (
"testing"
duckv1beta1 "knative.dev/eventing/pkg/apis/duck/v1beta1"
eventingv1beta1 "knative.dev/eventing/pkg/apis/messaging/v1beta1"
testlib "knative.dev/eventing/test/lib"
"knative.dev/eventing/test/lib/resources"
corev1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
// ChannelStatusSubscriberTestHelperWithChannelTestRunner runs the tests of
// subscriber field of status for all Channels in the ComponentsTestRunner.
func ChannelStatusSubscriberTestHelperWithChannelTestRunner(
t *testing.T,
channelTestRunner testlib.ComponentsTestRunner,
options ...testlib.SetupClientOption,
) {
channelTestRunner.RunTests(t, testlib.FeatureBasic, func(st *testing.T, channel metav1.TypeMeta) {
client := testlib.Setup(st, true, options...)
defer testlib.TearDown(client)
t.Run("Channel has required status subscriber fields", func(t *testing.T) {
channelHasRequiredSubscriberStatus(st, client, channel, options...)
})
})
}
func channelHasRequiredSubscriberStatus(st *testing.T, client *testlib.Client, channel metav1.TypeMeta, options ...testlib.SetupClientOption) {
st.Logf("Running channel subscriber status conformance test with channel %q", channel)
channelName := "channel-req-status-subscriber"
subscriberServiceName := "channel-req-status-subscriber-svc"
client.T.Logf("Creating channel %+v-%s", channel, channelName)
client.CreateChannelOrFail(channelName, &channel)
client.WaitForResourceReadyOrFail(channelName, &channel)
pod := resources.EventRecordPod(subscriberServiceName + "-pod")
client.CreatePodOrFail(pod, testlib.WithService(subscriberServiceName))
subscription := client.CreateSubscriptionOrFail(
subscriberServiceName,
channelName,
&channel,
resources.WithSubscriberForSubscription(subscriberServiceName),
)
// wait for all test resources to be ready, so that we can start sending events
client.WaitForAllTestResourcesReadyOrFail()
dtsv, err := getChannelDuckTypeSupportVersion(channelName, client, &channel)
if err != nil {
st.Fatalf("Unable to check Channel duck type support version for %q: %q", channel, err)
}
if dtsv == "" || dtsv == "v1alpha1" {
// treat missing annotation value as v1alpha1, as written in the spec
channelable, err := getChannelAsV1Alpha1Channelable(channelName, client, channel)
if err != nil {
st.Fatalf("Unable to get channel %q to v1alpha1 duck type: %q", channel, err)
}
// SPEC: Each subscription to a channel is added to the channel status.subscribableStatus.subscribers automatically.
if channelable.Status.SubscribableStatus == nil || channelable.Status.SubscribableStatus.Subscribers == nil {
st.Fatalf("%q does not have status.subscribers", channel)
}
ss := findSubscriberStatus(channelable.Status.SubscribableStatus.Subscribers, subscription)
if ss == nil {
st.Fatalf("No subscription status found for channel %q and subscription %v", channel, subscription)
}
// SPEC: The ready field of the subscriber identified by its uid MUST be set to True when the subscription is ready to be processed.
if ss.Ready != corev1.ConditionTrue {
st.Fatalf("Subscription not ready found for channel %q and subscription %v", channel, subscription)
}
} else if dtsv == "v1beta1" {
channelable, err := getChannelAsV1Beta1Channelable(channelName, client, channel)
if err != nil {
st.Fatalf("Unable to get channel %q to v1beta1 duck type: %q", channel, err)
}
// SPEC: Each subscription to a channel is added to the channel status.subscribers automatically.
if channelable.Status.Subscribers == nil {
st.Fatalf("%q does not have status.subscribers", channel)
}
ss := findSubscriberStatus(channelable.Status.Subscribers, subscription)
if ss == nil {
st.Fatalf("No subscription status found for channel %q and subscription %v", channel, subscription)
}
// SPEC: The ready field of the subscriber identified by its uid MUST be set to True when the subscription is ready to be processed.
if ss.Ready != corev1.ConditionTrue {
st.Fatalf("Subscription not ready found for channel %q and subscription %v", channel, subscription)
}
} else {
st.Fatalf("Channel doesn't support v1alpha1 nor v1beta1 Channel duck types: %v", channel)
}
}
func findSubscriberStatus(statusArr []duckv1beta1.SubscriberStatus, subscription *eventingv1beta1.Subscription) *duckv1beta1.SubscriberStatus {
for _, v := range statusArr {
if v.UID == subscription.UID {
return &v
}
}
return nil
}
remote_jmx_queue.py | import re
class RemoteJmxQueue(object):
def __init__(self, jolokia_session, broker_name, queue_name):
self.name = queue_name
self.jolokia_session = jolokia_session
self.queue_bean = (
"org.apache.activemq:type=Broker,brokerName={},"
"destinationType=Queue,destinationName={}"
).format(broker_name, queue_name)
def get_name(self):
return self.name
def send_text_message(self, request):
operation = {
'type': 'exec',
'mbean': self.queue_bean,
'operation': 'sendTextMessage(java.lang.String)',
'arguments': [request]
}
self.jolokia_session.request(operation)
def get_size(self):
attribute = {
'type': 'read',
'mbean': self.queue_bean,
'attribute': 'QueueSize',
}
return self.jolokia_session.request(attribute)
def get_message_contents(self):
operation = {
'type': 'exec',
'mbean': self.queue_bean,
'operation': 'browse()',
}
result = self.jolokia_session.request(operation)
if 'Text' in result[0]:
return [r['Text'] for r in result]
else:
return [self.bytearray_to_string(r) for r in result]
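# BodyPreview carries raw bytes; strip the bytearray(...) repr to recover the text.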
def bytearray_to_string(self, r):
result = str(bytearray(r['BodyPreview']))
result = re.sub("bytearray\(", "", result)
result = re.sub("\\'\)", "", result)
return re.sub("b\\'", "", result)
def purge(self):
operation = {
'type': 'exec',
'mbean': self.queue_bean,
'operation': 'purge()',
}
self.jolokia_session.request(operation)
decoder.py | """Decoding module."""
import numpy as np
import warnings
from . import utils
from numba import njit, int64, types, float64
def decode(H, y, snr, maxiter=1000):
"""Decode a Gaussian noise corrupted n bits message using BP algorithm.
Decoding is performed in parallel if multiple codewords are passed in y.
Parameters
----------
H: array (n_equations, n_code). Decoding matrix H.
y: array (n_code, n_messages) or (n_code,). Received message(s) in the
codeword space.
snr: float. Signal-to-noise ratio (dB) of the channel; sets the noise variance.
maxiter: int. Maximum number of iterations of the BP algorithm.
Returns
-------
x: array (n_code,) or (n_code, n_messages) the solutions in the
codeword space.
"""
m, n = H.shape
bits_hist, bits_values, nodes_hist, nodes_values = utils._bitsandnodes(H)
_n_bits = np.unique(H.sum(0))
_n_nodes = np.unique(H.sum(1))
if _n_bits * _n_nodes == 1:
solver = _logbp_numba_regular
bits_values = bits_values.reshape(n, -1)
nodes_values = nodes_values.reshape(m, -1)
else:
solver = _logbp_numba
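# Channel noise variance implied by the SNR given in dB.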
var = 10 ** (-snr / 10)
if y.ndim == 1:
y = y[:, None]
# step 0: initialization
Lc = 2 * y / var
_, n_messages = y.shape
Lq = np.zeros(shape=(m, n, n_messages))
Lr = np.zeros(shape=(m, n, n_messages))
for n_iter in range(maxiter):
Lq, Lr, L_posteriori = solver(bits_hist, bits_values, nodes_hist,
nodes_values, Lc, Lq, Lr, n_iter)
x = np.array(L_posteriori <= 0).astype(int)
product = utils.incode(H, x)
if product:
break
if n_iter == maxiter - 1:
warnings.warn("""Decoding stopped before convergence. You may want
to increase maxiter""")
return x.squeeze()
output_type_log2 = types.Tuple((float64[:, :, :], float64[:, :, :],
float64[:, :]))
@njit(output_type_log2(int64[:], int64[:], int64[:], int64[:], float64[:, :],
float64[:, :, :], float64[:, :, :], int64), cache=True)
def _logbp_numba(bits_hist, bits_values, nodes_hist, nodes_values, Lc, Lq, Lr,
n_iter):
"""Perform inner ext LogBP solver."""
m, n, n_messages = Lr.shape
# step 1 : Horizontal
bits_counter = 0
nodes_counter = 0
for i in range(m):
# ni = bits[i]
ff = bits_hist[i]
ni = bits_values[bits_counter: bits_counter + ff]
bits_counter += ff
for j in ni:
nij = ni[:]
X = np.ones(n_messages)
if n_iter == 0:
for kk in range(len(nij)):
if nij[kk] != j:
X *= np.tanh(0.5 * Lc[nij[kk]])
else:
for kk in range(len(nij)):
if nij[kk] != j:
X *= np.tanh(0.5 * Lq[i, nij[kk]])
num = 1 + X
denom = 1 - X
for ll in range(n_messages):
if num[ll] == 0:
Lr[i, j, ll] = -1
elif denom[ll] == 0:
Lr[i, j, ll] = 1
else:
Lr[i, j, ll] = np.log(num[ll] / denom[ll])
# step 2 : Vertical
for j in range(n):
# mj = nodes[j]
ff = nodes_hist[j]
mj = nodes_values[nodes_counter: nodes_counter + ff]
nodes_counter += ff
for i in mj:
mji = mj[:]
Lq[i, j] = Lc[j]
for kk in range(len(mji)):
if mji[kk] != i:
Lq[i, j] += Lr[mji[kk], j]
# LLR a posteriori:
L_posteriori = np.zeros((n, n_messages))
nodes_counter = 0
for j in range(n):
ff = nodes_hist[j]
mj = nodes_values[nodes_counter: nodes_counter + ff]
nodes_counter += ff
L_posteriori[j] = Lc[j] + Lr[mj, j].sum(axis=0)
return Lq, Lr, L_posteriori
@njit(output_type_log2(int64[:], int64[:, :], int64[:], int64[:, :],
float64[:, :], float64[:, :, :], float64[:, :, :],
int64), cache=True)
def _logbp_numba_regular(bits_hist, bits_values, nodes_hist, nodes_values, Lc,
Lq, Lr, n_iter):
"""Perform inner ext LogBP solver."""
m, n, n_messages = Lr.shape
# step 1 : Horizontal
for i in range(m):
ni = bits_values[i]
for j in ni:
nij = ni[:]
X = np.ones(n_messages)
if n_iter == 0:
for kk in range(len(nij)):
if nij[kk] != j:
X *= np.tanh(0.5 * Lc[nij[kk]])
else:
for kk in range(len(nij)):
if nij[kk] != j:
X *= np.tanh(0.5 * Lq[i, nij[kk]])
num = 1 + X
denom = 1 - X
for ll in range(n_messages):
if num[ll] == 0:
Lr[i, j, ll] = -1
elif denom[ll] == 0:
Lr[i, j, ll] = 1
else:
Lr[i, j, ll] = np.log(num[ll] / denom[ll])
# step 2 : Vertical
for j in range(n):
mj = nodes_values[j]
for i in mj:
mji = mj[:]
Lq[i, j] = Lc[j]
for kk in range(len(mji)):
if mji[kk] != i:
Lq[i, j] += Lr[mji[kk], j]
# LLR a posteriori:
L_posteriori = np.zeros((n, n_messages))
for j in range(n):
mj = nodes_values[j]
L_posteriori[j] = Lc[j] + Lr[mj, j].sum(axis=0)
return Lq, Lr, L_posteriori
def get_message(tG, x):
"""Compute the original `n_bits` message from a `n_code` codeword `x`.
Parameters
----------
tG: array (n_code, n_bits) coding matrix tG.
x: array (n_code,) decoded codeword of length `n_code`.
Returns
-------
message: array (n_bits,). Original binary message.
"""
n, k = tG.shape
rtG, rx = utils.gausselimination(tG, x)
message = np.zeros(k).astype(int)
message[k - 1] = rx[k - 1]
for i in reversed(range(k - 1)):
message[i] = rx[i]
message[i] -= utils.binaryproduct(rtG[i, list(range(i+1, k))],
message[list(range(i+1, k))])
return abs(message)
region.go | // Copyright 2019 Yunion
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package shell
import (
"yunion.io/x/onecloud/pkg/util/azure"
"yunion.io/x/onecloud/pkg/util/shellutils"
)
func init() {
type RegionListOptions struct {
}
shellutils.R(&RegionListOptions{}, "region-list", "List regions", func(cli *azure.SRegion, args *RegionListOptions) error {
regions := cli.GetClient().GetRegions()
printList(regions, 0, 0, 0, nil)
return nil
})
}
google_auth.py | # Copyright 2016 Ananya Mishra ([email protected])
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import flask_login
# Need to expose these downstream
# pylint: disable=unused-import
from flask_login import (current_user,
logout_user,
login_required,
login_user)
# pylint: enable=unused-import
from flask import url_for, redirect, request
from flask_oauthlib.client import OAuth
from airflow import models, configuration, settings
from airflow.utils.db import provide_session
from airflow.utils.log.logging_mixin import LoggingMixin
log = LoggingMixin().log
def get_config_param(param):
return str(configuration.get('google', param))
class GoogleUser(models.User):
def __init__(self, user):
self.user = user
def is_active(self):
'''Required by flask_login'''
return True
def is_authenticated(self):
'''Required by flask_login'''
return True
def is_anonymous(self):
'''Required by flask_login'''
return False
def get_id(self):
'''Returns the current user id as required by flask_login'''
return self.user.get_id()
def data_profiling(self):
'''Provides access to data profiling tools'''
return True
def is_superuser(self):
'''Access all the things'''
return True
class AuthenticationError(Exception):
pass
class GoogleAuthBackend(object):
def __init__(self):
# self.google_host = get_config_param('host')
self.login_manager = flask_login.LoginManager()
self.login_manager.login_view = 'airflow.login'
self.flask_app = None
self.google_oauth = None
self.api_rev = None
def init_app(self, flask_app):
self.flask_app = flask_app
self.login_manager.init_app(self.flask_app)
self.google_oauth = OAuth(self.flask_app).remote_app(
'google',
consumer_key=get_config_param('client_id'),
consumer_secret=get_config_param('client_secret'),
request_token_params={'scope': [
'https://www.googleapis.com/auth/userinfo.profile',
'https://www.googleapis.com/auth/userinfo.email']},
base_url='https://www.google.com/accounts/',
request_token_url=None,
access_token_method='POST',
access_token_url='https://accounts.google.com/o/oauth2/token',
authorize_url='https://accounts.google.com/o/oauth2/auth')
self.login_manager.user_loader(self.load_user)
self.flask_app.add_url_rule(get_config_param('oauth_callback_route'),
'google_oauth_callback',
self.oauth_callback)
def login(self, request):
log.debug('Redirecting user to Google login')
return self.google_oauth.authorize(callback=url_for(
'google_oauth_callback',
_external=True,
next=request.args.get('next') or request.referrer or None))
def get_google_user_profile_info(self, google_token):
resp = self.google_oauth.get('https://www.googleapis.com/oauth2/v1/userinfo',
token=(google_token, ''))
if not resp or resp.status != 200:
raise AuthenticationError(
'Failed to fetch user profile, status ({0})'.format(
resp.status if resp else 'None'))
return resp.data['name'], resp.data['email']
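# Only accounts from the configured Google domain(s) may sign in.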
def domain_check(self, email):
domain = email.split('@')[1]
domains = get_config_param('domain').split(',')
if domain in domains:
return True
return False
@provide_session
def load_user(self, userid, session=None):
if not userid or userid == 'None':
return None
user = session.query(models.User).filter(
models.User.id == int(userid)).first()
return GoogleUser(user)
@provide_session
def oauth_callback(self, session=None):
log.debug('Google OAuth callback called')
next_url = request.args.get('next') or url_for('admin.index')
resp = self.google_oauth.authorized_response()
try:
if resp is None:
raise AuthenticationError(
'Null response from Google, denying access.'
)
google_token = resp['access_token']
username, email = self.get_google_user_profile_info(google_token)
if not self.domain_check(email):
return redirect(url_for('airflow.noaccess'))
except AuthenticationError:
return redirect(url_for('airflow.noaccess'))
user = session.query(models.User).filter(
models.User.username == username).first()
if not user:
user = models.User(
username=username,
email=email,
is_superuser=False)
session.merge(user)
session.commit()
login_user(GoogleUser(user))
session.commit()
return redirect(next_url)
login_manager = GoogleAuthBackend()
def login(self, request):
return login_manager.login(request)
request.ts | import axios from 'axios'
import config from '@/config'

const request = axios.create({
baseURL: config.apiURL,
headers: {
Accept: 'application/json',
},
})

export default request
exporter.go | package exporter
import (
"context"
"net"
"net/http"
"net/url"
"os"
"os/signal"
"syscall"
"time"
"go.uber.org/zap"
"github.com/pkg/errors"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promhttp"
"golang.org/x/sync/errgroup"
)
// Exporter handles serving the metrics
type Exporter struct {
addr string
endpoint *url.URL
fcgiEndpoint *url.URL
logger *zap.Logger
}
// OptionsFunc is a function passed to New for setting options on a new Exporter.
type OptionsFunc func(*Exporter) error
// New creates an exporter.
func New(options ...OptionsFunc) (*Exporter, error) {
e := &Exporter{
addr: ":9090",
}
for _, f := range options {
if err := f(e); err != nil {
return nil, errors.Wrap(err, "failed to set options")
}
}
if e.logger == nil {
l, err := NewLogger()
if err != nil {
return nil, errors.Wrap(err, "failed to create logger")
}
e.logger = l
}
if e.endpoint == nil && e.fcgiEndpoint == nil {
u, _ := url.Parse("http://localhost:9000/status")
e.endpoint = u
}
return e, nil
}
// SetLogger creates a function that will set the logger.
// Generally only used when creating a new Exporter.
func SetLogger(l *zap.Logger) func(*Exporter) error {
return func(e *Exporter) error {
e.logger = l
return nil
}
}
// SetAddress creates a function that will set the listening address.
// Generally only used when creating a new Exporter.
func SetAddress(addr string) func(*Exporter) error {
return func(e *Exporter) error {
host, port, err := net.SplitHostPort(addr)
if err != nil {
return errors.Wrapf(err, "invalid address")
}
e.addr = net.JoinHostPort(host, port)
return nil
}
}
// SetEndpoint creates a function that will set the URL endpoint to contact
// php-fpm.
// Generally only used when creating a new Exporter.
func SetEndpoint(rawurl string) func(*Exporter) error {
return func(e *Exporter) error {
u, err := url.Parse(rawurl)
if err != nil {
return errors.Wrap(err, "failed to parse url")
}
e.endpoint = u
return nil
}
}
// SetFastcgi creates a function that will set the fastcgi URL endpoint to contact
// php-fpm. If this is set, then fastcgi is used rather than HTTP.
// Generally only used when creating a new Exporter.
func SetFastcgi(rawurl string) func(*Exporter) error {
return func(e *Exporter) error {
u, err := url.Parse(rawurl)
if err != nil {
return errors.Wrap(err, "failed to parse url")
}
e.fcgiEndpoint = u
return nil
}
}
var healthzOK = []byte("ok\n")
func (e *Exporter) healthz(w http.ResponseWriter, r *http.Request) {
w.Write(healthzOK)
}
// Run starts the http server and collecting metrics. It generally does not return.
func (e *Exporter) Run() error {
c := e.newCollector()
if err := prometheus.Register(c); err != nil {
return errors.Wrap(err, "failed to register metrics")
}
prometheus.Unregister(prometheus.NewProcessCollector(os.Getpid(), ""))
prometheus.Unregister(prometheus.NewGoCollector())
http.HandleFunc("/healthz", e.healthz)
http.Handle("/metrics", promhttp.Handler())
stopChan := make(chan os.Signal, 1)
signal.Notify(stopChan, syscall.SIGINT, syscall.SIGTERM)
srv := &http.Server{Addr: e.addr}
var g errgroup.Group
g.Go(func() error {
// TODO: allow TLS
return srv.ListenAndServe()
})
g.Go(func() error {
<-stopChan
// XXX: should shutdown time be configurable?
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
_ = srv.Shutdown(ctx)
return nil
})
if err := g.Wait(); err != http.ErrServerClosed {
return errors.Wrap(err, "failed to run server")
}
return nil
}
packet_sizes.rs | // Copyright 2020 Nym Technologies SA
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::FRAG_ID_LEN;
use nymsphinx_types::header::HEADER_SIZE;
use nymsphinx_types::PAYLOAD_OVERHEAD_SIZE;
use std::convert::TryFrom;
// it's up to the smart people to figure those values out : )
const REGULAR_PACKET_SIZE: usize = HEADER_SIZE + PAYLOAD_OVERHEAD_SIZE + 2 * 1024;
// TODO: even though we have 16B IV, is having just 5B (FRAG_ID_LEN) of the ID possibly insecure?
// TODO: I'm not entirely sure if we can easily extract `<AckEncryptionAlgorithm as NewStreamCipher>::NonceSize`
// into a const usize before relevant stuff is stabilised in rust...
const ACK_IV_SIZE: usize = 16;
const ACK_PACKET_SIZE: usize = HEADER_SIZE + PAYLOAD_OVERHEAD_SIZE + ACK_IV_SIZE + FRAG_ID_LEN;
const EXTENDED_PACKET_SIZE: usize = HEADER_SIZE + PAYLOAD_OVERHEAD_SIZE + 32 * 1024;
#[derive(Debug)]
pub struct InvalidPacketSize;
#[repr(u8)]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PacketSize {
RegularPacket = 1,
// for example instant messaging use case
ACKPacket = 2,
// for sending SURB-ACKs
ExtendedPacket = 3, // for example for streaming fast and furious in uncompressed 10bit 4K HDR quality
}
impl TryFrom<u8> for PacketSize {
type Error = InvalidPacketSize;
fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
match value {
_ if value == (PacketSize::RegularPacket as u8) => Ok(Self::RegularPacket),
_ if value == (PacketSize::ACKPacket as u8) => Ok(Self::ACKPacket),
_ if value == (PacketSize::ExtendedPacket as u8) => Ok(Self::ExtendedPacket),
_ => Err(InvalidPacketSize),
}
}
}
impl PacketSize {
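// size returns the full packet length (header plus payload) in bytes.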
pub fn size(self) -> usize {
match self {
PacketSize::RegularPacket => REGULAR_PACKET_SIZE,
PacketSize::ACKPacket => ACK_PACKET_SIZE,
PacketSize::ExtendedPacket => EXTENDED_PACKET_SIZE,
}
}
pub fn plaintext_size(self) -> usize {
self.size() - HEADER_SIZE - PAYLOAD_OVERHEAD_SIZE
}
pub fn payload_size(self) -> usize {
self.size() - HEADER_SIZE
}
pub fn get_type(size: usize) -> std::result::Result<Self, InvalidPacketSize> {
if PacketSize::RegularPacket.size() == size {
Ok(PacketSize::RegularPacket)
} else if PacketSize::ACKPacket.size() == size {
Ok(PacketSize::ACKPacket)
} else if PacketSize::ExtendedPacket.size() == size {
Ok(PacketSize::ExtendedPacket)
} else {
Err(InvalidPacketSize)
}
}
}
impl Default for PacketSize {
fn default() -> Self {
PacketSize::RegularPacket
}
}
recording_surface.rs | // Take a look at the license at the top of the repository in the LICENSE file.
use std::convert::TryFrom;
use std::fmt;
use std::ops::Deref;
use crate::enums::{Content, SurfaceType};
use crate::error::Error;
use crate::rectangle::Rectangle;
#[cfg(feature = "use_glib")]
use glib::translate::*;
use crate::surface::Surface;
declare_surface!(RecordingSurface, SurfaceType::Recording);
impl RecordingSurface {
#[doc(alias = "cairo_recording_surface_create")]
pub fn create<T: Into<Option<Rectangle>>>(
content: Content,
extends: T,
) -> Result<RecordingSurface, Error> {
unsafe {
let extends = extends.into();
let extends = match extends {
Some(c) => c.to_raw_none(),
None => ::std::ptr::null(),
};
Ok(Self::from_raw_full(ffi::cairo_recording_surface_create(
content.into(),
extends,
))?)
}
}
#[doc(alias = "cairo_recording_surface_get_extents")]
#[doc(alias = "get_extents")]
pub fn extents(&self) -> Option<Rectangle> {
unsafe {
let rectangle: Rectangle = ::std::mem::zeroed();
if ffi::cairo_recording_surface_get_extents(self.to_raw_none(), rectangle.to_raw_none())
.as_bool()
{
Some(rectangle)
} else {
None
}
}
}
#[doc(alias = "cairo_recording_surface_ink_extents")]
pub fn ink_extents(&self) -> (f64, f64, f64, f64) {
let mut x0 = 0.;
let mut y0 = 0.;
let mut width = 0.;
let mut height = 0.;
unsafe {
ffi::cairo_recording_surface_ink_extents(
self.to_raw_none(),
&mut x0,
&mut y0,
&mut width,
&mut height,
);
}
(x0, y0, width, height)
}
}
aliasesConfig.js | const packageAliases = require('../../package.json')._moduleAliases;
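// Rebase the aliases from package.json onto the caller's work folder for webpack's resolve.alias.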
function config(options = {}) {
const aliases = {};
Object.keys(packageAliases).forEach((key) => {
aliases[key] = options.workFolder + packageAliases[key].slice(1);
});
return {
resolve: {
alias: aliases,
},
};
}
module.exports = config;
index.tsx | import React, { TextareaHTMLAttributes } from 'react';
import './styles.css';
interface TextareaProps extends TextareaHTMLAttributes<HTMLTextAreaElement> {
label: string;
name: string;
}
const Textarea: React.FC<TextareaProps> = ({label, name, ...rest }) => {
return(
<div className="textarea-block">
<label htmlFor={name}>{label}</label>
<textarea id={name} {...rest} />
</div>
);
}

export default Textarea;
unique_impl.rs |
use std::collections::HashMap;
use std::collections::hash_map::{Entry};
use std::hash::Hash;
use std::fmt;
/// An iterator adapter to filter out duplicate elements.
///
/// See [`.unique_by()`](../trait.Itertools.html#method.unique) for more information.
#[derive(Clone)]
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
pub struct UniqueBy<I: Iterator, V, F> {
iter: I,
// Use a hashmap for the entry API
used: HashMap<V, ()>,
f: F,
}
impl<I, V, F> fmt::Debug for UniqueBy<I, V, F>
where I: Iterator + fmt::Debug,
V: fmt::Debug + Hash + Eq,
{
debug_fmt_fields!(UniqueBy, iter, used);
}
/// Create a new `UniqueBy` iterator.
pub fn unique_by<I, V, F>(iter: I, f: F) -> UniqueBy<I, V, F>
where V: Eq + Hash,
F: FnMut(&I::Item) -> V,
I: Iterator,
{
UniqueBy {
iter,
used: HashMap::new(),
f,
}
}
// count the number of new unique keys in iterable (`used` is the set already seen)
fn count_new_keys<I, K>(mut used: HashMap<K, ()>, iterable: I) -> usize
where I: IntoIterator<Item=K>,
K: Hash + Eq,
{
let iter = iterable.into_iter();
let current_used = used.len();
used.extend(iter.map(|key| (key, ())));
used.len() - current_used
}
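// Editorial sanity sketch (an addition, not part of the crate): `count_new_keys`
// only counts keys absent from the pre-seeded map.
#[cfg(test)]
mod count_new_keys_sketch {
    use super::count_new_keys;
    use std::collections::HashMap;

    #[test]
    fn counts_only_unseen_keys() {
        let mut used = HashMap::new();
        used.insert(1, ());
        used.insert(2, ());
        // 2 is already seen; 3 appears twice but is counted once; 4 is new.
        assert_eq!(count_new_keys(used, vec![2, 3, 3, 4]), 2);
    }
}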
impl<I, V, F> Iterator for UniqueBy<I, V, F>
where I: Iterator,
V: Eq + Hash,
F: FnMut(&I::Item) -> V
{
type Item = I::Item;
fn next(&mut self) -> Option<Self::Item> {
while let Some(v) = self.iter.next() {
let key = (self.f)(&v);
if self.used.insert(key, ()).is_none() {
return Some(v);
}
}
None
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let (low, hi) = self.iter.size_hint();
((low > 0 && self.used.is_empty()) as usize, hi)
}
fn count(self) -> usize {
let mut key_f = self.f;
count_new_keys(self.used, self.iter.map(move |elt| key_f(&elt)))
}
}
impl<I, V, F> DoubleEndedIterator for UniqueBy<I, V, F>
where I: DoubleEndedIterator,
V: Eq + Hash,
F: FnMut(&I::Item) -> V
{
fn next_back(&mut self) -> Option<I::Item> {
while let Some(v) = self.iter.next_back() {
let key = (self.f)(&v);
if self.used.insert(key, ()).is_none() {
return Some(v);
}
}
None
}
}
impl<I> Iterator for Unique<I>
where I: Iterator,
I::Item: Eq + Hash + Clone
{
type Item = I::Item;
fn next(&mut self) -> Option<Self::Item> {
while let Some(v) = self.iter.iter.next() {
if let Entry::Vacant(entry) = self.iter.used.entry(v) {
let elt = entry.key().clone();
entry.insert(());
return Some(elt);
}
}
None
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let (low, hi) = self.iter.iter.size_hint();
((low > 0 && self.iter.used.is_empty()) as usize, hi)
}
fn count(self) -> usize {
count_new_keys(self.iter.used, self.iter.iter)
}
}
impl<I> DoubleEndedIterator for Unique<I>
where I: DoubleEndedIterator,
I::Item: Eq + Hash + Clone
{
fn next_back(&mut self) -> Option<I::Item> {
while let Some(v) = self.iter.iter.next_back() {
if let Entry::Vacant(entry) = self.iter.used.entry(v) {
let elt = entry.key().clone();
entry.insert(());
return Some(elt);
}
}
None
}
}
/// An iterator adapter to filter out duplicate elements.
///
/// See [`.unique()`](../trait.Itertools.html#method.unique) for more information.
#[derive(Clone)]
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
pub struct Unique<I: Iterator> {
iter: UniqueBy<I, I::Item, ()>,
}
impl<I> fmt::Debug for Unique<I>
where I: Iterator + fmt::Debug,
I::Item: Hash + Eq + fmt::Debug,
{
debug_fmt_fields!(Unique, iter);
}
pub fn | <I>(iter: I) -> Unique<I>
where I: Iterator,
I::Item: Eq + Hash,
{
Unique {
iter: UniqueBy {
iter,
used: HashMap::new(),
f: (),
}
}
}
| unique |
get_responses.go | package containers
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"fmt"
"io"
"github.com/go-openapi/runtime" |
"github.com/vmware/vic/lib/apiservers/portlayer/models"
)
// GetReader is a Reader for the Get structure.
type GetReader struct {
formats strfmt.Registry
}
// ReadResponse reads a server response into the received o.
func (o *GetReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {
switch response.Code() {
case 200:
result := NewGetOK()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return result, nil
case 404:
result := NewGetNotFound()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
default:
result := NewGetDefault(response.Code())
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
if response.Code()/100 == 2 {
return result, nil
}
return nil, result
}
}
// NewGetOK creates a GetOK with default headers values
func NewGetOK() *GetOK {
return &GetOK{}
}
/*GetOK handles this case with default header values.
OK
*/
type GetOK struct {
Payload string
}
func (o *GetOK) Error() string {
return fmt.Sprintf("[GET /containers/{id}][%d] getOK %+v", 200, o.Payload)
}
func (o *GetOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
// response payload
if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewGetNotFound creates a GetNotFound with default headers values
func NewGetNotFound() *GetNotFound {
return &GetNotFound{}
}
/*GetNotFound handles this case with default header values.
not found
*/
type GetNotFound struct {
Payload *models.Error
}
func (o *GetNotFound) Error() string {
return fmt.Sprintf("[GET /containers/{id}][%d] getNotFound %+v", 404, o.Payload)
}
func (o *GetNotFound) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.Error)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewGetDefault creates a GetDefault with default headers values
func NewGetDefault(code int) *GetDefault {
return &GetDefault{
_statusCode: code,
}
}
/*GetDefault handles this case with default header values.
Error
*/
type GetDefault struct {
_statusCode int
Payload *models.Error
}
// Code gets the status code for the get default response
func (o *GetDefault) Code() int {
return o._statusCode
}
func (o *GetDefault) Error() string {
return fmt.Sprintf("[GET /containers/{id}][%d] Get default %+v", o._statusCode, o.Payload)
}
func (o *GetDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.Error)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
} |
strfmt "github.com/go-openapi/strfmt" |
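// Editorial usage sketch (the client plumbing below is assumed, not part of
// this generated package):
//   reader := &GetReader{formats: strfmt.Default}
//   payload, err := reader.ReadResponse(resp, runtime.JSONConsumer())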
xgboost_baseline.py | __author__ = 'ZFTurbo: https://kaggle.com/zfturbo'
import datetime
import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
import xgboost as xgb
import random
import time
from sklearn.metrics import log_loss
random.seed(2016)
def run_xgb(train, test, features, target, random_state=0):
eta = 0.1
max_depth = 3
subsample = 0.7
colsample_bytree = 0.7
start_time = time.time()
print('XGBoost params. ETA: {}, MAX_DEPTH: {}, SUBSAMPLE: {}, COLSAMPLE_BY_TREE: {}'.format(eta, max_depth, subsample, colsample_bytree))
params = {
"objective": "multi:softprob",
"num_class": 12,
"booster" : "gbtree",
"eval_metric": "mlogloss",
"eta": eta,
"max_depth": max_depth,
"subsample": subsample,
"colsample_bytree": colsample_bytree,
"silent": 1,
"seed": random_state,
}
num_boost_round = 500
early_stopping_rounds = 50
test_size = 0.3
X_train, X_valid = train_test_split(train, test_size=test_size, random_state=random_state)
print('Length train:', len(X_train.index))
print('Length valid:', len(X_valid.index))
y_train = X_train[target]
y_valid = X_valid[target]
dtrain = xgb.DMatrix(X_train[features], y_train)
dvalid = xgb.DMatrix(X_valid[features], y_valid)
watchlist = [(dtrain, 'train'), (dvalid, 'eval')]
gbm = xgb.train(params, dtrain, num_boost_round, evals=watchlist, early_stopping_rounds=early_stopping_rounds, verbose_eval=True)
print("Validating...")
check = gbm.predict(xgb.DMatrix(X_valid[features]), ntree_limit=gbm.best_iteration)
score = log_loss(y_valid.tolist(), check)
print("Predict test set...")
test_prediction = gbm.predict(xgb.DMatrix(test[features]), ntree_limit=gbm.best_iteration)
print('Training time: {} minutes'.format(round((time.time() - start_time)/60, 2)))
return test_prediction.tolist(), score
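# Editorial note: gbm.best_iteration is the 0-based index of the best round
# under early stopping; many xgboost examples pass
# ntree_limit=gbm.best_iteration + 1 so the best tree itself is included.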
def create_submission(score, test, prediction):
# Make Submission
now = datetime.datetime.now()
sub_file = 'submission_' + str(score) + '_' + str(now.strftime("%Y-%m-%d-%H-%M")) + '.csv'
print('Writing submission: ', sub_file)
f = open(sub_file, 'w')
f.write('device_id,F23-,F24-26,F27-28,F29-32,F33-42,F43+,M22-,M23-26,M27-28,M29-31,M32-38,M39+\n')
total = 0
test_val = test['device_id'].values
for i in range(len(test_val)):
str1 = str(test_val[i])
for j in range(12):
str1 += ',' + str(prediction[i][j])
str1 += '\n'
total += 1
f.write(str1)
f.close()
def | (table, f):
labels = sorted(table[f].unique())
mappings = dict()
for i in range(len(labels)):
mappings[labels[i]] = i
table = table.replace({f: mappings})
return table
def read_train_test():
# Events
print('Read events...')
events = pd.read_csv("../input/events.csv", dtype={'device_id': np.str})
events['counts'] = events.groupby(['device_id'])['event_id'].transform('count')
events_small = events[['device_id', 'counts']].drop_duplicates('device_id', keep='first')
# Phone brand
print('Read brands...')
pbd = pd.read_csv("../input/phone_brand_device_model.csv", dtype={'device_id': np.str})
pbd.drop_duplicates('device_id', keep='first', inplace=True)
pbd = map_column(pbd, 'phone_brand')
pbd = map_column(pbd, 'device_model')
# Train
print('Read train...')
train = pd.read_csv("../input/gender_age_train.csv", dtype={'device_id': np.str})
train = map_column(train, 'group')
train = train.drop(['age'], axis=1)
train = train.drop(['gender'], axis=1)
train = pd.merge(train, pbd, how='left', on='device_id', left_index=True)
train = pd.merge(train, events_small, how='left', on='device_id', left_index=True)
train.fillna(-1, inplace=True)
# Test
print('Read test...')
test = pd.read_csv("../input/gender_age_test.csv", dtype={'device_id': np.str})
test = pd.merge(test, pbd, how='left', on='device_id', left_index=True)
test = pd.merge(test, events_small, how='left', on='device_id', left_index=True)
test.fillna(-1, inplace=True)
# Features
features = list(test.columns.values)
features.remove('device_id')
return train, test, features
train, test, features = read_train_test()
print('Length of train: ', len(train))
print('Length of test: ', len(test))
print('Features [{}]: {}'.format(len(features), sorted(features)))
test_prediction, score = run_xgb(train, test, features, 'group')
print("LS: {}".format(round(score, 5)))
create_submission(score, test, test_prediction) | map_column |
theme.rs | use ansi_term::{Style, Color};
pub struct PromptTheme {
pub prompt: Style,
pub prompt_continue: Style,
pub path: Style,
pub path_debug: Style,
pub path_basename: Style,
pub path_nowrite: Style,
pub path_nowrite_basename: Style,
pub repo: Style,
pub repo_work_tree: Style,
pub repo_dirty: Style,
pub repo_staged: Style,
pub hostname: Style,
pub username: Style,
}
impl PromptTheme {
pub fn arrow(style: Style, connected: Option<Style>) -> Style {
let mut res = if let Some(background) = style.background {
background.normal()
} else {
Style::default()
};
if let Some(connected) = connected {
if let Some(background) = connected.background {
res = res.on(background);
}
}
res
}
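    // Editorial sketch: for a segment drawn on a green background followed by
    // a red-background segment, `arrow` yields green foreground on red
    // background, i.e. the style used to paint the powerline separator glyph
    // between the two segments.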
pub fn path(&self, readonly: bool) -> Style {
if readonly {
self.path_nowrite
} else {
self.path
}
}
pub fn basename(&self, readonly: bool) -> Style {
if readonly {
self.path_nowrite_basename
} else {
self.path_basename
}
}
}
pub fn basic_theme() -> PromptTheme |
pub fn nord_theme() -> PromptTheme {
let base00 = Color::RGB(0x2E, 0x34, 0x40);
let base02 = Color::RGB(0x43, 0x4C, 0x5E);
let base05 = Color::RGB(0xE5, 0xE9, 0xF0);
let base08 = Color::RGB(0x88, 0xC0, 0xD0);
let base0d = Color::RGB(0xEB, 0xCB, 0x8B);
PromptTheme {
prompt: Color::Fixed(10).on(Color::Black).bold(),
prompt_continue: Color::Fixed(13).on(Color::Black).bold(),
path: base05.on(base02),
path_debug: Color::Fixed(83).normal(),
path_basename: Color::RGB(0xEC, 0xEF, 0xF4).on(base02).bold(),
path_nowrite: base08.on(base02),
        path_nowrite_basename: base08.on(base02).bold(),
        repo: base00.on(Color::RGB(0xA3, 0xBE, 0x8C)),
        repo_work_tree: base00.on(base02).bold(),
        repo_dirty: base00.on(Color::RGB(0xBF, 0x61, 0x6A)),
        repo_staged: base00.on(base0d),
hostname: Color::RGB(0x25, 0x5e, 0x87).on(Color::RGB(0xcc, 0xcc, 0xcc)),
username: Color::RGB(0x25, 0x5e, 0x87).on(Color::RGB(0xcc, 0xcc, 0xcc)).bold(),
}
}
pub struct SyntaxTheme {
pub normal: Style,
pub command: Style,
pub error: Style,
pub quoted: Style,
pub valid_path: Style,
pub argument: Style,
}
pub fn default_syntax_theme() -> SyntaxTheme {
SyntaxTheme {
normal: Style::new(),
command: Color::RGB(0, 0x5f, 0xd7).normal(),
error: Color::Red.normal(),
quoted: Color::RGB(0x99, 0x99, 0x00).normal(),
valid_path: Style::new().underline(),
argument: Color::RGB(0x00, 0xaf, 0xff).normal(),
}
}
| {
let base0d = Color::RGB(0xEB, 0xCB, 0x8B);
PromptTheme {
prompt: Color::Purple.bold(),
prompt_continue: Color::Fixed(13).bold(),
path: Color::Blue.normal(),
path_debug: Color::Fixed(83).normal(),
path_basename: Color::Blue.bold(),
path_nowrite: Color::Yellow.normal(),
path_nowrite_basename: Color::Yellow.bold(),
repo: Color::Green.normal(),
repo_work_tree: Color::Fixed(15).normal(),
repo_dirty: Color::Red.normal(),
repo_staged: Color::Yellow.normal(),
        hostname: base0d.normal(),
        username: base0d.bold(),
}
} |
SegDataGenerator.py | from keras.preprocessing.image import *
from keras.applications.imagenet_utils import preprocess_input
from keras import backend as K
from PIL import Image
import numpy as np
import os
#import cv2
def center_crop(x, center_crop_size, data_format, **kwargs):
if data_format == 'channels_first':
centerh, centerw = x.shape[1] // 2, x.shape[2] // 2
elif data_format == 'channels_last':
centerh, centerw = x.shape[0] // 2, x.shape[1] // 2
lh, lw = center_crop_size[0] // 2, center_crop_size[1] // 2
rh, rw = center_crop_size[0] - lh, center_crop_size[1] - lw
h_start, h_end = centerh - lh, centerh + rh
w_start, w_end = centerw - lw, centerw + rw
if data_format == 'channels_first':
return x[:, h_start:h_end, w_start:w_end]
elif data_format == 'channels_last':
return x[h_start:h_end, w_start:w_end, :]
def pair_center_crop(x, y, center_crop_size, data_format, **kwargs):
if data_format == 'channels_first':
centerh, centerw = x.shape[1] // 2, x.shape[2] // 2
elif data_format == 'channels_last':
centerh, centerw = x.shape[0] // 2, x.shape[1] // 2
lh, lw = center_crop_size[0] // 2, center_crop_size[1] // 2
rh, rw = center_crop_size[0] - lh, center_crop_size[1] - lw
h_start, h_end = centerh - lh, centerh + rh
w_start, w_end = centerw - lw, centerw + rw
if data_format == 'channels_first':
return x[:, h_start:h_end, w_start:w_end], \
y[:, h_start:h_end, w_start:w_end]
elif data_format == 'channels_last':
return x[h_start:h_end, w_start:w_end, :], \
y[h_start:h_end, w_start:w_end, :]
def | (x, random_crop_size, data_format, sync_seed=None, **kwargs):
np.random.seed(sync_seed)
if data_format == 'channels_first':
h, w = x.shape[1], x.shape[2]
elif data_format == 'channels_last':
h, w = x.shape[0], x.shape[1]
rangeh = (h - random_crop_size[0]) // 2
rangew = (w - random_crop_size[1]) // 2
offseth = 0 if rangeh == 0 else np.random.randint(rangeh)
offsetw = 0 if rangew == 0 else np.random.randint(rangew)
h_start, h_end = offseth, offseth + random_crop_size[0]
w_start, w_end = offsetw, offsetw + random_crop_size[1]
if data_format == 'channels_first':
return x[:, h_start:h_end, w_start:w_end]
elif data_format == 'channels_last':
return x[h_start:h_end, w_start:w_end, :]
def pair_random_crop(x, y, random_crop_size, data_format, sync_seed=None, **kwargs):
np.random.seed(sync_seed)
if data_format == 'channels_first':
h, w = x.shape[1], x.shape[2]
elif data_format == 'channels_last':
h, w = x.shape[0], x.shape[1]
rangeh = (h - random_crop_size[0]) // 2
rangew = (w - random_crop_size[1]) // 2
offseth = 0 if rangeh == 0 else np.random.randint(rangeh)
offsetw = 0 if rangew == 0 else np.random.randint(rangew)
h_start, h_end = offseth, offseth + random_crop_size[0]
w_start, w_end = offsetw, offsetw + random_crop_size[1]
if data_format == 'channels_first':
        return x[:, h_start:h_end, w_start:w_end], y[:, h_start:h_end, w_start:w_end]
elif data_format == 'channels_last':
return x[h_start:h_end, w_start:w_end, :], y[h_start:h_end, w_start:w_end, :]
class SegDirectoryIterator(Iterator):
'''
Users need to ensure that all files exist.
    Label images should be png images where pixel values represent the class number.
    find images -name '*.jpg' > images.txt
    find labels -name '*.png' > labels.txt
for a file name 2011_002920.jpg, each row should contain 2011_002920
file_path: location of train.txt, or val.txt in PASCAL VOC2012 format,
listing image file path components without extension
data_dir: location of image files referred to by file in file_path
label_dir: location of label files
data_suffix: image file extension, such as `.jpg` or `.png`
label_suffix: label file suffix, such as `.png`, or `.npy`
loss_shape: shape to use when applying loss function to the label data
'''
def __init__(self, file_path, seg_data_generator,
data_dir, data_suffix,
label_dir, label_suffix, classes, ignore_label=255,
crop_mode='none', label_cval=255, pad_size=None,
target_size=None, color_mode='rgb',
data_format='default', class_mode='sparse',
batch_size=1, shuffle=True, seed=None,
save_to_dir=None, save_prefix='', save_format='jpeg',
loss_shape=None):
if data_format == 'default':
data_format = K.image_data_format()
self.file_path = file_path
self.data_dir = data_dir
self.data_suffix = data_suffix
self.label_suffix = label_suffix
self.label_dir = label_dir
self.classes = classes
self.seg_data_generator = seg_data_generator
self.target_size = tuple(target_size)
self.ignore_label = ignore_label
self.crop_mode = crop_mode
self.label_cval = label_cval
self.pad_size = pad_size
if color_mode not in {'rgb', 'grayscale'}:
raise ValueError('Invalid color mode:', color_mode,
'; expected "rgb" or "grayscale".')
self.color_mode = color_mode
self.data_format = data_format
self.nb_label_ch = 1
self.loss_shape = loss_shape
if (self.label_suffix == '.npy') or (self.label_suffix == 'npy'):
self.label_file_format = 'npy'
else:
self.label_file_format = 'img'
if target_size:
if self.color_mode == 'rgb':
if self.data_format == 'channels_last':
self.image_shape = self.target_size + (3,)
else:
self.image_shape = (3,) + self.target_size
else:
if self.data_format == 'channels_last':
self.image_shape = self.target_size + (1,)
else:
self.image_shape = (1,) + self.target_size
if self.data_format == 'channels_last':
self.label_shape = self.target_size + (self.nb_label_ch,)
else:
self.label_shape = (self.nb_label_ch,) + self.target_size
elif batch_size != 1:
raise ValueError(
'Batch size must be 1 when target image size is undetermined')
else:
self.image_shape = None
self.label_shape = None
if class_mode not in {'sparse', None}:
raise ValueError('Invalid class_mode:', class_mode,
'; expected one of '
'"sparse", or None.')
self.class_mode = class_mode
if save_to_dir:
self.palette = None
self.save_to_dir = save_to_dir
self.save_prefix = save_prefix
self.save_format = save_format
white_list_formats = {'png', 'jpg', 'jpeg', 'bmp', 'npy'}
# build lists for data files and label files
self.data_files = []
self.label_files = []
fp = open(file_path)
lines = fp.readlines()
fp.close()
self.nb_sample = len(lines)
for line in lines:
line = line.strip('\n')
self.data_files.append(line + data_suffix)
self.label_files.append(line + label_suffix)
super(SegDirectoryIterator, self).__init__(
self.nb_sample, batch_size, shuffle, seed)
def next(self):
with self.lock:
index_array, current_index, current_batch_size = next(
self.index_generator)
# The transformation of images is not under thread lock so it can be
# done in parallel
if self.target_size:
# TODO(ahundt) make dtype properly configurable
batch_x = np.zeros((current_batch_size,) + self.image_shape)
            if self.loss_shape is None and self.label_file_format == 'img':
batch_y = np.zeros((current_batch_size,) + self.label_shape,
dtype=int)
elif self.loss_shape is None:
batch_y = np.zeros((current_batch_size,) + self.label_shape)
else:
batch_y = np.zeros((current_batch_size,) + self.loss_shape,
dtype=np.uint8)
grayscale = self.color_mode == 'grayscale'
# build batch of image data and labels
for i, j in enumerate(index_array):
data_file = self.data_files[j]
label_file = self.label_files[j]
img_file_format = 'img'
img = load_img(os.path.join(self.data_dir, data_file),
grayscale=grayscale, target_size=None)
label_filepath = os.path.join(self.label_dir, label_file)
if self.label_file_format == 'npy':
y = np.load(label_filepath)
else:
label = Image.open(label_filepath)
if self.save_to_dir and self.palette is None:
self.palette = label.palette
# do padding
if self.target_size:
if self.crop_mode != 'none':
x = img_to_array(img, data_format=self.data_format)
                    if self.label_file_format != 'npy':
y = img_to_array(
label, data_format=self.data_format).astype(int)
img_w, img_h = img.size
if self.pad_size:
pad_w = max(self.pad_size[1] - img_w, 0)
pad_h = max(self.pad_size[0] - img_h, 0)
else:
pad_w = max(self.target_size[1] - img_w, 0)
pad_h = max(self.target_size[0] - img_h, 0)
if self.data_format == 'channels_first':
                        x = np.lib.pad(x, ((0, 0), (pad_h // 2, pad_h - pad_h // 2), (pad_w // 2, pad_w - pad_w // 2)), 'constant', constant_values=0.)
                        y = np.lib.pad(y, ((0, 0), (pad_h // 2, pad_h - pad_h // 2), (pad_w // 2, pad_w - pad_w // 2)),
                                       'constant', constant_values=self.label_cval)
                    elif self.data_format == 'channels_last':
                        x = np.lib.pad(x, ((pad_h // 2, pad_h - pad_h // 2), (pad_w // 2, pad_w - pad_w // 2), (0, 0)), 'constant', constant_values=0.)
                        y = np.lib.pad(y, ((pad_h // 2, pad_h - pad_h // 2), (pad_w // 2, pad_w - pad_w // 2), (0, 0)), 'constant', constant_values=self.label_cval)
else:
x = img_to_array(img.resize((self.target_size[1], self.target_size[0]),
Image.BILINEAR),
data_format=self.data_format)
                    if self.label_file_format != 'npy':
y = img_to_array(label.resize((self.target_size[1], self.target_size[
0]), Image.NEAREST), data_format=self.data_format).astype(int)
else:
print('ERROR: resize not implemented for label npy file')
if self.target_size is None:
batch_x = np.zeros((current_batch_size,) + x.shape)
if self.loss_shape is not None:
batch_y = np.zeros((current_batch_size,) + self.loss_shape)
else:
batch_y = np.zeros((current_batch_size,) + y.shape)
x, y = self.seg_data_generator.random_transform(x, y)
x = self.seg_data_generator.standardize(x)
if self.ignore_label:
y[np.where(y == self.ignore_label)] = self.classes
if self.loss_shape is not None:
y = np.reshape(y, self.loss_shape)
batch_x[i] = x
batch_y[i] = y
# optionally save augmented images to disk for debugging purposes
if self.save_to_dir:
for i in range(current_batch_size):
img = array_to_img(batch_x[i], self.data_format, scale=True)
label = batch_y[i][:, :, 0].astype('uint8')
label[np.where(label == self.classes)] = self.ignore_label
label = Image.fromarray(label, mode='P')
label.palette = self.palette
fname = '{prefix}_{index}_{hash}'.format(prefix=self.save_prefix,
index=current_index + i,
                                                         hash=np.random.randint(int(1e4)))
img.save(os.path.join(self.save_to_dir, 'img_' +
fname + '.{format}'.format(format=self.save_format)))
label.save(os.path.join(self.save_to_dir,
'label_' + fname + '.png'))
# return
batch_x = preprocess_input(batch_x)
if self.class_mode == 'sparse':
return batch_x, batch_y
else:
return batch_x
class SegDataGenerator(object):
def __init__(self,
featurewise_center=False,
samplewise_center=False,
featurewise_std_normalization=False,
samplewise_std_normalization=False,
channelwise_center=False,
rotation_range=0.,
width_shift_range=0.,
height_shift_range=0.,
shear_range=0.,
zoom_range=0.,
zoom_maintain_shape=True,
channel_shift_range=0.,
fill_mode='constant',
cval=0.,
label_cval=255,
crop_mode='none',
crop_size=(0, 0),
pad_size=None,
horizontal_flip=False,
vertical_flip=False,
rescale=None,
data_format='default'):
if data_format == 'default':
data_format = K.image_data_format()
self.__dict__.update(locals())
self.mean = None
self.ch_mean = None
self.std = None
self.principal_components = None
self.rescale = rescale
if data_format not in {'channels_last', 'channels_first'}:
raise Exception('data_format should be channels_last (channel after row and '
'column) or channels_first (channel before row and column). '
'Received arg: ', data_format)
if crop_mode not in {'none', 'random', 'center'}:
raise Exception('crop_mode should be "none" or "random" or "center" '
'Received arg: ', crop_mode)
self.data_format = data_format
if data_format == 'channels_first':
self.channel_index = 1
self.row_index = 2
self.col_index = 3
if data_format == 'channels_last':
self.channel_index = 3
self.row_index = 1
self.col_index = 2
if np.isscalar(zoom_range):
self.zoom_range = [1 - zoom_range, 1 + zoom_range]
elif len(zoom_range) == 2:
self.zoom_range = [zoom_range[0], zoom_range[1]]
else:
raise Exception('zoom_range should be a float or '
'a tuple or list of two floats. '
'Received arg: ', zoom_range)
def flow_from_directory(self, file_path, data_dir, data_suffix,
label_dir, label_suffix, classes,
ignore_label=255,
target_size=None, color_mode='rgb',
class_mode='sparse',
batch_size=32, shuffle=True, seed=None,
save_to_dir=None, save_prefix='', save_format='jpeg',
loss_shape=None):
if self.crop_mode == 'random' or self.crop_mode == 'center':
target_size = self.crop_size
return SegDirectoryIterator(
file_path, self,
data_dir=data_dir, data_suffix=data_suffix,
label_dir=label_dir, label_suffix=label_suffix,
classes=classes, ignore_label=ignore_label,
crop_mode=self.crop_mode, label_cval=self.label_cval,
pad_size=self.pad_size,
target_size=target_size, color_mode=color_mode,
data_format=self.data_format, class_mode=class_mode,
batch_size=batch_size, shuffle=shuffle, seed=seed,
save_to_dir=save_to_dir, save_prefix=save_prefix,
save_format=save_format,
loss_shape=loss_shape)
def standardize(self, x):
if self.rescale:
x *= self.rescale
# x is a single image, so it doesn't have image number at index 0
img_channel_index = self.channel_index - 1
if self.samplewise_center:
x -= np.mean(x, axis=img_channel_index, keepdims=True)
if self.samplewise_std_normalization:
x /= (np.std(x, axis=img_channel_index, keepdims=True) + 1e-7)
if self.featurewise_center:
x -= self.mean
if self.featurewise_std_normalization:
x /= (self.std + 1e-7)
if self.channelwise_center:
x -= self.ch_mean
return x
def random_transform(self, x, y):
# x is a single image, so it doesn't have image number at index 0
img_row_index = self.row_index - 1
img_col_index = self.col_index - 1
img_channel_index = self.channel_index - 1
if self.crop_mode == 'none':
crop_size = (x.shape[img_row_index], x.shape[img_col_index])
else:
crop_size = self.crop_size
assert x.shape[img_row_index] == y.shape[img_row_index] and x.shape[img_col_index] == y.shape[
img_col_index], 'DATA ERROR: Different shape of data and label!\ndata shape: %s, label shape: %s' % (str(x.shape), str(y.shape))
# use composition of homographies to generate final transform that
# needs to be applied
if self.rotation_range:
theta = np.pi / 180 * \
np.random.uniform(-self.rotation_range, self.rotation_range)
else:
theta = 0
rotation_matrix = np.array([[np.cos(theta), -np.sin(theta), 0],
[np.sin(theta), np.cos(theta), 0],
[0, 0, 1]])
if self.height_shift_range:
# * x.shape[img_row_index]
tx = np.random.uniform(-self.height_shift_range,
self.height_shift_range) * crop_size[0]
else:
tx = 0
if self.width_shift_range:
# * x.shape[img_col_index]
ty = np.random.uniform(-self.width_shift_range,
self.width_shift_range) * crop_size[1]
else:
ty = 0
translation_matrix = np.array([[1, 0, tx],
[0, 1, ty],
[0, 0, 1]])
if self.shear_range:
shear = np.random.uniform(-self.shear_range, self.shear_range)
else:
shear = 0
shear_matrix = np.array([[1, -np.sin(shear), 0],
[0, np.cos(shear), 0],
[0, 0, 1]])
if self.zoom_range[0] == 1 and self.zoom_range[1] == 1:
zx, zy = 1, 1
else:
zx, zy = np.random.uniform(
self.zoom_range[0], self.zoom_range[1], 2)
if self.zoom_maintain_shape:
zy = zx
zoom_matrix = np.array([[zx, 0, 0],
[0, zy, 0],
[0, 0, 1]])
transform_matrix = np.dot(
np.dot(np.dot(rotation_matrix, translation_matrix), shear_matrix), zoom_matrix)
h, w = x.shape[img_row_index], x.shape[img_col_index]
transform_matrix = transform_matrix_offset_center(
transform_matrix, h, w)
x = apply_transform(x, transform_matrix, img_channel_index,
fill_mode=self.fill_mode, cval=self.cval)
y = apply_transform(y, transform_matrix, img_channel_index,
fill_mode='constant', cval=self.label_cval)
if self.channel_shift_range != 0:
x = random_channel_shift(
x, self.channel_shift_range, img_channel_index)
if self.horizontal_flip:
if np.random.random() < 0.5:
x = flip_axis(x, img_col_index)
y = flip_axis(y, img_col_index)
if self.vertical_flip:
if np.random.random() < 0.5:
x = flip_axis(x, img_row_index)
y = flip_axis(y, img_row_index)
if self.crop_mode == 'center':
x, y = pair_center_crop(x, y, self.crop_size, self.data_format)
elif self.crop_mode == 'random':
x, y = pair_random_crop(x, y, self.crop_size, self.data_format)
# TODO:
# channel-wise normalization
# barrel/fisheye
return x, y
def fit(self, X,
augment=False,
rounds=1,
seed=None):
'''Required for featurewise_center and featurewise_std_normalization
# Arguments
X: Numpy array, the data to fit on.
augment: whether to fit on randomly augmented samples
rounds: if `augment`,
how many augmentation passes to do over the data
seed: random seed.
'''
X = np.copy(X)
if augment:
aX = np.zeros(tuple([rounds * X.shape[0]] + list(X.shape)[1:]))
for r in range(rounds):
for i in range(X.shape[0]):
                    # random_transform expects an (x, y) pair; pass the image as a
                    # dummy label and keep only the transformed image for fitting
                    aX[i + r * X.shape[0]] = self.random_transform(X[i], X[i])[0]
X = aX
if self.featurewise_center:
self.mean = np.mean(X, axis=0)
X -= self.mean
if self.featurewise_std_normalization:
self.std = np.std(X, axis=0)
X /= (self.std + 1e-7)
def set_ch_mean(self, ch_mean):
self.ch_mean = ch_mean
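# Editorial usage sketch (paths, class count and crop settings are
# illustrative assumptions):
#   datagen = SegDataGenerator(zoom_range=[0.5, 2.0], crop_mode='random',
#                              crop_size=(320, 320), horizontal_flip=True)
#   it = datagen.flow_from_directory(
#       file_path='train.txt', data_dir='images', data_suffix='.jpg',
#       label_dir='labels', label_suffix='.png', classes=21, batch_size=8)
#   x_batch, y_batch = next(it)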
| random_crop |
_lang.lang.ts | export default {
test: '0x00中文',
langen: '英文',
langcn: '中文',
'langen-US': '英文',
'langzh-CN': '中文',
//
id: 'ID',
category: '分类',
user: '用户',
role: '角色',
email: 'Email',
name: '名字',
title: '标题',
parent: '父',
slug: '标识',
password: '密码',
login: '登录',
register: '注册',
status: '状态',
created_at: '创建时间',
updated_at: '更新时间',
action: '操作',
description: '描述',
link: '链接',
sort: '排序',
upload: '上传',
attachment: '附件',
empty: '暂无',
list: '列表',
mobile: '手机',
desktop: '电脑',
banner: '题图',
cover: '封面',
ax: '广告',
ad: '广告',
tag: '标签',
icon: '图标',
type: '类型',
value: '值',
options: '可选项',
tips: '提示',
// | submit: '提交',
//
select: '选择',
selected: '选中',
selectAll: '全选',
checkAll: '全选',
//
type_input: '文本',
type_textarea: '多行文本',
type_radio: '单选',
type_checkbox: '多选',
//
uploadSuccessfully: '上传成功',
uploadError: '上传出错',
readSuccessfully: '读取成功',
createdSuccessfully: '创建成功',
updatedSuccessfully: '更新成功',
deletedSuccessfully: '删除成功',
}; | create: '创建',
edit: '编辑',
update: '更新',
delete: '删除', |
column-search.min.js | /******/ (() => { // webpackBootstrap
/******/ "use strict";
var __webpack_exports__ = {};
/*!*****************************************************************************!*\
!*** ../demo1/src/js/pages/crud/datatables/search-options/column-search.js ***!
\*****************************************************************************/
var KTDatatablesSearchOptionsColumnSearch = function() {
$.fn.dataTable.Api.register('column().title()', function() {
return $(this.header()).text().trim();
});
var initTable1 = function() {
// begin first table
var table = $('#kt_datatable').DataTable({
responsive: true,
// Pagination settings
dom: `<'row'<'col-sm-12'tr>>
<'row'<'col-sm-12 col-md-5'i><'col-sm-12 col-md-7 dataTables_pager'lp>>`,
// read more: https://datatables.net/examples/basic_init/dom.html
lengthMenu: [5, 10, 25, 50],
| pageLength: 10,
language: {
'lengthMenu': 'Display _MENU_',
},
searchDelay: 500,
processing: true,
serverSide: true,
ajax: {
url: HOST_URL + '/api/datatables/demos/server.php',
type: 'POST',
data: {
// parameters for custom backend script demo
columnsDef: [
'RecordID', 'OrderID', 'Country', 'ShipCity', 'CompanyAgent',
'ShipDate', 'Status', 'Type', 'Actions',
],
},
},
columns: [{
data: 'RecordID'
},
{
data: 'OrderID'
},
{
data: 'Country'
},
{
data: 'ShipCity'
},
{
data: 'CompanyAgent'
},
{
data: 'ShipDate'
},
{
data: 'Status'
},
{
data: 'Type'
},
{
data: 'Actions',
responsivePriority: -1
},
],
initComplete: function() {
var thisTable = this;
var rowFilter = $('<tr class="filter"></tr>').appendTo($(table.table().header()));
this.api().columns().every(function() {
var column = this;
var input;
switch (column.title()) {
case 'Record ID':
case 'Order ID':
case 'Ship City':
case 'Company Agent':
input = $(`<input type="text" class="form-control form-control-sm form-filter datatable-input" data-col-index="` + column.index() + `"/>`);
break;
case 'Country':
input = $(`<select class="form-control form-control-sm form-filter datatable-input" title="Select" data-col-index="` + column.index() + `">
<option value="">Select</option></select>`);
column.data().unique().sort().each(function(d, j) {
$(input).append('<option value="' + d + '">' + d + '</option>');
});
break;
case 'Status':
var status = {
1: {
'title': 'Pending',
'class': 'label-light-primary'
},
2: {
'title': 'Delivered',
'class': ' label-light-danger'
},
3: {
'title': 'Canceled',
'class': ' label-light-primary'
},
4: {
'title': 'Success',
'class': ' label-light-success'
},
5: {
'title': 'Info',
'class': ' label-light-info'
},
6: {
'title': 'Danger',
'class': ' label-light-danger'
},
7: {
'title': 'Warning',
'class': ' label-light-warning'
},
};
input = $(`<select class="form-control form-control-sm form-filter datatable-input" title="Select" data-col-index="` + column.index() + `">
<option value="">Select</option></select>`);
column.data().unique().sort().each(function(d, j) {
$(input).append('<option value="' + d + '">' + status[d].title + '</option>');
});
break;
case 'Type':
var status = {
1: {
'title': 'Online',
'state': 'danger'
},
2: {
'title': 'Retail',
'state': 'primary'
},
3: {
'title': 'Direct',
'state': 'success'
},
};
input = $(`<select class="form-control form-control-sm form-filter datatable-input" title="Select" data-col-index="` + column.index() + `">
<option value="">Select</option></select>`);
column.data().unique().sort().each(function(d, j) {
$(input).append('<option value="' + d + '">' + status[d].title + '</option>');
});
break;
case 'Ship Date':
input = $(`
<div class="input-group date">
<input type="text" class="form-control form-control-sm datatable-input" readonly placeholder="From" id="kt_datepicker_1"
data-col-index="` + column.index() + `"/>
<div class="input-group-append">
<span class="input-group-text"><i class="la la-calendar-o glyphicon-th"></i></span>
</div>
</div>
<div class="input-group date d-flex align-content-center">
<input type="text" class="form-control form-control-sm datatable-input" readonly placeholder="To" id="kt_datepicker_2"
data-col-index="` + column.index() + `"/>
<div class="input-group-append">
<span class="input-group-text"><i class="la la-calendar-o glyphicon-th"></i></span>
</div>
</div>`);
break;
case 'Actions':
var search = $(`
<button class="btn btn-primary kt-btn btn-sm kt-btn--icon d-block">
<span>
<i class="la la-search"></i>
<span>Search</span>
</span>
</button>`);
var reset = $(`
<button class="btn btn-secondary kt-btn btn-sm kt-btn--icon">
<span>
<i class="la la-close"></i>
<span>Reset</span>
</span>
</button>`);
$('<th>').append(search).append(reset).appendTo(rowFilter);
$(search).on('click', function(e) {
e.preventDefault();
var params = {};
$(rowFilter).find('.datatable-input').each(function() {
var i = $(this).data('col-index');
if (params[i]) {
params[i] += '|' + $(this).val();
} else {
params[i] = $(this).val();
}
});
$.each(params, function(i, val) {
// apply search params to datatable
table.column(i).search(val ? val : '', false, false);
});
table.table().draw();
});
$(reset).on('click', function(e) {
e.preventDefault();
$(rowFilter).find('.datatable-input').each(function(i) {
$(this).val('');
table.column($(this).data('col-index')).search('', false, false);
});
table.table().draw();
});
break;
}
if (column.title() !== 'Actions') {
$(input).appendTo($('<th>').appendTo(rowFilter));
}
});
// hide search column for responsive table
var hideSearchColumnResponsive = function() {
thisTable.api().columns().every(function() {
var column = this
if (column.responsiveHidden()) {
$(rowFilter).find('th').eq(column.index()).show();
} else {
$(rowFilter).find('th').eq(column.index()).hide();
}
})
};
// init on datatable load
hideSearchColumnResponsive();
// recheck on window resize
window.onresize = hideSearchColumnResponsive;
$('#kt_datepicker_1,#kt_datepicker_2').datepicker();
},
columnDefs: [{
targets: -1,
title: 'Actions',
orderable: false,
render: function(data, type, full, meta) {
return '\
<div class="dropdown dropdown-inline">\
<a href="javascript:;" class="btn btn-sm btn-clean btn-icon" data-toggle="dropdown">\
<i class="la la-cog"></i>\
</a>\
<div class="dropdown-menu dropdown-menu-sm dropdown-menu-right">\
<ul class="nav nav-hoverable flex-column">\
<li class="nav-item"><a class="nav-link" href="#"><i class="nav-icon la la-edit"></i><span class="nav-text">Edit Details</span></a></li>\
<li class="nav-item"><a class="nav-link" href="#"><i class="nav-icon la la-leaf"></i><span class="nav-text">Update Status</span></a></li>\
<li class="nav-item"><a class="nav-link" href="#"><i class="nav-icon la la-print"></i><span class="nav-text">Print</span></a></li>\
</ul>\
</div>\
</div>\
<a href="javascript:;" class="btn btn-sm btn-clean btn-icon" title="Edit details">\
<i class="la la-edit"></i>\
</a>\
<a href="javascript:;" class="btn btn-sm btn-clean btn-icon" title="Delete">\
<i class="la la-trash"></i>\
</a>\
';
},
},
{
targets: 5,
width: '150px',
},
{
targets: 6,
width: '100px',
render: function(data, type, full, meta) {
var status = {
1: {
'title': 'Pending',
                            'class': ' label-light-primary'
},
2: {
'title': 'Delivered',
'class': ' label-light-danger'
},
3: {
'title': 'Canceled',
'class': ' label-light-primary'
},
4: {
'title': 'Success',
'class': ' label-light-success'
},
5: {
'title': 'Info',
'class': ' label-light-info'
},
6: {
'title': 'Danger',
'class': ' label-light-danger'
},
7: {
'title': 'Warning',
'class': ' label-light-warning'
},
};
if (typeof status[data] === 'undefined') {
return data;
}
return '<span class="label label-lg font-weight-bold' + status[data].class + ' label-inline">' + status[data].title + '</span>';
},
},
{
targets: 7,
width: '100px',
render: function(data, type, full, meta) {
var status = {
1: {
'title': 'Online',
'state': 'danger'
},
2: {
'title': 'Retail',
'state': 'primary'
},
3: {
'title': 'Direct',
'state': 'success'
},
};
if (typeof status[data] === 'undefined') {
return data;
}
return '<span class="label label-' + status[data].state + ' label-dot mr-2"></span>' +
'<span class="font-weight-bold text-' + status[data].state + '">' + status[data].title + '</span>';
},
},
],
});
};
return {
//main function to initiate the module
init: function() {
initTable1();
},
};
}();
jQuery(document).ready(function() {
KTDatatablesSearchOptionsColumnSearch.init();
});
/******/ })()
;
//# sourceMappingURL=column-search.js.map | |
models.py | from absl import flags
from absl.flags import FLAGS
import numpy as np
import tensorflow as tf
from tensorflow.keras import Model
from tensorflow.keras.layers import (
Add,
Concatenate,
Conv2D,
Input,
Lambda,
LeakyReLU,
MaxPool2D,
UpSampling2D,
ZeroPadding2D,
)
from tensorflow.keras.regularizers import l2
from tensorflow.keras.losses import (
binary_crossentropy,
sparse_categorical_crossentropy
)
from .batch_norm import BatchNormalization
from .utils import broadcast_iou
yolo_max_boxes = 100
yolo_iou_threshold = 0.1
yolo_score_threshold = 0.1
# customize the model through the module-level constants above; the flags
# below are kept for CLI compatibility, but yolo_nms reads the constants
flags.DEFINE_integer('yolo_max_boxes', 100, 'maximum number of detections at one time')
flags.DEFINE_float('yolo_iou_threshold', 0.5, 'iou threshold')
flags.DEFINE_float('yolo_score_threshold', 0.5, 'score threshold')
yolo_anchors = np.array([(10, 13), (16, 30), (33, 23), (30, 61), (62, 45),
(59, 119), (116, 90), (156, 198), (373, 326)],
np.float32) / 416
yolo_anchor_masks = np.array([[6, 7, 8], [3, 4, 5], [0, 1, 2]])
yolo_tiny_anchors = np.array([(10, 14), (23, 27), (37, 58),
(81, 82), (135, 169), (344, 319)],
np.float32) / 416
yolo_tiny_anchor_masks = np.array([[3, 4, 5], [0, 1, 2]])
def DarknetConv(x, filters, size, strides=1, batch_norm=True):
if strides == 1:
padding = 'same'
else:
x = ZeroPadding2D(((1, 0), (1, 0)))(x) # top left half-padding
padding = 'valid'
x = Conv2D(filters=filters, kernel_size=size,
strides=strides, padding=padding,
use_bias=not batch_norm, kernel_regularizer=l2(0.0005))(x)
if batch_norm:
x = BatchNormalization()(x)
x = LeakyReLU(alpha=0.1)(x)
return x
def DarknetResidual(x, filters):
prev = x
x = DarknetConv(x, filters // 2, 1)
x = DarknetConv(x, filters, 3)
x = Add()([prev, x])
return x
def DarknetBlock(x, filters, blocks):
x = DarknetConv(x, filters, 3, strides=2)
for _ in range(blocks):
x = DarknetResidual(x, filters)
return x
def Darknet(name=None):
x = inputs = Input([None, None, 3])
x = DarknetConv(x, 32, 3)
x = DarknetBlock(x, 64, 1)
x = DarknetBlock(x, 128, 2) # skip connection
x = x_36 = DarknetBlock(x, 256, 8) # skip connection
x = x_61 = DarknetBlock(x, 512, 8)
x = DarknetBlock(x, 1024, 4)
return tf.keras.Model(inputs, (x_36, x_61, x), name=name)
def DarknetTiny(name=None):
x = inputs = Input([None, None, 3])
x = DarknetConv(x, 16, 3)
x = MaxPool2D(2, 2, 'same')(x)
x = DarknetConv(x, 32, 3)
x = MaxPool2D(2, 2, 'same')(x)
x = DarknetConv(x, 64, 3)
x = MaxPool2D(2, 2, 'same')(x)
x = DarknetConv(x, 128, 3)
x = MaxPool2D(2, 2, 'same')(x)
x = x_8 = DarknetConv(x, 256, 3) # skip connection
x = MaxPool2D(2, 2, 'same')(x)
x = DarknetConv(x, 512, 3)
x = MaxPool2D(2, 1, 'same')(x)
x = DarknetConv(x, 1024, 3)
return tf.keras.Model(inputs, (x_8, x), name=name)
def YoloConv(filters, name=None):
def yolo_conv(x_in):
if isinstance(x_in, tuple):
inputs = Input(x_in[0].shape[1:]), Input(x_in[1].shape[1:])
x, x_skip = inputs
# concat with skip connection
x = DarknetConv(x, filters, 1)
x = UpSampling2D(2)(x)
x = Concatenate()([x, x_skip])
else:
x = inputs = Input(x_in.shape[1:])
x = DarknetConv(x, filters, 1)
x = DarknetConv(x, filters * 2, 3)
x = DarknetConv(x, filters, 1)
x = DarknetConv(x, filters * 2, 3)
x = DarknetConv(x, filters, 1)
return Model(inputs, x, name=name)(x_in)
return yolo_conv
def YoloConvTiny(filters, name=None):
def yolo_conv(x_in):
if isinstance(x_in, tuple):
inputs = Input(x_in[0].shape[1:]), Input(x_in[1].shape[1:])
x, x_skip = inputs
# concat with skip connection
x = DarknetConv(x, filters, 1)
x = UpSampling2D(2)(x)
x = Concatenate()([x, x_skip])
else:
x = inputs = Input(x_in.shape[1:])
x = DarknetConv(x, filters, 1)
return Model(inputs, x, name=name)(x_in)
return yolo_conv
def YoloOutput(filters, anchors, classes, name=None):
def yolo_output(x_in):
x = inputs = Input(x_in.shape[1:])
x = DarknetConv(x, filters * 2, 3)
x = DarknetConv(x, anchors * (classes + 5), 1, batch_norm=False)
x = Lambda(lambda x: tf.reshape(x, (-1, tf.shape(x)[1], tf.shape(x)[2],
anchors, classes + 5)))(x)
return tf.keras.Model(inputs, x, name=name)(x_in)
return yolo_output
def yolo_boxes(pred, anchors, classes):
# pred: (batch_size, grid, grid, anchors, (x, y, w, h, obj, ...classes))
grid_size = tf.shape(pred)[1]
box_xy, box_wh, objectness, class_probs = tf.split(
pred, (2, 2, 1, classes), axis=-1)
box_xy = tf.sigmoid(box_xy)
objectness = tf.sigmoid(objectness)
class_probs = tf.sigmoid(class_probs)
pred_box = tf.concat((box_xy, box_wh), axis=-1) # original xywh for loss
# !!! grid[x][y] == (y, x)
grid = tf.meshgrid(tf.range(grid_size), tf.range(grid_size))
grid = tf.expand_dims(tf.stack(grid, axis=-1), axis=2) # [gx, gy, 1, 2]
box_xy = (box_xy + tf.cast(grid, tf.float32)) / \
tf.cast(grid_size, tf.float32)
box_wh = tf.exp(box_wh) * anchors
box_x1y1 = box_xy - box_wh / 2
box_x2y2 = box_xy + box_wh / 2
bbox = tf.concat([box_x1y1, box_x2y2], axis=-1)
return bbox, objectness, class_probs, pred_box
def yolo_nms(outputs, anchors, masks, classes):
# boxes, conf, type
b, c, t = [], [], []
for o in outputs:
b.append(tf.reshape(o[0], (tf.shape(o[0])[0], -1, tf.shape(o[0])[-1])))
c.append(tf.reshape(o[1], (tf.shape(o[1])[0], -1, tf.shape(o[1])[-1])))
t.append(tf.reshape(o[2], (tf.shape(o[2])[0], -1, tf.shape(o[2])[-1])))
bbox = tf.concat(b, axis=1)
confidence = tf.concat(c, axis=1)
class_probs = tf.concat(t, axis=1)
scores = confidence * class_probs
boxes, scores, classes, valid_detections = tf.image.combined_non_max_suppression(
boxes=tf.reshape(bbox, (tf.shape(bbox)[0], -1, 1, 4)),
scores=tf.reshape(
scores, (tf.shape(scores)[0], -1, tf.shape(scores)[-1])),
max_output_size_per_class=yolo_max_boxes,
max_total_size=yolo_max_boxes,
iou_threshold=yolo_iou_threshold,
score_threshold=yolo_score_threshold
)
return boxes, scores, classes, valid_detections
def YoloV3(size=None, channels=3, anchors=yolo_anchors,
masks=yolo_anchor_masks, classes=80, training=False):
physical_devices = tf.config.experimental.list_physical_devices('GPU')
if len(physical_devices) > 0:
tf.config.experimental.set_memory_growth(physical_devices[0], True)
x = inputs = Input([size, size, channels], name='input')
x_36, x_61, x = Darknet(name='yolo_darknet')(x)
x = YoloConv(512, name='yolo_conv_0')(x)
output_0 = YoloOutput(512, len(masks[0]), classes, name='yolo_output_0')(x)
x = YoloConv(256, name='yolo_conv_1')((x, x_61))
output_1 = YoloOutput(256, len(masks[1]), classes, name='yolo_output_1')(x)
x = YoloConv(128, name='yolo_conv_2')((x, x_36))
output_2 = YoloOutput(128, len(masks[2]), classes, name='yolo_output_2')(x)
if training:
return Model(inputs, (output_0, output_1, output_2), name='yolov3')
boxes_0 = Lambda(lambda x: yolo_boxes(x, anchors[masks[0]], classes),
name='yolo_boxes_0')(output_0)
boxes_1 = Lambda(lambda x: yolo_boxes(x, anchors[masks[1]], classes),
name='yolo_boxes_1')(output_1)
boxes_2 = Lambda(lambda x: yolo_boxes(x, anchors[masks[2]], classes),
name='yolo_boxes_2')(output_2)
outputs = Lambda(lambda x: yolo_nms(x, anchors, masks, classes),
name='yolo_nms')((boxes_0[:3], boxes_1[:3], boxes_2[:3]))
return Model(inputs, outputs, name='yolov3')
def YoloV3Tiny(size=None, channels=3, anchors=yolo_tiny_anchors,
masks=yolo_tiny_anchor_masks, classes=80, training=False):
physical_devices = tf.config.experimental.list_physical_devices('GPU')
if len(physical_devices) > 0:
tf.config.experimental.set_memory_growth(physical_devices[0], True)
x = inputs = Input([size, size, channels], name='input')
x_8, x = DarknetTiny(name='yolo_darknet')(x)
x = YoloConvTiny(256, name='yolo_conv_0')(x)
output_0 = YoloOutput(256, len(masks[0]), classes, name='yolo_output_0')(x)
x = YoloConvTiny(128, name='yolo_conv_1')((x, x_8))
output_1 = YoloOutput(128, len(masks[1]), classes, name='yolo_output_1')(x)
if training:
return Model(inputs, (output_0, output_1), name='yolov3')
boxes_0 = Lambda(lambda x: yolo_boxes(x, anchors[masks[0]], classes),
name='yolo_boxes_0')(output_0)
boxes_1 = Lambda(lambda x: yolo_boxes(x, anchors[masks[1]], classes),
name='yolo_boxes_1')(output_1)
outputs = Lambda(lambda x: yolo_nms(x, anchors, masks, classes),
name='yolo_nms')((boxes_0[:3], boxes_1[:3]))
return Model(inputs, outputs, name='yolov3_tiny')
def YoloLoss(anchors, classes=80, ignore_thresh=0.5):
def yolo_loss(y_true, y_pred):
# 1. transform all pred outputs
# y_pred: (batch_size, grid, grid, anchors, (x, y, w, h, obj, ...cls))
pred_box, pred_obj, pred_class, pred_xywh = yolo_boxes(
y_pred, anchors, classes)
pred_xy = pred_xywh[..., 0:2]
pred_wh = pred_xywh[..., 2:4]
# 2. transform all true outputs
# y_true: (batch_size, grid, grid, anchors, (x1, y1, x2, y2, obj, cls))
true_box, true_obj, true_class_idx = tf.split(
y_true, (4, 1, 1), axis=-1)
true_xy = (true_box[..., 0:2] + true_box[..., 2:4]) / 2
true_wh = true_box[..., 2:4] - true_box[..., 0:2]
# give higher weights to small boxes
box_loss_scale = 2 - true_wh[..., 0] * true_wh[..., 1]
# 3. inverting the pred box equations
grid_size = tf.shape(y_true)[1]
grid = tf.meshgrid(tf.range(grid_size), tf.range(grid_size))
grid = tf.expand_dims(tf.stack(grid, axis=-1), axis=2)
true_xy = true_xy * tf.cast(grid_size, tf.float32) - \
tf.cast(grid, tf.float32)
true_wh = tf.math.log(true_wh / anchors)
true_wh = tf.where(tf.math.is_inf(true_wh),
tf.zeros_like(true_wh), true_wh)
# 4. calculate all masks
obj_mask = tf.squeeze(true_obj, -1)
# ignore false positive when iou is over threshold
best_iou = tf.map_fn(
lambda x: tf.reduce_max(broadcast_iou(x[0], tf.boolean_mask(
x[1], tf.cast(x[2], tf.bool))), axis=-1),
(pred_box, true_box, obj_mask),
tf.float32)
ignore_mask = tf.cast(best_iou < ignore_thresh, tf.float32)
# 5. calculate all losses
xy_loss = obj_mask * box_loss_scale * \
tf.reduce_sum(tf.square(true_xy - pred_xy), axis=-1)
wh_loss = obj_mask * box_loss_scale * \
tf.reduce_sum(tf.square(true_wh - pred_wh), axis=-1)
obj_loss = binary_crossentropy(true_obj, pred_obj)
obj_loss = obj_mask * obj_loss + \
(1 - obj_mask) * ignore_mask * obj_loss
# TODO: use binary_crossentropy instead
class_loss = obj_mask * sparse_categorical_crossentropy(
true_class_idx, pred_class)
# 6. sum over (batch, gridx, gridy, anchors) => (batch, 1) | xy_loss = tf.reduce_sum(xy_loss, axis=(1, 2, 3))
wh_loss = tf.reduce_sum(wh_loss, axis=(1, 2, 3))
obj_loss = tf.reduce_sum(obj_loss, axis=(1, 2, 3))
class_loss = tf.reduce_sum(class_loss, axis=(1, 2, 3))
return xy_loss + wh_loss + obj_loss + class_loss
return yolo_loss | |
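# Editorial training sketch (optimizer choice and dataset pipeline are
# assumptions, not defined in this module):
#   model = YoloV3(416, classes=80, training=True)
#   loss = [YoloLoss(yolo_anchors[mask], classes=80)
#           for mask in yolo_anchor_masks]
#   model.compile(optimizer=tf.keras.optimizers.Adam(1e-3), loss=loss)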
rflags.rs | //! Processor state stored in the RFLAGS register.
#[cfg(feature = "instructions")]
pub use self::x86_64::*;
use bitflags::bitflags;
bitflags! {
/// The RFLAGS register.
pub struct RFlags: u64 {
/// Processor feature identification flag.
///
/// If this flag is modifiable, the CPU supports CPUID.
const ID = 1 << 21;
/// Indicates that an external, maskable interrupt is pending.
///
/// Used when virtual-8086 mode extensions (CR4.VME) or protected-mode virtual
/// interrupts (CR4.PVI) are activated.
const VIRTUAL_INTERRUPT_PENDING = 1 << 20;
/// Virtual image of the INTERRUPT_FLAG bit.
///
/// Used when virtual-8086 mode extensions (CR4.VME) or protected-mode virtual
/// interrupts (CR4.PVI) are activated.
const VIRTUAL_INTERRUPT = 1 << 19;
/// Enable automatic alignment checking if CR0.AM is set. Only works if CPL is 3.
const ALIGNMENT_CHECK = 1 << 18;
/// Enable the virtual-8086 mode.
const VIRTUAL_8086_MODE = 1 << 17;
        /// Allows restarting an instruction following an instruction breakpoint.
const RESUME_FLAG = 1 << 16;
/// Used by `iret` in hardware task switch mode to determine if current task is nested.
const NESTED_TASK = 1 << 14;
/// The high bit of the I/O Privilege Level field.
///
/// Specifies the privilege level required for executing I/O address-space instructions.
const IOPL_HIGH = 1 << 13;
/// The low bit of the I/O Privilege Level field.
///
/// Specifies the privilege level required for executing I/O address-space instructions.
const IOPL_LOW = 1 << 12;
/// Set by hardware to indicate that the sign bit of the result of the last signed integer
/// operation differs from the source operands.
const OVERFLOW_FLAG = 1 << 11;
/// Determines the order in which strings are processed.
const DIRECTION_FLAG = 1 << 10;
/// Enable interrupts.
const INTERRUPT_FLAG = 1 << 9;
/// Enable single-step mode for debugging.
const TRAP_FLAG = 1 << 8;
/// Set by hardware if last arithmetic operation resulted in a negative value.
const SIGN_FLAG = 1 << 7;
/// Set by hardware if last arithmetic operation resulted in a zero value.
const ZERO_FLAG = 1 << 6;
        /// Set by hardware if last arithmetic operation generated a carry out of bit 3 of the
/// result.
const AUXILIARY_CARRY_FLAG = 1 << 4;
/// Set by hardware if last result has an even number of 1 bits (only for some operations).
const PARITY_FLAG = 1 << 2;
/// Set by hardware if last arithmetic operation generated a carry out of the
/// most-significant bit of the result.
const CARRY_FLAG = 1;
}
}
#[cfg(feature = "instructions")]
mod x86_64 {
use super::*;
/// Returns the current value of the RFLAGS register.
///
/// Drops any unknown bits.
#[inline]
pub fn read() -> RFlags {
RFlags::from_bits_truncate(read_raw())
}
/// Returns the raw current value of the RFLAGS register.
#[inline]
pub fn read_raw() -> u64 {
let r: u64;
#[cfg(feature = "inline_asm")]
unsafe {
llvm_asm!("pushfq; popq $0" : "=r"(r) :: "memory")
};
#[cfg(not(feature = "inline_asm"))]
unsafe {
r = crate::asm::x86_64_asm_read_rflags();
};
r
}
/// Writes the RFLAGS register, preserves reserved bits.
#[inline]
pub fn write(flags: RFlags) {
let old_value = read_raw();
let reserved = old_value & !(RFlags::all().bits());
let new_value = reserved | flags.bits();
write_raw(new_value);
}
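    // Editorial sketch of the read-modify-write pattern `write` enables
    // (illustrative only):
    //   let mut flags = read();
    //   flags.insert(RFlags::ALIGNMENT_CHECK);
    //   write(flags); // reserved bits survive: `write` merges them back in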
/// Writes the RFLAGS register.
///
/// Does not preserve any bits, including reserved bits.
#[inline]
pub fn write_raw(val: u64) {
#[cfg(feature = "inline_asm")]
unsafe {
llvm_asm!("pushq $0; popfq" :: "r"(val) : "memory" "flags")
};
#[cfg(not(feature = "inline_asm"))]
unsafe {
crate::asm::x86_64_asm_write_rflags(val)
}
}
#[cfg(test)]
mod test {
use crate::registers::rflags::read;
#[test]
fn | () {
let rflags = read();
println!("{:#?}", rflags);
}
}
}
| rflags_read |
__init__.py | # encoding: utf-8
"""Initializes lxml parser, particularly the custom element classes.
Also makes available a handful of functions that wrap its typical uses.
"""
from __future__ import (
absolute_import, division, print_function, unicode_literals
)
import os
from lxml import etree
from .ns import NamespacePrefixedTag
# configure etree XML parser -------------------------------
element_class_lookup = etree.ElementNamespaceClassLookup()
oxml_parser = etree.XMLParser(remove_blank_text=True, resolve_entities=False)
oxml_parser.set_element_class_lookup(element_class_lookup)
def parse_from_template(template_name):
|
def parse_xml(xml):
"""
Return root lxml element obtained by parsing XML character string in
*xml*, which can be either a Python 2.x string or unicode.
"""
root_element = etree.fromstring(xml, oxml_parser)
return root_element
def register_element_cls(nsptagname, cls):
"""
Register *cls* to be constructed when the oxml parser encounters an
    element having name *nsptagname*. *nsptagname* is a string of the form
``nspfx:tagroot``, e.g. ``'w:document'``.
"""
nsptag = NamespacePrefixedTag(nsptagname)
namespace = element_class_lookup.get_namespace(nsptag.nsuri)
namespace[nsptag.local_part] = cls
from .action import CT_Hyperlink # noqa: E402
register_element_cls('a:hlinkClick', CT_Hyperlink)
register_element_cls('a:hlinkHover', CT_Hyperlink)
from .chart.axis import ( # noqa: E402
CT_AxisUnit, CT_CatAx, CT_ChartLines, CT_Crosses, CT_DateAx,
CT_LblOffset, CT_Scaling, CT_TickLblPos, CT_TickMark, CT_ValAx
)
register_element_cls('c:catAx', CT_CatAx)
register_element_cls('c:crosses', CT_Crosses)
register_element_cls('c:dateAx', CT_DateAx)
register_element_cls('c:lblOffset', CT_LblOffset)
register_element_cls('c:majorGridlines', CT_ChartLines)
register_element_cls('c:majorTickMark', CT_TickMark)
register_element_cls('c:majorUnit', CT_AxisUnit)
register_element_cls('c:minorTickMark', CT_TickMark)
register_element_cls('c:minorUnit', CT_AxisUnit)
register_element_cls('c:scaling', CT_Scaling)
register_element_cls('c:tickLblPos', CT_TickLblPos)
register_element_cls('c:valAx', CT_ValAx)
from .chart.chart import ( # noqa: E402
CT_Chart, CT_ChartSpace, CT_ExternalData, CT_PlotArea, CT_Style
)
register_element_cls('c:chart', CT_Chart)
register_element_cls('c:chartSpace', CT_ChartSpace)
register_element_cls('c:externalData', CT_ExternalData)
register_element_cls('c:plotArea', CT_PlotArea)
register_element_cls('c:style', CT_Style)
from .chart.datalabel import CT_DLbl, CT_DLblPos, CT_DLbls # noqa: E402
register_element_cls('c:dLbl', CT_DLbl)
register_element_cls('c:dLblPos', CT_DLblPos)
register_element_cls('c:dLbls', CT_DLbls)
from .chart.legend import CT_Legend, CT_LegendPos # noqa: E402
register_element_cls('c:legend', CT_Legend)
register_element_cls('c:legendPos', CT_LegendPos)
from .chart.marker import ( # noqa: E402
CT_Marker, CT_MarkerSize, CT_MarkerStyle
)
register_element_cls('c:marker', CT_Marker)
register_element_cls('c:size', CT_MarkerSize)
register_element_cls('c:symbol', CT_MarkerStyle)
from .chart.plot import ( # noqa: E402
CT_Area3DChart, CT_AreaChart, CT_BarChart, CT_BarDir, CT_BubbleChart,
CT_BubbleScale, CT_DoughnutChart, CT_GapAmount, CT_Grouping,
CT_LineChart, CT_Overlap, CT_PieChart, CT_RadarChart, CT_ScatterChart
)
register_element_cls('c:area3DChart', CT_Area3DChart)
register_element_cls('c:areaChart', CT_AreaChart)
register_element_cls('c:barChart', CT_BarChart)
register_element_cls('c:barDir', CT_BarDir)
register_element_cls('c:bubbleChart', CT_BubbleChart)
register_element_cls('c:bubbleScale', CT_BubbleScale)
register_element_cls('c:doughnutChart', CT_DoughnutChart)
register_element_cls('c:gapWidth', CT_GapAmount)
register_element_cls('c:grouping', CT_Grouping)
register_element_cls('c:lineChart', CT_LineChart)
register_element_cls('c:overlap', CT_Overlap)
register_element_cls('c:pieChart', CT_PieChart)
register_element_cls('c:radarChart', CT_RadarChart)
register_element_cls('c:scatterChart', CT_ScatterChart)
from .chart.series import ( # noqa: E402
CT_AxDataSource, CT_DPt, CT_Lvl, CT_NumDataSource, CT_SeriesComposite,
CT_StrVal_NumVal_Composite
)
register_element_cls('c:bubbleSize', CT_NumDataSource)
register_element_cls('c:cat', CT_AxDataSource)
register_element_cls('c:dPt', CT_DPt)
register_element_cls('c:lvl', CT_Lvl)
register_element_cls('c:pt', CT_StrVal_NumVal_Composite)
register_element_cls('c:ser', CT_SeriesComposite)
register_element_cls('c:val', CT_NumDataSource)
register_element_cls('c:xVal', CT_NumDataSource)
register_element_cls('c:yVal', CT_NumDataSource)
from .chart.shared import ( # noqa: E402
CT_Boolean, CT_Boolean_Explicit, CT_Double, CT_Layout, CT_LayoutMode,
CT_ManualLayout, CT_NumFmt, CT_Title, CT_Tx, CT_UnsignedInt
)
register_element_cls('c:autoUpdate', CT_Boolean)
register_element_cls('c:bubble3D', CT_Boolean)
register_element_cls('c:crossAx', CT_UnsignedInt)
register_element_cls('c:crossesAt', CT_Double)
register_element_cls('c:date1904', CT_Boolean)
register_element_cls('c:delete', CT_Boolean)
register_element_cls('c:idx', CT_UnsignedInt)
register_element_cls('c:invertIfNegative', CT_Boolean_Explicit)
register_element_cls('c:layout', CT_Layout)
register_element_cls('c:manualLayout', CT_ManualLayout)
register_element_cls('c:max', CT_Double)
register_element_cls('c:min', CT_Double)
register_element_cls('c:numFmt', CT_NumFmt)
register_element_cls('c:order', CT_UnsignedInt)
register_element_cls('c:overlay', CT_Boolean_Explicit)
register_element_cls('c:ptCount', CT_UnsignedInt)
register_element_cls('c:showLegendKey', CT_Boolean_Explicit)
register_element_cls('c:showVal', CT_Boolean_Explicit)
register_element_cls('c:smooth', CT_Boolean)
register_element_cls('c:title', CT_Title)
register_element_cls('c:tx', CT_Tx)
register_element_cls('c:varyColors', CT_Boolean)
register_element_cls('c:x', CT_Double)
register_element_cls('c:xMode', CT_LayoutMode)
from .coreprops import CT_CoreProperties # noqa: E402
register_element_cls('cp:coreProperties', CT_CoreProperties)
from .dml.color import ( # noqa: E402
CT_Color, CT_HslColor, CT_Percentage, CT_PresetColor, CT_SchemeColor,
CT_ScRgbColor, CT_SRgbColor, CT_SystemColor
)
register_element_cls('a:bgClr', CT_Color)
register_element_cls('a:fgClr', CT_Color)
register_element_cls('a:hslClr', CT_HslColor)
register_element_cls('a:lumMod', CT_Percentage)
register_element_cls('a:lumOff', CT_Percentage)
register_element_cls('a:prstClr', CT_PresetColor)
register_element_cls('a:schemeClr', CT_SchemeColor)
register_element_cls('a:scrgbClr', CT_ScRgbColor)
register_element_cls('a:srgbClr', CT_SRgbColor)
register_element_cls('a:sysClr', CT_SystemColor)
from .dml.fill import ( # noqa: E402
CT_Blip, CT_BlipFillProperties, CT_GradientFillProperties,
CT_GroupFillProperties, CT_NoFillProperties, CT_PatternFillProperties,
CT_RelativeRect, CT_SolidColorFillProperties
)
register_element_cls('a:blip', CT_Blip)
register_element_cls('a:blipFill', CT_BlipFillProperties)
register_element_cls('a:gradFill', CT_GradientFillProperties)
register_element_cls('a:grpFill', CT_GroupFillProperties)
register_element_cls('a:noFill', CT_NoFillProperties)
register_element_cls('a:pattFill', CT_PatternFillProperties)
register_element_cls('a:solidFill', CT_SolidColorFillProperties)
register_element_cls('a:srcRect', CT_RelativeRect)
from .dml.line import CT_PresetLineDashProperties # noqa: E402
register_element_cls('a:prstDash', CT_PresetLineDashProperties)
from .presentation import ( # noqa: E402
CT_Presentation, CT_SlideId, CT_SlideIdList, CT_SlideMasterIdList,
CT_SlideMasterIdListEntry, CT_SlideSize
)
register_element_cls('p:presentation', CT_Presentation)
register_element_cls('p:sldId', CT_SlideId)
register_element_cls('p:sldIdLst', CT_SlideIdList)
register_element_cls('p:sldMasterId', CT_SlideMasterIdListEntry)
register_element_cls('p:sldMasterIdLst', CT_SlideMasterIdList)
register_element_cls('p:sldSz', CT_SlideSize)
from .shapes.autoshape import ( # noqa: E402
CT_AdjPoint2D, CT_CustomGeometry2D, CT_GeomGuide, CT_GeomGuideList,
CT_NonVisualDrawingShapeProps, CT_Path2D, CT_Path2DClose,
CT_Path2DLineTo, CT_Path2DList, CT_Path2DMoveTo, CT_PresetGeometry2D,
CT_Shape, CT_ShapeNonVisual
)
register_element_cls('a:avLst', CT_GeomGuideList)
register_element_cls('a:custGeom', CT_CustomGeometry2D)
register_element_cls('a:gd', CT_GeomGuide)
register_element_cls('a:close', CT_Path2DClose)
register_element_cls('a:lnTo', CT_Path2DLineTo)
register_element_cls('a:moveTo', CT_Path2DMoveTo)
register_element_cls('a:path', CT_Path2D)
register_element_cls('a:pathLst', CT_Path2DList)
register_element_cls('a:prstGeom', CT_PresetGeometry2D)
register_element_cls('a:pt', CT_AdjPoint2D)
register_element_cls('p:cNvSpPr', CT_NonVisualDrawingShapeProps)
register_element_cls('p:nvSpPr', CT_ShapeNonVisual)
register_element_cls('p:sp', CT_Shape)
from .shapes.connector import ( # noqa: E402
CT_Connection, CT_Connector, CT_ConnectorNonVisual,
CT_NonVisualConnectorProperties
)
register_element_cls('a:endCxn', CT_Connection)
register_element_cls('a:stCxn', CT_Connection)
register_element_cls('p:cNvCxnSpPr', CT_NonVisualConnectorProperties)
register_element_cls('p:cxnSp', CT_Connector)
register_element_cls('p:nvCxnSpPr', CT_ConnectorNonVisual)
from .shapes.graphfrm import ( # noqa: E402
CT_GraphicalObject, CT_GraphicalObjectData, CT_GraphicalObjectFrame,
CT_GraphicalObjectFrameNonVisual
)
register_element_cls('a:graphic', CT_GraphicalObject)
register_element_cls('a:graphicData', CT_GraphicalObjectData)
register_element_cls('p:graphicFrame', CT_GraphicalObjectFrame)
register_element_cls('p:nvGraphicFramePr', CT_GraphicalObjectFrameNonVisual)
from .shapes.groupshape import ( # noqa: E402
CT_GroupShape, CT_GroupShapeNonVisual, CT_GroupShapeProperties
)
register_element_cls('p:grpSp', CT_GroupShape)
register_element_cls('p:grpSpPr', CT_GroupShapeProperties)
register_element_cls('p:nvGrpSpPr', CT_GroupShapeNonVisual)
register_element_cls('p:spTree', CT_GroupShape)
from .shapes.picture import CT_Picture, CT_PictureNonVisual # noqa: E402
register_element_cls('p:blipFill', CT_BlipFillProperties)
register_element_cls('p:nvPicPr', CT_PictureNonVisual)
register_element_cls('p:pic', CT_Picture)
from .shapes.shared import ( # noqa: E402
CT_ApplicationNonVisualDrawingProps, CT_LineProperties,
CT_NonVisualDrawingProps, CT_Placeholder, CT_Point2D, CT_PositiveSize2D,
CT_ShapeProperties, CT_Transform2D
)
register_element_cls('a:ext', CT_PositiveSize2D)
register_element_cls('a:ln', CT_LineProperties)
register_element_cls('a:off', CT_Point2D)
register_element_cls('a:xfrm', CT_Transform2D)
register_element_cls('c:spPr', CT_ShapeProperties)
register_element_cls('p:cNvPr', CT_NonVisualDrawingProps)
register_element_cls('p:nvPr', CT_ApplicationNonVisualDrawingProps)
register_element_cls('p:ph', CT_Placeholder)
register_element_cls('p:spPr', CT_ShapeProperties)
register_element_cls('p:xfrm', CT_Transform2D)
from .shapes.table import ( # noqa: E402
CT_Table, CT_TableCell, CT_TableCellProperties, CT_TableCol,
CT_TableGrid, CT_TableProperties, CT_TableRow
)
register_element_cls('a:gridCol', CT_TableCol)
register_element_cls('a:tbl', CT_Table)
register_element_cls('a:tblGrid', CT_TableGrid)
register_element_cls('a:tblPr', CT_TableProperties)
register_element_cls('a:tc', CT_TableCell)
register_element_cls('a:tcPr', CT_TableCellProperties)
register_element_cls('a:tr', CT_TableRow)
from .slide import ( # noqa: E402
CT_CommonSlideData, CT_NotesMaster, CT_NotesSlide, CT_Slide,
CT_SlideLayout, CT_SlideLayoutIdList, CT_SlideLayoutIdListEntry,
CT_SlideMaster, CT_SlideTiming, CT_TimeNodeList, CT_TLMediaNodeVideo
)
register_element_cls('p:childTnLst', CT_TimeNodeList)
register_element_cls('p:cSld', CT_CommonSlideData)
register_element_cls('p:notes', CT_NotesSlide)
register_element_cls('p:notesMaster', CT_NotesMaster)
register_element_cls('p:sld', CT_Slide)
register_element_cls('p:sldLayout', CT_SlideLayout)
register_element_cls('p:sldLayoutId', CT_SlideLayoutIdListEntry)
register_element_cls('p:sldLayoutIdLst', CT_SlideLayoutIdList)
register_element_cls('p:sldMaster', CT_SlideMaster)
register_element_cls('p:timing', CT_SlideTiming)
register_element_cls('p:video', CT_TLMediaNodeVideo)
from .text import ( # noqa: E402
CT_RegularTextRun, CT_TextBody, CT_TextBodyProperties,
CT_TextCharacterProperties, CT_TextField, CT_TextFont, CT_TextLineBreak,
CT_TextNormalAutofit, CT_TextParagraph, CT_TextParagraphProperties,
CT_TextSpacing, CT_TextSpacingPercent, CT_TextSpacingPoint
)
register_element_cls('a:bodyPr', CT_TextBodyProperties)
register_element_cls('a:br', CT_TextLineBreak)
register_element_cls('a:defRPr', CT_TextCharacterProperties)
register_element_cls('a:endParaRPr', CT_TextCharacterProperties)
register_element_cls('a:fld', CT_TextField)
register_element_cls('a:latin', CT_TextFont)
register_element_cls('a:lnSpc', CT_TextSpacing)
register_element_cls('a:normAutofit', CT_TextNormalAutofit)
register_element_cls('a:r', CT_RegularTextRun)
register_element_cls('a:p', CT_TextParagraph)
register_element_cls('a:pPr', CT_TextParagraphProperties)
register_element_cls('c:rich', CT_TextBody)
register_element_cls('a:rPr', CT_TextCharacterProperties)
register_element_cls('a:spcAft', CT_TextSpacing)
register_element_cls('a:spcBef', CT_TextSpacing)
register_element_cls('a:spcPct', CT_TextSpacingPercent)
register_element_cls('a:spcPts', CT_TextSpacingPoint)
register_element_cls('a:txBody', CT_TextBody)
register_element_cls('c:txPr', CT_TextBody)
register_element_cls('p:txBody', CT_TextBody)
from .theme import CT_OfficeStyleSheet # noqa: E402
register_element_cls('a:theme', CT_OfficeStyleSheet)
| """
Return an element loaded from the XML in the template file identified by
*template_name*.
"""
thisdir = os.path.split(__file__)[0]
filename = os.path.join(
thisdir, '..', 'templates', '%s.xml' % template_name
)
with open(filename, 'rb') as f:
xml = f.read()
return parse_xml(xml) |
admission_test.go | /**
* Copyright (C) 2015 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
Copyright 2015 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package label
import (
"testing"
"fmt"
"k8s.io/apimachinery/pkg/api/resource"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/types"
"k8s.io/apiserver/pkg/admission"
api "k8s.io/kubernetes/pkg/apis/core"
"k8s.io/kubernetes/pkg/cloudprovider/providers/aws"
)
type mockVolumes struct {
volumeLabels map[string]string
volumeLabelsError error
}
var _ aws.Volumes = &mockVolumes{}
func (v *mockVolumes) AttachDisk(diskName aws.KubernetesVolumeID, nodeName types.NodeName, readOnly bool) (string, error) {
return "", fmt.Errorf("not implemented")
}
func (v *mockVolumes) DetachDisk(diskName aws.KubernetesVolumeID, nodeName types.NodeName) (string, error) {
return "", fmt.Errorf("not implemented")
}
func (v *mockVolumes) CreateDisk(volumeOptions *aws.VolumeOptions) (volumeName aws.KubernetesVolumeID, err error) {
return "", fmt.Errorf("not implemented")
}
func (v *mockVolumes) DeleteDisk(volumeName aws.KubernetesVolumeID) (bool, error) {
return false, fmt.Errorf("not implemented")
}
func (v *mockVolumes) GetVolumeLabels(volumeName aws.KubernetesVolumeID) (map[string]string, error) {
return v.volumeLabels, v.volumeLabelsError
}
func (v *mockVolumes) GetDiskPath(volumeName aws.KubernetesVolumeID) (string, error) {
return "", fmt.Errorf("not implemented")
}
func (v *mockVolumes) DiskIsAttached(volumeName aws.KubernetesVolumeID, nodeName types.NodeName) (bool, error) {
return false, fmt.Errorf("not implemented")
}
func (v *mockVolumes) DisksAreAttached(nodeDisks map[types.NodeName][]aws.KubernetesVolumeID) (map[types.NodeName]map[aws.KubernetesVolumeID]bool, error) {
return nil, fmt.Errorf("not implemented")
}
func (v *mockVolumes) ResizeDisk(
diskName aws.KubernetesVolumeID,
oldSize resource.Quantity,
newSize resource.Quantity) (resource.Quantity, error) {
return oldSize, nil
}
func mockVolumeFailure(err error) *mockVolumes {
return &mockVolumes{volumeLabelsError: err}
}
func mockVolumeLabels(labels map[string]string) *mockVolumes {
return &mockVolumes{volumeLabels: labels}
}
// TestAdmission verifies the PersistentVolumeLabel admission controller behavior for cloud and non-cloud PVs.
func TestAdmission(t *testing.T) {
pvHandler := NewPersistentVolumeLabel()
handler := admission.NewChainHandler(pvHandler)
ignoredPV := api.PersistentVolume{
ObjectMeta: metav1.ObjectMeta{Name: "noncloud", Namespace: "myns"},
Spec: api.PersistentVolumeSpec{
PersistentVolumeSource: api.PersistentVolumeSource{
HostPath: &api.HostPathVolumeSource{
Path: "/",
},
},
},
}
awsPV := api.PersistentVolume{
ObjectMeta: metav1.ObjectMeta{Name: "noncloud", Namespace: "myns"},
Spec: api.PersistentVolumeSpec{
PersistentVolumeSource: api.PersistentVolumeSource{
AWSElasticBlockStore: &api.AWSElasticBlockStoreVolumeSource{
VolumeID: "123",
},
},
},
}
// Non-cloud PVs are ignored
err := handler.Admit(admission.NewAttributesRecord(&ignoredPV, nil, api.Kind("PersistentVolume").WithVersion("version"), ignoredPV.Namespace, ignoredPV.Name, api.Resource("persistentvolumes").WithVersion("version"), "", admission.Create, nil))
if err != nil {
t.Errorf("Unexpected error returned from admission handler (on ignored pv): %v", err)
}
// We only add labels on creation
err = handler.Admit(admission.NewAttributesRecord(&awsPV, nil, api.Kind("PersistentVolume").WithVersion("version"), awsPV.Namespace, awsPV.Name, api.Resource("persistentvolumes").WithVersion("version"), "", admission.Delete, nil))
if err != nil {
t.Errorf("Unexpected error returned from admission handler (when deleting aws pv): %v", err)
}
// Errors from the cloudprovider block creation of the volume
pvHandler.ebsVolumes = mockVolumeFailure(fmt.Errorf("invalid volume"))
err = handler.Admit(admission.NewAttributesRecord(&awsPV, nil, api.Kind("PersistentVolume").WithVersion("version"), awsPV.Namespace, awsPV.Name, api.Resource("persistentvolumes").WithVersion("version"), "", admission.Create, nil))
if err == nil {
t.Errorf("Expected error when aws pv info fails")
}
// Don't add labels if the cloudprovider doesn't return any
labels := make(map[string]string)
pvHandler.ebsVolumes = mockVolumeLabels(labels)
err = handler.Admit(admission.NewAttributesRecord(&awsPV, nil, api.Kind("PersistentVolume").WithVersion("version"), awsPV.Namespace, awsPV.Name, api.Resource("persistentvolumes").WithVersion("version"), "", admission.Create, nil))
if err != nil {
t.Errorf("Expected no error when creating aws pv")
}
if len(awsPV.ObjectMeta.Labels) != 0 {
t.Errorf("Unexpected number of labels")
}
// Don't panic if the cloudprovider returns nil, nil
pvHandler.ebsVolumes = mockVolumeFailure(nil)
err = handler.Admit(admission.NewAttributesRecord(&awsPV, nil, api.Kind("PersistentVolume").WithVersion("version"), awsPV.Namespace, awsPV.Name, api.Resource("persistentvolumes").WithVersion("version"), "", admission.Create, nil))
if err != nil {
t.Errorf("Expected no error when cloud provider returns empty labels")
}
// Labels from the cloudprovider should be applied to the volume
labels = make(map[string]string)
labels["a"] = "1"
labels["b"] = "2"
pvHandler.ebsVolumes = mockVolumeLabels(labels)
err = handler.Admit(admission.NewAttributesRecord(&awsPV, nil, api.Kind("PersistentVolume").WithVersion("version"), awsPV.Namespace, awsPV.Name, api.Resource("persistentvolumes").WithVersion("version"), "", admission.Create, nil))
if err != nil |
if awsPV.Labels["a"] != "1" || awsPV.Labels["b"] != "2" {
t.Errorf("Expected label a to be added when creating aws pv")
}
// User-provided labels should be honored, but cloudprovider labels replace them when they overlap
awsPV.ObjectMeta.Labels = make(map[string]string)
awsPV.ObjectMeta.Labels["a"] = "not1"
awsPV.ObjectMeta.Labels["c"] = "3"
err = handler.Admit(admission.NewAttributesRecord(&awsPV, nil, api.Kind("PersistentVolume").WithVersion("version"), awsPV.Namespace, awsPV.Name, api.Resource("persistentvolumes").WithVersion("version"), "", admission.Create, nil))
if err != nil {
t.Errorf("Expected no error when creating aws pv")
}
if awsPV.Labels["a"] != "1" || awsPV.Labels["b"] != "2" {
t.Errorf("Expected cloudprovider labels to replace user labels when creating aws pv")
}
if awsPV.Labels["c"] != "3" {
t.Errorf("Expected (non-conflicting) user provided labels to be honored when creating aws pv")
}
}
| {
t.Errorf("Expected no error when creating aws pv")
} |
types.go | package fsm // import "github.com/docker/infrakit/pkg/fsm"
// ID is the id of the instance in a given set. It's unique in that set.
type ID uint64
// FSM is the interface that returns ID and state of the fsm instance safely.
type FSM interface {
// ID returns the ID of the instance
ID() ID
// State returns the state of the instance. This is an expensive call: it is submitted through the set's queue to get a consistent view.
State() Index
// Data returns the custom data attached to the instance. It's set via the optional arg in Signal
Data() interface{}
// Signal signals the instance with optional custom data
Signal(Signal, ...interface{}) error
// CanReceive returns true if the current state of the instance can receive the given signal
CanReceive(Signal) bool
}
// Index is the index of the state in a FSM
type Index int
// Action is the action to take when a signal is received, prior to transition
// to the next state. The error returned by the function is an exception which
// will put the state machine in an error state. This error state is not the same
// as some application-specific error state which is a state defined to correspond
// to some external event indicating a real-world error event (as opposed to a
// programming error here).
type Action func(FSM) error
| type Tick int64
// Time is a unit of time not corresponding to wall time
type Time int64
// Expiry specifies the rule for TTL. A state can have a TTL / deadline; when it
// expires, a signal is raised.
type Expiry struct {
TTL Tick
Raise Signal
}
// Limit is a struct that captures the limit and what signal to raise
type Limit struct {
Value int
Raise Signal
}
// Signal is a signal that can drive the state machine to transfer from one state to the next.
type Signal int
// State encapsulates all the possible transitions and actions to perform during the
// state transition. A state can have a TTL so that it is allowed to be in that
// state for a given TTL. On expiration, a signal is raised.
type State struct {
// Index is a unique key of the state
Index Index
// Transitions fully specifies all the possible transitions from this state, by the way of signals.
Transitions map[Signal]Index
// Actions specify, for each signal, what code / action is to be executed as the fsm transitions from one state to the next.
Actions map[Signal]Action
// Errors specifies the handling of errors when executing action. On action error, the mapped state is transitioned.
Errors map[Signal]Index
// TTL specifies how long this state can last before a signal is raised.
TTL Expiry
// Visit specifies a limit on the number of times the fsm can visit this state before raising a signal.
Visit Limit
}
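// Illustrative sketch (not part of the package): a minimal two-state spec
// built from the types above. The state and signal names here are
// hypothetical, chosen only to show the shape of a definition.
//
//	const (
//		stopped Index = iota
//		running
//	)
//	const (
//		start Signal = iota
//		timeout
//	)
//	states := []State{
//		{
//			Index:       stopped,
//			Transitions: map[Signal]Index{start: running},
//		},
//		{
//			Index:       running,
//			Transitions: map[Signal]Index{timeout: stopped},
//			TTL:         Expiry{TTL: 5, Raise: timeout},
//		},
//	}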
// DefaultOptions returns default values
func DefaultOptions(name string) Options {
return Options{
Name: name,
BufferSize: defaultBufferSize,
IgnoreUndefinedTransitions: true,
IgnoreUndefinedSignals: true,
IgnoreUndefinedStates: true,
}
}
// Options contains options for the set
type Options struct {
// Name is the name of the set
Name string
// BufferSize is the size of transaction queue/buffered channel
BufferSize int
// IgnoreUndefinedStates will not report error from undefined states for transition on Error() chan, if true
IgnoreUndefinedStates bool
// IgnoreUndefinedTransitions will not report error from undefined transitions for signal on Error() chan, if true
IgnoreUndefinedTransitions bool
// IgnoreUndefinedSignals will not report error from undefined signal for the state on Error() chan, if true
IgnoreUndefinedSignals bool
}
type addOp struct {
initial Index
result chan FSM
}
// Set is a collection of fsm instances that follow a given spec. This is
// the primary interface to manipulate the instances... by sending signals to it via channels.
type Set struct {
options Options
spec Spec
now Time
next ID
clock *Clock
members map[ID]*instance
bystate map[Index]map[ID]*instance
reads chan func(Set) // given a view which is a copy of the Set
stop chan struct{}
add chan addOp
delete chan ID // delete an instance with id
errors chan error
events chan *event
transactions chan *txn
deadlines *queue
running bool
} | // Tick is a unit of time. Time is in relative terms and synchronized with an actual
// timer that's provided by the client. |
_stdlib.py | import os
import shutil
import sys
import tempfile
import types
from importlib2._fixers import (swap, SimpleNamespace, new_class,
_thread, builtins)
from importlib2._fixers._modules import mod_from_ns
def fix_builtins(builtins=builtins):
sys.modules.setdefault('builtins', builtins)
def fix_types(types=types):
types.SimpleNamespace = SimpleNamespace
types.new_class = new_class
return types
def fix_collections():
try:
import collections.abc
except ImportError:
import collections
collections.abc = collections
sys.modules['collections.abc'] = collections
def fix_tempfile():
if not hasattr(tempfile, 'TemporaryDirectory'):
class TemporaryDirectory(object):
def __init__(self):
self.name = tempfile.mkdtemp()
def __enter__(self):
return self
def __exit__(self, *args):
shutil.rmtree(self.name, ignore_errors=True)
tempfile.TemporaryDirectory = TemporaryDirectory
def fix_os(os=os):
if not hasattr(os, 'fsencode'):
os.fsencode = lambda s: s
if not hasattr(os, 'fsdecode'):
os.fsdecode = lambda s: s
def fix_thread(_thread=_thread):
sys.modules['_thread'] = _thread
if not hasattr(_thread, 'TIMEOUT_MAX'):
_thread.TIMEOUT_MAX = 10 # XXX Make it accurate.
if not hasattr(_thread, '_set_sentinel'):
_thread._set_sentinel = lambda: _thread.allocate_lock()
def inject_threading():
from . import threading
sys.modules['threading'] = threading
#################################################
# testing
def fix_unittest():
import unittest
# Add in unittest.TestCase.subTest.
if not hasattr(unittest.TestCase, 'subTest'):
from contextlib import contextmanager
@contextmanager
def subTest(self, *args, **kwargs):
yield
unittest.TestCase.subTest = subTest
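# Illustrative sketch (not part of the module): with the shim above, test
# code can keep the Python 3 subTest idiom unchanged, though the shim is a
# no-op and failures are not labeled per-iteration:
#
#   for i in (1, 2, 3):
#       with self.subTest(i=i):
#           self.assertLess(i, 4)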
# Add in a fake unittest.mock.
try:
import unittest.mock
except ImportError:
def patched(obj, attr):
def mocked(*args, **kwargs):
try:
exc = mocked.side_effect
except AttributeError:
return mocked.return_value
else:
raise exc
return swap(obj, attr, mocked, pop=False)
from importlib2 import _bootstrap
mock = _bootstrap._new_module('unittest.mock')
mock.__loader__ = _bootstrap.BuiltinImporter
mock.__spec__ = _bootstrap.ModuleSpec(mock.__name__, mock.__loader__,
origin=__file__)
mock.patch = lambda: None
mock.patch.object = patched
sys.modules['unittest.mock'] = mock
unittest.mock = mock
def _format_obj(obj):
if isinstance(obj, dict) and '__builtins__' in obj:
refmod = mod_from_ns(obj)
return ('<ns for module {!r} ({} {})>'
).format(obj['__name__'], refmod, id(refmod))
else:
return '{} {}'.format(obj, id(obj))
def check_mod(module_name, mod=None, orig=None):
if module_name is None:
if mod is None:
|
module_name = mod.__name__
if module_name is None:
raise ImportError('{!r}: mod.__name__ is None'.format(mod))
if mod is None:
if module_name not in sys.modules:
return
mod = sys.modules[module_name]
# Check the module.
if module_name.startswith('importlib'):
if not hasattr(mod, '_bootstrap'):
try:
f = mod._resolve_name
except AttributeError:
f = mod.ModuleSpec.__init__
bsname = f.__globals__['__name__']
assert bsname is not None, module_name
def fix_support(support=None):
if support is None:
from tests import support
if not hasattr(support, 'check_mod'):
support.check_mod = check_mod
| raise TypeError('missing module_name') |
netcdf_cf.py | #!/usr/bin/env python
###############################################################################
# $Id$
#
# Project: GDAL/OGR Test Suite
# Purpose: Test NetCDF driver support.
# Author: Frank Warmerdam <[email protected]>
#
###############################################################################
# Copyright (c) 2007, Frank Warmerdam <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
import os
import sys
from osgeo import gdal
from osgeo import osr
from gdalconst import *
sys.path.append( '../pymod' )
import gdaltest
import imp # for netcdf_cf_setup()
import netcdf
from netcdf import netcdf_setup, netcdf_test_copy
###############################################################################
# Netcdf CF compliance Functions
###############################################################################
###############################################################################
#check for necessary files and software
def netcdf_cf_setup():
#global vars
gdaltest.netcdf_cf_method = None
gdaltest.netcdf_cf_files = None
gdaltest.netcdf_cf_check_error = ''
#if netcdf is not supported, skip detection
if gdaltest.netcdf_drv is None:
return 'skip'
#skip if on windows
if os.name != 'posix':
print('NOTICE: will skip CF checks because OS is not posix!')
return 'skip'
#try local method
cdms2_installed = False
try:
imp.find_module( 'cdms2' )
cdms2_installed = True
except ImportError:
print( 'NOTICE: cdms2 not installed!' )
print( ' see installation notes at http://pypi.python.org/pypi/cfchecker' )
pass
if cdms2_installed:
xml_dir = './data/netcdf_cf_xml'
tmp_dir = './tmp/cache'
files = dict()
files['a'] = xml_dir+'/area-type-table.xml'
files['s'] = tmp_dir+'/cf-standard-name-table-v18.xml'
#either find udunits path in UDUNITS_PATH, or based on location of udunits app, or copy all .xml files to data
#opt_u = '/home/soft/share/udunits/udunits2.xml'
files['u'] = xml_dir+'/udunits2.xml'
#look for xml files
if not ( os.path.exists(files['a']) and os.path.exists(files['s']) and os.path.exists(files['u']) ):
print('NOTICE: cdms2 installed, but necessary xml files are not found!')
print(' the following files must exist:')
print(' '+xml_dir+'/area-type-table.xml from http://cf-pcmdi.llnl.gov/documents/cf-standard-names/area-type-table/1/area-type-table.xml')
print(' '+tmp_dir+'/cf-standard-name-table-v18.xml - http://cf-pcmdi.llnl.gov/documents/cf-standard-names/standard-name-table/18/cf-standard-name-table.xml')
print(' '+xml_dir+'/udunits2*.xml from a UDUNITS2 install')
#try to get cf-standard-name-table
if not os.path.exists(files['s']):
#print ' downloading cf-standard-name-table.xml (v18) from http://cf-pcmdi.llnl.gov ...'
if not gdaltest.download_file('http://cf-pcmdi.llnl.gov/documents/cf-standard-names/standard-name-table/18/cf-standard-name-table.xml',
'cf-standard-name-table-v18.xml'):
print(' Failed to download, please get it and try again.')
if os.path.exists(files['a']) and os.path.exists(files['s']) and os.path.exists(files['u']):
gdaltest.netcdf_cf_method = 'local'
gdaltest.netcdf_cf_files = files
print('NOTICE: netcdf CF compliance checks: using local checker script')
return 'success'
#skip http method if GDAL_DOWNLOAD_TEST_DATA and GDAL_RUN_SLOW_TESTS are not defined
if 'GDAL_DOWNLOAD_TEST_DATA' not in os.environ:
print('NOTICE: skipping netcdf CF compliance checks')
print('to enable remote http checker script, define GDAL_DOWNLOAD_TEST_DATA')
return 'success'
if not gdaltest.run_slow_tests():
print('NOTICE: skipping netcdf CF compliance checks')
return 'success'
#http method with curl, should use python module but easier for now
success = False
try:
(ret, err) = gdaltest.runexternal_out_and_err('curl')
except :
print('no curl executable')
else:
#make sure script is responding
handle = gdaltest.gdalurlopen("http://puma.nerc.ac.uk/cgi-bin/cf-checker.pl")
if handle is not None:
success = True
else:
print('script not responding')
if success:
gdaltest.netcdf_cf_method = 'http'
print('NOTICE: netcdf CF compliance checks: using remote http checker script, consider installing cdms2 locally')
return 'success'
if gdaltest.netcdf_cf_method is None:
print('NOTICE: skipping netcdf CF compliance checks')
return 'success'
###############################################################################
#build a command used to check ifile
def netcdf_cf_get_command(ifile, version='auto'):
command = ''
#fetch method obtained previously
method = gdaltest.netcdf_cf_method
if method is not None:
if method == 'local':
command = './netcdf_cfchecks.py -a ' + gdaltest.netcdf_cf_files['a'] \
+ ' -s ' + gdaltest.netcdf_cf_files['s'] \
+ ' -u ' + gdaltest.netcdf_cf_files['u'] \
+ ' -v ' + version +' ' + ifile
elif method == 'http':
#command = shlex.split( 'curl --form cfversion="1.5" --form upload=@' + ifile + ' --form submit=\"Check file\" "http://puma.nerc.ac.uk/cgi-bin/cf-checker.pl"' )
#switch to 1.5 as driver now supports, and auto when it becomes available
version = '1.5'
command = 'curl --form cfversion=' + version + ' --form upload=@' + ifile + ' --form submit=\"Check file\" "http://puma.nerc.ac.uk/cgi-bin/cf-checker.pl"'
return command
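# Illustrative sketch (not part of the test flow): once netcdf_cf_setup() has
# chosen a method, a check command can be built and run like this:
#
#   cmd = netcdf_cf_get_command('tmp/netcdf_cf_1.nc', version='auto')
#   (ret, err) = gdaltest.runexternal_out_and_err(cmd)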
###############################################################################
# Check a file for CF compliance
def netcdf_cf_check_file(ifile,version='auto', silent=True):
#if not silent:
# print 'checking file ' + ifile
|
###############################################################################
# Netcdf CF projection Functions and data
###############################################################################
###############################################################################
# Definitions to test projections that are supported by CF
# Tuple structure:
# 0: Short code (eg AEA) - (no GDAL significance, just for filenames etc)
# 1: official name from CF-1 conventions
# 2: EPSG code, or WKT, to tell GDAL to do reprojection
# 3: Actual attribute official name of grid mapping
# 4: List of required attributes to define projection
# 5: List of required coordinate variable standard name attributes
netcdf_cfproj_tuples = [
("AEA", "Albers Equal Area", "EPSG:3577", "albers_conical_equal_area",
['standard_parallel', 'longitude_of_central_meridian',
'latitude_of_projection_origin', 'false_easting', 'false_northing'],
['projection_x_coordinate','projection_y_coordinate']),
("AZE", "Azimuthal Equidistant",
#Didn't have EPSG suitable for AU
"+proj=aeqd +lat_0=-37 +lon_0=145 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs",
"azimuthal_equidistant",
['longitude_of_projection_origin',
'latitude_of_projection_origin', 'false_easting', 'false_northing'],
['projection_x_coordinate','projection_y_coordinate']),
("LAZEA", "Lambert azimuthal equal area",
#Specify proj4 since no approp LAZEA for AU
#"+proj=laea +lat_0=0 +lon_0=134 +x_0=0 +y_0=0 +ellps=GRS80 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs",
"+proj=laea +lat_0=-37 +lon_0=145 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs",
"lambert_azimuthal_equal_area",
['longitude_of_projection_origin',
'latitude_of_projection_origin', 'false_easting', 'false_northing'],
['projection_x_coordinate','projection_y_coordinate']),
("LC_2SP", "Lambert conformal", "EPSG:3112", "lambert_conformal_conic",
['standard_parallel',
'longitude_of_central_meridian',
'latitude_of_projection_origin', 'false_easting', 'false_northing'],
['projection_x_coordinate','projection_y_coordinate']),
# TODO: Test LCC with 1SP
("LCEA", "Lambert Cylindrical Equal Area",
"+proj=cea +lat_ts=-37 +lon_0=145 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs",
"lambert_cylindrical_equal_area",
['longitude_of_central_meridian',
'standard_parallel', # TODO: OR 'scale_factor_at_projection_origin'
'false_easting', 'false_northing'],
['projection_x_coordinate','projection_y_coordinate']),
# 2 entries for Mercator, since attribs different for 1SP or 2SP
("M-1SP", "Mercator",
"+proj=merc +lon_0=145 +k_0=1 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs",
"mercator",
['longitude_of_projection_origin',
'scale_factor_at_projection_origin',
'false_easting', 'false_northing'],
['projection_x_coordinate','projection_y_coordinate']),
# GDAL's own support of Mercator with 2SP is a bit dodgy, so this entry
# uses an HFA intermediate format (see netcdf_cfproj_int_fmt_maps below)
("M-2SP", "Mercator",
"+proj=merc +lat_ts=-37 +lon_0=145 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs",
# Trying with full WKT:
#"""PROJCS["unnamed", GEOGCS["WGS 84", DATUM["WGS_1984", SPHEROID["WGS 84",6378137,298.257223563, AUTHORITY["EPSG","7030"]], AUTHORITY["EPSG","6326"]], PRIMEM["Greenwich",0], UNIT["degree",0.0174532925199433], AUTHORITY["EPSG","4326"]], PROJECTION["Mercator_2SP"], PARAMETER["central_meridian",146], PARAMETER["standard_parallel_1",-37], PARAMETER["latitude_of_origin",0], PARAMETER["false_easting",0], PARAMETER["false_northing",0], UNIT["metre",1, AUTHORITY["EPSG","9001"]]]""",
"mercator",
['longitude_of_projection_origin',
'standard_parallel',
'false_easting', 'false_northing'],
['projection_x_coordinate','projection_y_coordinate']),
("Ortho", "Orthographic",
"+proj=ortho +lat_0=-37 +lon_0=145 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs",
"orthographic",
['longitude_of_projection_origin',
'latitude_of_projection_origin',
'false_easting', 'false_northing'],
['projection_x_coordinate', 'projection_y_coordinate']),
# Seems GDAL may have problems with Polar stereographic, as it
# considers these "local coordinate systems"
("PSt", "Polar stereographic",
"+proj=stere +lat_ts=-37 +lat_0=-90 +lon_0=145 +k_0=1.0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs",
"polar_stereographic",
['straight_vertical_longitude_from_pole',
'latitude_of_projection_origin',
'standard_parallel',
'false_easting', 'false_northing'],
['projection_x_coordinate', 'projection_y_coordinate']),
("St", "Stereographic",
"+proj=stere +lat_0=-37 +lon_0=145 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs",
#'PROJCS["unnamed", GEOGCS["WGS 84", DATUM["WGS_1984", SPHEROID["WGS 84",6378137,298.257223563, AUTHORITY["EPSG","7030"]], AUTHORITY["EPSG","6326"]], PRIMEM["Greenwich",0], UNIT["degree",0.0174532925199433], AUTHORITY["EPSG","4326"]], PROJECTION["Stereographic"], PARAMETER["latitude_of_origin",-37.5], PARAMETER["central_meridian",145], PARAMETER["scale_factor",1], PARAMETER["false_easting",0], PARAMETER["false_northing",0], UNIT["metre",1, AUTHORITY["EPSG","9001"]]]',
"stereographic",
['longitude_of_projection_origin',
'latitude_of_projection_origin',
'scale_factor_at_projection_origin',
'false_easting', 'false_northing'],
['projection_x_coordinate', 'projection_y_coordinate']),
#Note: Rotated Pole not in this list, as seems not GDAL-supported
("TM", "Transverse Mercator", "EPSG:32655", #UTM Zone 55N
"transverse_mercator",
[
'scale_factor_at_central_meridian',
'longitude_of_central_meridian',
'latitude_of_projection_origin',
'false_easting', 'false_northing'],
['projection_x_coordinate','projection_y_coordinate'])
]
#By default, we will use GeoTiff as the 'intermediate' raster format
# for gdalwarp'ing into before gdal_translate to NetCDF.
# But since GeoTiff can't act as a storage format for certain projections
# (eg Mercator-2SP), we will choose other intermediate formats for certain
# projections.
# The following array maps projection short code, to driver format to use
netcdf_cfproj_def_int_format = "GTiff"
netcdf_cfproj_int_fmt_maps = {
"M-2SP":'HFA'
}
netcdf_cfproj_format_fnames = {"HFA":"img", "GTiff":"tif", "NITF":"nitf",
"ERS":"ers"}
###############################################################################
# Check support for given projection tuple definitions
# For each projection, warp the original file and then create a netcdf
def netcdf_cfproj_testcopy(projTuples, origTiff, interFormats, inPath, outPath,
resFilename):
"""Test a Geotiff file can be converted to NetCDF, and projection in
CF-1 conventions can be successfully maintained. Save results to file.
:arg: projTuples - list of projection tuples to test
:arg: origTiff - filename of the source GeoTIFF
:arg: interFormats - dict of intermediate format overrides
:arg: inPath - directory containing the source file
:arg: outPath - path to save output
:arg: resFilename - results filename to write to.
"""
silent = True
gdaltest.netcdf_drv_silent = True
bWriteGdalTags="YES"
#silent = False
#gdaltest.netcdf_drv_silent = False
# bWriteGdalTags="NO"
result = 'success'
# Test if ncdump is available
try:
(ret, err) = gdaltest.runexternal_out_and_err('ncdump -h')
except:
#nothing is supported as ncdump not found
print('NOTICE: netcdf version not found')
return 'skip'
i = err.find('netcdf library version ')
#version not found
if i == -1:
print('NOTICE: netcdf version not found')
return 'skip'
if not os.path.exists(outPath):
os.makedirs(outPath)
resFile = open(os.path.join(outPath, resFilename), "w")
heading = "Testing GDAL translation results to NetCDF\n"
resFile.write(heading)
resFile.write(len(heading)*"="+"\n")
# now = datetime.datetime.now()
# resFile.write("*Date/time:* %s\n" % (now.strftime("%Y-%m-%d %H:%M")))
resFile.write("\n")
resPerProj = {}
dsTiff = gdal.Open( os.path.join(inPath, origTiff), GA_ReadOnly )
s_srs_wkt = dsTiff.GetProjection()
#objects to hold the various tests
i_t = 0
tst = {}
tst_res = {}
for proj in projTuples:
try:
intFmt = interFormats[proj[0]]
except KeyError:
intFmt = netcdf_cfproj_def_int_format
intExt = netcdf_cfproj_format_fnames[intFmt]
# Our little results data structures
if not silent:
print("")
print("Testing %s (%s) translation:" % (proj[0], proj[1]))
if not silent:
print("About to create raster in chosen SRS")
projVrt = os.path.join(outPath, "%s_%s.vrt" % \
(origTiff.rstrip('.tif'), proj[0] ))
projRaster = os.path.join(outPath, "%s_%s.%s" % \
(origTiff.rstrip('.tif'), proj[0], intExt ))
srs = osr.SpatialReference()
srs.SetFromUserInput(proj[2])
t_srs_wkt = srs.ExportToWkt()
if not silent:
print("going to warp file "+origTiff+"\n" + s_srs_wkt + "\ninto file "+projRaster + "\n" + t_srs_wkt)
dswarp = gdal.AutoCreateWarpedVRT( dsTiff, s_srs_wkt, t_srs_wkt, GRA_NearestNeighbour, 0 )
drv_inter = gdal.GetDriverByName(intFmt)
drv_netcdf = gdal.GetDriverByName("netcdf")
dsw = drv_inter.CreateCopy(projRaster, dswarp, 0)
if not silent:
print("Warped %s to %s" % (proj[0], projRaster))
projNc = os.path.join(outPath, "%s_%s.nc" % \
(origTiff.rstrip('.tif'), proj[0] ))
#Force GDAL tags to be written to make testing easier, with preserved datum etc
ncCoOpts = "-co WRITE_GDAL_TAGS=yes"
if not silent:
print("About to translate to NetCDF")
dst = drv_netcdf.CreateCopy(projNc, dsw, 0, [ 'WRITE_GDAL_TAGS='+bWriteGdalTags ])
#For drivers like HFA, line below ESSENTIAL so that all info is
# saved to new raster file - which we'll reopen later and want
# to be fully updated.
dsw = None
dst = None
if not silent:
print("Translated to %s" % (projNc))
transWorked, resDetails = netcdf_cfproj_test_cf(proj, projNc)
resPerProj[proj[0]] = resDetails
resFile.write("%s (%s): " % (proj[0], proj[1]))
if transWorked:
resFile.write("OK\n")
else:
resFile.write("BAD\n")
if 'missingProjName' in resPerProj[proj[0]]:
resFile.write("\tMissing proj name '%s'\n" % \
(resPerProj[proj[0]]['missingProjName']))
for attrib in resPerProj[proj[0]]['missingAttrs']:
resFile.write("\tMissing attrib '%s'\n" % (attrib))
for cVarStdName in resPerProj[proj[0]]['missingCoordVarStdNames']:
resFile.write("\tMissing coord var with std name '%s'\n" \
% (cVarStdName))
if 'cfcheck_error' in resPerProj[proj[0]]:
resFile.write("\tFailed cf check: %s\n" % \
(resPerProj[proj[0]]['cfcheck_error']))
# test file copy
# We now copy to a new file, just to be safe
projNc2 = projNc.rstrip('.nc') + '2.nc'
projRaster2 = os.path.join(outPath, "%s_%s2.%s" % \
(origTiff.rstrip('.tif'), proj[0], intExt ))
tst_res[i_t+1] = netcdf_test_copy( projRaster, 1, None, projNc2, [], 'NETCDF' )
tst_res[i_t+2] = netcdf_test_copy( projNc2, 1, None, projRaster2, [], intFmt )
if tst_res[i_t+1] == 'fail' or tst_res[i_t+2] == 'fail':
result = 'fail'
i_t = i_t + 2
resFile.close()
if not silent:
print("\n" + "*" * 80)
print("Saved results to file %s" % (os.path.join(outPath, resFilename)))
#result = 'success'
resFile = open(os.path.join(outPath, resFilename), "r")
resStr = resFile.read()
if resStr.find('BAD') != -1:
print('\nCF projection tests failed, here is the output (stored in file %s)\n' % \
(os.path.join(outPath, resFilename)))
print(resStr)
result = 'fail'
return result
###############################################################################
# Test an NC file has valid conventions according to passed-in proj tuple
# Note: current testing strategy is a fairly simple attribute search.
# this could use gdal netcdf driver for getting attribs instead...
def netcdf_cfproj_test_cf(proj, projNc):
transWorked = True
command = 'ncdump -h ' + projNc
(ret, err) = gdaltest.runexternal_out_and_err(command)
if err != '':
print(err)
dumpStr = ret
resDetails = {}
resDetails['missingAttrs'] = []
resDetails['missingCoordVarStdNames'] = []
if (':grid_mapping_name = "%s"' % (proj[3])) not in dumpStr:
transWorked = False
resDetails['missingProjName'] = proj[3]
# Check attributes in the projection are included
for attrib in proj[4]:
# The ':' prefix and ' ' suffix is to help check for exact name,
# eg to catch the standard_parallel_1 and 2 issue.
if (":"+attrib+" ") not in dumpStr:
transWorked = False
resDetails['missingAttrs'].append(attrib)
# print "**Error for proj '%s': CF-1 attrib '%s' not found.**" % \
# (proj[0], attrib)
# Now we check the required X and Y attributes are included (e.g. Rotated Pole
# has special names required here).
for coordVarStdName in proj[5]:
if coordVarStdName not in dumpStr:
transWorked = False
resDetails['missingCoordVarStdNames'].append(coordVarStdName)
#Final check use the cf-checker
result_cf = netcdf_cf_check_file( projNc,'auto',True )
if result_cf == 'fail':
resDetails['cfcheck_error'] = gdaltest.netcdf_cf_check_error
transWorked = False
return transWorked, resDetails
###############################################################################
# Netcdf CF Tests
###############################################################################
###############################################################################
#test copy and CF compliance for lat/lon (no datum, no GEOGCS) file, tif->nc->tif
def netcdf_cf_1():
#setup netcdf and netcdf_cf environment
netcdf_setup()
netcdf_cf_setup()
if gdaltest.netcdf_drv is None:
return 'skip'
#tst1 = gdaltest.GDALTest( 'NETCDF', 'trmm.tif', 1, 14 )
#result = tst1.testCreateCopy(check_gt=1, check_srs=1, new_filename='tmp/netcdf_cf_1.nc', delete_copy = 0)
result = netcdf_test_copy( 'data/trmm.nc', 1, 14, 'tmp/netcdf_cf_1.nc' )
if result != 'fail':
#tst2 = gdaltest.GDALTest( 'GTIFF', '../tmp/netcdf_cf_1.nc', 1, 14 )
#result = tst2.testCreateCopy(check_gt=1, check_srs=1, new_filename='tmp/netcdf_cf_1.tiff', delete_copy = 0)
result = netcdf_test_copy( 'tmp/netcdf_cf_1.nc', 1, 14, 'tmp/netcdf_cf_1.tif', [], 'GTIFF' )
result_cf = 'success'
if gdaltest.netcdf_cf_method is not None:
result_cf = netcdf_cf_check_file( 'tmp/netcdf_cf_1.nc','auto',False )
if result != 'fail' and result_cf != 'fail':
return 'success'
else:
return 'fail'
###############################################################################
#test copy and CF compliance for lat/lon (no datum, no GEOGCS) file, nc->nc
def netcdf_cf_2():
if gdaltest.netcdf_drv is None:
return 'skip'
result = netcdf_test_copy( 'data/trmm.nc', 1, 14, 'tmp/netcdf_cf_2.nc' )
result_cf = 'success'
if gdaltest.netcdf_cf_method is not None:
result_cf = netcdf_cf_check_file( 'tmp/netcdf_cf_2.nc','auto',False )
if result != 'fail' and result_cf != 'fail':
return 'success'
else:
return 'fail'
###############################################################################
#test copy and CF compliance for lat/lon (WGS84) file, tif->nc->tif
# note: this test fails in trunk (before r23246)
def netcdf_cf_3():
if gdaltest.netcdf_drv is None:
return 'skip'
result = 'success'
result_cf = 'success'
result = netcdf_test_copy( 'data/trmm-wgs84.tif', 1, 14, 'tmp/netcdf_cf_3.nc' )
if result == 'success':
#tst = gdaltest.GDALTest( 'GTIFF', '../tmp/netcdf_cf_3.nc', 1, 14 )
#result = tst.testCreateCopy(check_gt=1, check_srs=1, new_filename='tmp/netcdf_cf_3.tif', delete_copy = 0)
result = netcdf_test_copy( 'tmp/netcdf_cf_3.nc', 1, 14, 'tmp/netcdf_cf_3.tif', [], 'GTIFF' )
result_cf = 'success'
if gdaltest.netcdf_cf_method is not None:
result_cf = netcdf_cf_check_file( 'tmp/netcdf_cf_3.nc','auto',False )
if result != 'fail' and result_cf != 'fail':
return 'success'
else:
return 'fail'
###############################################################################
#test support for various CF projections
def netcdf_cf_4():
result = netcdf_cfproj_testcopy(netcdf_cfproj_tuples, 'melb-small.tif',
netcdf_cfproj_int_fmt_maps,
'data', 'tmp', 'translate_results.txt')
# result = netcdf_cfproj_testcopy(netcdf_cfproj_tuples1, 'melb-small.tif', \
# 'data', 'tmp', 'translate_results.txt')
return result
###############################################################################
#test support for PS variants (bug #2893)
def netcdf_cf_5():
if gdaltest.netcdf_drv is None:
return 'skip'
ifiles = [ 'NETCDF:data/orog_CRCM1.nc:orog', 'NETCDF:data/orog_CRCM2.nc:orog' ]
for ifile in ifiles:
ds = gdal.Open( ifile )
prj = ds.GetProjection()
sr = osr.SpatialReference( )
sr.ImportFromWkt( prj )
lat_origin = sr.GetProjParm( 'latitude_of_origin' )
if lat_origin != 60:
gdaltest.post_reason( 'Latitude of origin in %s does not match expected: %f'
% (ifile, lat_origin) )
return 'fail'
return 'success'
###############################################################################
gdaltest_list = [
netcdf_cf_1,
netcdf_cf_2,
netcdf_cf_3,
netcdf_cf_4,
netcdf_cf_5,
None ]
if __name__ == '__main__':
gdaltest.setup_run( 'netcdf_cf' )
gdaltest.run_tests( gdaltest_list )
#make sure we cleanup
gdaltest.clean_tmp()
gdaltest.summarize()
| gdaltest.netcdf_cf_check_error = ''
if ( not os.path.exists(ifile) ):
return 'skip'
output_all = ''
command = netcdf_cf_get_command(ifile, version='auto')
if command is None or command=='':
gdaltest.post_reason('no suitable method found, skipping')
return 'skip'
try:
if gdaltest.netcdf_cf_method == 'http':
print('calling ' + command)
(ret, err) = gdaltest.runexternal_out_and_err(command)
except :
gdaltest.post_reason('ERROR with command - ' + command)
return 'fail'
output_all = ret
output_err = ''
output_warn = ''
for line in output_all.splitlines( ):
#optimize this with regex
if 'ERROR' in line and 'ERRORS' not in line:
output_err = output_err + '\n' + line
elif 'WARNING' in line and 'WARNINGS' not in line:
output_warn = output_warn + '\n' + line
result = 'success'
if output_err != '':
result = 'fail'
gdaltest.netcdf_cf_check_error += output_err.strip()
if not silent:
print('=> CF check ERRORS for file ' + ifile + ' : ' + output_err)
if output_warn != '':
if not silent:
print('CF check WARNINGS for file ' + ifile + ' : ' + output_warn)
return result |
plugin_mysql_server.go | /*
Copyright 2017 Google Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package vtqueryserver
import (
"flag"
"fmt"
"net"
"os"
"syscall"
"golang.org/x/net/context"
"gopkg.in/src-d/go-vitess.v1/mysql"
"gopkg.in/src-d/go-vitess.v1/sqltypes"
"gopkg.in/src-d/go-vitess.v1/vt/callerid"
"gopkg.in/src-d/go-vitess.v1/vt/log"
"gopkg.in/src-d/go-vitess.v1/vt/mysqlproxy"
"gopkg.in/src-d/go-vitess.v1/vt/servenv"
"gopkg.in/src-d/go-vitess.v1/vt/vttls"
querypb "gopkg.in/src-d/go-vitess.v1/vt/proto/query"
)
var (
mysqlServerPort = flag.Int("mysqlproxy_server_port", -1, "If set, also listen for MySQL binary protocol connections on this port.")
mysqlServerBindAddress = flag.String("mysqlproxy_server_bind_address", "", "Binds on this address when listening to MySQL binary protocol. Useful to restrict listening to 'localhost' only for instance.")
mysqlServerSocketPath = flag.String("mysqlproxy_server_socket_path", "", "This option specifies the Unix socket file to use when listening for local connections. By default it will be empty and it won't listen to a unix socket")
mysqlAuthServerImpl = flag.String("mysql_auth_server_impl", "static", "Which auth server implementation to use.")
mysqlAllowClearTextWithoutTLS = flag.Bool("mysql_allow_clear_text_without_tls", false, "If set, the server will allow the use of a clear text password over non-SSL connections.")
mysqlSslCert = flag.String("mysqlproxy_server_ssl_cert", "", "Path to the ssl cert for mysql server plugin SSL")
mysqlSslKey = flag.String("mysqlproxy_server_ssl_key", "", "Path to ssl key for mysql server plugin SSL")
mysqlSslCa = flag.String("mysqlproxy_server_ssl_ca", "", "Path to ssl CA for mysql server plugin SSL. If specified, server will require and validate client certs.")
mysqlSlowConnectWarnThreshold = flag.Duration("mysqlproxy_slow_connect_warn_threshold", 0, "Warn if it takes more than the given threshold for a mysql connection to establish")
mysqlConnReadTimeout = flag.Duration("mysql_server_read_timeout", 0, "connection read timeout")
mysqlConnWriteTimeout = flag.Duration("mysql_server_write_timeout", 0, "connection write timeout")
mysqlQueryTimeout = flag.Duration("mysql_server_query_timeout", 0, "mysql query timeout")
)
// proxyHandler implements the mysql.Handler interface.
// It stores the Session in the ClientData of a Connection, if a transaction
// is in progress.
type proxyHandler struct {
mp *mysqlproxy.Proxy
}
func | (mp *mysqlproxy.Proxy) *proxyHandler {
return &proxyHandler{
mp: mp,
}
}
func (mh *proxyHandler) NewConnection(c *mysql.Conn) {
}
func (mh *proxyHandler) ConnectionClosed(c *mysql.Conn) {
// Rollback if there is an ongoing transaction. Ignore error.
var ctx context.Context
var cancel context.CancelFunc
if *mysqlQueryTimeout != 0 {
ctx, cancel = context.WithTimeout(context.Background(), *mysqlQueryTimeout)
defer cancel()
} else {
ctx = context.Background()
}
session, _ := c.ClientData.(*mysqlproxy.ProxySession)
if session != nil && session.TransactionID != 0 {
_ = mh.mp.Rollback(ctx, session)
}
}
func (mh *proxyHandler) ComQuery(c *mysql.Conn, query string, callback func(*sqltypes.Result) error) error {
var ctx context.Context
var cancel context.CancelFunc
if *mysqlQueryTimeout != 0 {
ctx, cancel = context.WithTimeout(context.Background(), *mysqlQueryTimeout)
defer cancel()
} else {
ctx = context.Background()
}
// Fill in the ImmediateCallerID with the UserData returned by
// the AuthServer plugin for that user. If nothing was
// returned, use the User. This lets the plugin map a MySQL
// user used for authentication to a Vitess User used for
// Table ACLs and Vitess authentication in general.
im := c.UserData.Get()
ef := callerid.NewEffectiveCallerID(
c.User, /* principal: who */
c.RemoteAddr().String(), /* component: running client process */
"mysqlproxy MySQL Connector" /* subcomponent: part of the client */)
ctx = callerid.NewContext(ctx, ef, im)
session, _ := c.ClientData.(*mysqlproxy.ProxySession)
if session == nil {
session = &mysqlproxy.ProxySession{
Options: &querypb.ExecuteOptions{
IncludedFields: querypb.ExecuteOptions_ALL,
},
Autocommit: true,
}
if c.Capabilities&mysql.CapabilityClientFoundRows != 0 {
session.Options.ClientFoundRows = true
}
}
if c.SchemaName != "" {
session.TargetString = c.SchemaName
}
session, result, err := mh.mp.Execute(ctx, session, query, make(map[string]*querypb.BindVariable))
c.ClientData = session
err = mysql.NewSQLErrorFromError(err)
if err != nil {
return err
}
return callback(result)
}
var mysqlListener *mysql.Listener
var mysqlUnixListener *mysql.Listener
// initMySQLProtocol starts the mysql protocol.
// It should be called only once in a process.
func initMySQLProtocol() {
log.Infof("initializing mysql protocol")
// Flag is not set, just return.
if *mysqlServerPort < 0 && *mysqlServerSocketPath == "" {
return
}
// If no mysqlproxy was created, just return.
if mysqlProxy == nil {
log.Fatalf("mysqlProxy not initialized")
return
}
// Initialize registered AuthServer implementations (or other plugins)
for _, initFn := range pluginInitializers {
initFn()
}
authServer := mysql.GetAuthServer(*mysqlAuthServerImpl)
// Create a Listener.
var err error
mh := newProxyHandler(mysqlProxy)
if *mysqlServerPort >= 0 {
mysqlListener, err = mysql.NewListener("tcp", net.JoinHostPort(*mysqlServerBindAddress, fmt.Sprintf("%v", *mysqlServerPort)), authServer, mh, *mysqlConnReadTimeout, *mysqlConnWriteTimeout)
if err != nil {
log.Exitf("mysql.NewListener failed: %v", err)
}
if *mysqlSslCert != "" && *mysqlSslKey != "" {
mysqlListener.TLSConfig, err = vttls.ServerConfig(*mysqlSslCert, *mysqlSslKey, *mysqlSslCa)
if err != nil {
log.Exitf("grpcutils.TLSServerConfig failed: %v", err)
return
}
}
mysqlListener.AllowClearTextWithoutTLS = *mysqlAllowClearTextWithoutTLS
// Check for the connection threshold
if *mysqlSlowConnectWarnThreshold != 0 {
log.Infof("setting mysql slow connection threshold to %v", mysqlSlowConnectWarnThreshold)
mysqlListener.SlowConnectWarnThreshold = *mysqlSlowConnectWarnThreshold
}
// Start listening for tcp
go mysqlListener.Accept()
log.Infof("listening on %s:%d", *mysqlServerBindAddress, *mysqlServerPort)
}
if *mysqlServerSocketPath != "" {
// Let's create this unix socket with permissions to all users. In this way,
// clients can connect to the mysqlproxy mysql server without being the mysqlproxy user
oldMask := syscall.Umask(000)
mysqlUnixListener, err = newMysqlUnixSocket(*mysqlServerSocketPath, authServer, mh)
_ = syscall.Umask(oldMask)
if err != nil {
log.Exitf("mysql.NewListener failed: %v", err)
return
}
// Listen for unix socket
go mysqlUnixListener.Accept()
}
}
// newMysqlUnixSocket creates a new unix socket mysql listener. If a socket file already exists, attempts
// to clean it up.
func newMysqlUnixSocket(address string, authServer mysql.AuthServer, handler mysql.Handler) (*mysql.Listener, error) {
listener, err := mysql.NewListener("unix", address, authServer, handler, *mysqlConnReadTimeout, *mysqlConnWriteTimeout)
switch err := err.(type) {
case nil:
return listener, nil
case *net.OpError:
log.Warningf("Found existent socket when trying to create new unix mysql listener: %s, attempting to clean up", address)
// err.Op should never be different from listen, just being extra careful
// in case in the future other errors are returned here
if err.Op != "listen" {
return nil, err
}
_, dialErr := net.Dial("unix", address)
if dialErr == nil {
log.Errorf("Existent socket '%s' is still accepting connections, aborting", address)
return nil, err
}
removeFileErr := os.Remove(address)
if removeFileErr != nil {
log.Errorf("Couldn't remove existent socket file: %s", address)
return nil, err
}
listener, listenerErr := mysql.NewListener("unix", address, authServer, handler, *mysqlConnReadTimeout, *mysqlConnWriteTimeout)
return listener, listenerErr
default:
return nil, err
}
}
func shutdownMySQLProtocol() {
log.Infof("shutting down mysql protocol")
if mysqlListener != nil {
mysqlListener.Close()
mysqlListener = nil
}
if mysqlUnixListener != nil {
mysqlUnixListener.Close()
mysqlUnixListener = nil
}
}
func init() {
servenv.OnRun(initMySQLProtocol)
servenv.OnTerm(shutdownMySQLProtocol)
}
var pluginInitializers []func()
// RegisterPluginInitializer lets plugins register themselves to be init'ed at servenv.OnRun-time
func RegisterPluginInitializer(initializer func()) {
pluginInitializers = append(pluginInitializers, initializer)
}
| newProxyHandler |
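The stale-socket recovery in newMysqlUnixSocket above (dial the existing socket, and only delete the file if nothing answers) is a reusable pattern. A minimal Python sketch of the same check; the function name and error handling are illustrative, not part of vitess:

import os
import socket

def remove_stale_unix_socket(path):
    """Delete a leftover unix socket file, but only when no server is listening on it."""
    probe = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    try:
        probe.connect(path)
    except FileNotFoundError:
        return                      # nothing to clean up
    except ConnectionRefusedError:
        os.remove(path)             # stale file left behind by a dead process
    else:
        raise RuntimeError(f"socket {path!r} is still accepting connections, aborting")
    finally:
        probe.close()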
test_commands.py | from unittest import TestCase
|
class TestNew(TestCase):
pass |
|
utils.rs | use colored::*;
#[allow(dead_code)]
pub fn get_extension(path: String) -> String {
let path_name: Vec<&str> = path.split('.').collect::<Vec<&str>>();
let extension: &str = path_name.last().copied().unwrap();
if path_name.len() > 1 && path_name[0] != "" {
return extension.to_string();
} else {
return "".to_string();
}
}
#[allow(dead_code)]
pub fn get_file_name_without_extension(path: String) -> String {
let path_name: Vec<&str> = path.split('.').collect::<Vec<&str>>();
if path_name.len() > 1 && path_name[0] != "" {
// join everything except the final segment so "archive.tar.gz" yields "archive.tar"
return path_name[..path_name.len() - 1].join(".");
} else {
return "".to_string();
}
}
#[allow(dead_code)]
pub fn get_prefix_or_suffix_or_contain(file_name: String, search_term: &str) -> i32 {
// -1 -> not found
// 0 -> prefix
// 1 -> suffix
// 2 -> contains
let split_name: Vec<&str> = file_name.split(&search_term.to_lowercase())
.collect::<Vec<&str>>();
if split_name.len() < 2 | else {
if split_name[0].is_empty() {
return 0;
} else if split_name[1].is_empty() {
return 1;
} else {
return 2;
}
}
}
#[allow(dead_code)]
pub fn log_success(title: &str, message: &str) {
println!("{}: {}", title.bold(), message)
}
#[allow(dead_code)]
pub fn log_error(error: std::io::Error) {
println!("{}: {}", "ERROR".red().bold(), error.to_string().red())
} | {
return -1;
} |
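The integer codes returned by get_prefix_or_suffix_or_contain live only in the comment above, so a worked example helps. This is a rough Python restatement of the intended classification, illustrative and not part of the crate:

def classify_match(file_name, search_term):
    """-1: not found, 0: prefix, 1: suffix, 2: contains (same codes as the Rust helper)."""
    parts = file_name.split(search_term.lower())
    if len(parts) < 2:
        return -1
    if parts[0] == "":
        return 0
    if parts[-1] == "":
        return 1
    return 2

assert classify_match("report_2021.txt", "report") == 0   # prefix
assert classify_match("final_report", "report") == 1      # suffix
assert classify_match("my_report_v2", "report") == 2      # contains
assert classify_match("notes.txt", "report") == -1        # not found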
carto_editor.js | /**
* Carto CSS editor module
*
* new cdb.admin.mod.CartoCSSEditor({
* model: dataLayer
* table: table
* })
*
*/
cdb.admin.mod = cdb.admin.mod || {};
cdb.admin.mod.CartoCSSEditor = cdb.admin.Module.extend({
_TEXTS: {
tip: '<strong>Ctrl + SPACE</strong> to autocomplete. <strong><%- key %> + S</strong> to apply your styles.'
},
_ACTION: {
type: 'show',
width: 600
},
buttonClass: 'cartocss_mod',
type: 'tool',
events: {
'click .actions button': 'applyStyle',
'click .actions a.next': '_do',
'click .actions a.back': '_undo',
'click .doc_info': '_showDoc'
},
initialize: function() {
_.bindAll(this, '_onKeyUpEditor');
this.template = this.getTemplate('table/menu_modules/views/carto_editor');
this.model.bind('change', this._updateStyle, this);
this.add_related_model(this.model);
this.add_related_model(this.model.table);
// Set query position from history array and last sql applied
var history = this.model.get('tile_style_history')
, position = this.model.tile_style_history_position
, style = this.model.get('tile_style');
// The model doesn't persist the last change, so add it to the history
if (style && style != "" && history && _.indexOf(history, style) === -1) {
history.push(style);
this.model.set({ "tile_style_history": history }, { silent:true });
}
// Get history position
this.model.tile_style_history_position =
_.indexOf(history, style) !== -1
? _.indexOf(history, style)
: 0;
this.model.bind('parseError', this._showErrorFromServer, this);
/*
this.model.bind('tileError', this._renderError, this);
*/
this.model.bind('tileOk', this._checkLocalErrors, this);
this.model.table.bind('change:schema', this._checkLocalErrors, this);
//this.buildAutocomplete();
this._initBinds();
cdb.god.bind('end_show', this.activated, this)
this.add_related_model(cdb.god);
},
/** builds autocomplete from the cartocss reference */
buildAutocomplete: function() {
this.autocomplete = [];
if (typeof(window._mapnik_reference_latest) !== 'undefined') {
var symbolizers = _mapnik_reference_latest.symbolizers;
for (var s in symbolizers) {
var sym = symbolizers[s];
for (var p in sym) {
var css = sym[p].css;
if (css && css.length) {
this.autocomplete.push(css);
}
}
}
}
},
activated: function() {
if(this.codeEditor) {
this.codeEditor.refresh();
this.codeEditor.focus();
this._adjustCodeEditorSize();
}
},
render: function() {
var self = this;
this.clearSubViews();
this.$el.append(this.template({}));
this._initHelp();
this._initEditor();
this._updateStyle();
this._adjustCodeEditorSize();
return this;
},
_initBinds: function() {
// Codemirror extrakey
// Add save keymap
// PC & LINUX -> Ctrl + s
// MAC -> Cmd + s
var ua = navigator.userAgent.toLowerCase()
, so = "rest"
, keymap = "ctrl+s"
, self = this;
if (/mac os/.test(ua)) {
keymap = "meta+s";
so = "mac";
}
| self.applyStyle();
}
});
},
_initEditor: function() {
var self = this;
this.codeEditor = CodeMirror.fromTextArea(this.$('textarea')[0], {
mode: "text/x-carto",
tabMode: "indent",
matchBrackets: true,
lineNumbers: true,
lineWrapping: true,
onKeyEvent: this._onKeyUpEditor,
extraKeys: {
"Ctrl-Space": function(cm) { self._showAutocomplete(cm) }
}
});
var color_picker = new cdb.admin.CodemirrorColorPicker({
editor: this.codeEditor,
model: this.model
});
color_picker.bind('colorChosen', this.applyStyle, this);
this.addView(color_picker);
// Add tooltip for undo/redo buttons
this.$("a.next, a.back").tipsy({
gravity: "s",
fade: true
});
},
_initHelp: function() {
var so = "rest";
var ua = navigator.userAgent.toLowerCase();
if (/mac os/.test(ua)) {
so = "mac";
}
var help = new cdb.admin.mod.HTMLEditorHelp({
localStorageKey: this._STORAGE_NAMESPACE + this.model.table.get('id'),
text: _.template(this._TEXTS.tip)({ key: (so == "mac") ? "CMD" : "Ctrl" })
}).bind("hide show", this._adjustCodeEditorSize, this);
this.$el.append(help.render().$el);
this.addView(help);
},
_showAutocomplete: function(cm) {
CodeMirror.showHint(cm, CodeMirror.hint['custom-list-with-type'], {
completeSingle: false,
list: _.union( this._getTableName(), this._getSQLColumns())
});
},
_getTableName: function() {
return [ [ this.model.table.get('name'), "T" ] ]
},
_getSQLColumns: function() {
return _.map(
this.model.table.get('schema'),
function(pair) {
// Column name and type
return [pair[0], "C"]
});
},
_onKeyUpEditor: function(cm, e) {
var code = (e.keyCode ? e.keyCode : e.which);
if (e.type == "keyup" && code != 27 ) {
var self = this;
if (this.autocomplete_timeout) clearTimeout(this.autocomplete_timeout);
this.autocomplete_timeout = setTimeout(function() {
var cur = cm.getCursor();
var str = cm.getTokenAt(cur).string;
var schema = self.model.table.get('schema');
if (schema && str.length > 2) {
var arr = _.union(self.model.table.get('schema'), self._getTableName());
var list = _.compact(_.map(arr, function(pair) {
if (pair[0].search(str) != -1)
return pair[0];
return null;
}));
if (!cm.state.completionActive && str.length > 2 && list.length > 0) {
self._showAutocomplete(cm)
}
}
}, 150);
}
},
/** hack used to format the old styles transformed to cartodb 2.0*/
formatStyle: function(s) {
try {
if (s && s.length) {
s = s.replace(/{/g,'{\n')
.replace(/}/g,'}\n')
.replace(/;/g,';\n')
var t = s.split('\n');
var lines = [];
var c = 0;
for(var i = 0; i < t.length; ++i) {
lines.push(c);
if (t[i].indexOf('{') != -1) {
++c;
}
if (t[i].indexOf('}') != -1) {
--c;
}
}
var r = [];
for(var i = 0; i < t.length; ++i) {
var spaces = '';
if(t[i].indexOf('}') >= 0) lines[i]-=1;
for(var j = 0; j < lines[i]; ++j) {
spaces = spaces + ' ';
}
r.push(spaces + t[i]);
}
return '/** this cartoCSS has been processed in order to be compatible with the new cartodb 2.0 */\n\n' + r.join('\n');
}
} catch(e) {
}
return s;
},
_updateStyle: function(){
var st = this.model.get('tile_style')
, editor_st = this.codeEditor ? this.codeEditor.getValue() : null;
if(this.codeEditor && st && st != editor_st) {
if(st.indexOf('\n') === -1) {
st = this.formatStyle(st);
}
this.codeEditor.setValue(st);
this.codeEditor.refresh();
}
// If model is using history, check buttons
if (this.model.get('tile_style_history'))
this._checkDoButtons();
},
/**
* gets an array of parse errors from windshaft
* and returns an array of {line: 1, error: 'string'} with user-friendly
* strings. Parses errors in format:
*
* 'style.mss:7:2 Invalid code: asdasdasda'
*/
_parseError: function(errors) {
var parsedErrors = [];
for(var i in errors) {
var err = errors[i];
if(err) {
if (err.length > 0) {
var g = err.match(/.*:(\d+):(\d+)\s*(.*)/);
if(g) {
parsedErrors.push({
line: parseInt(g[1], 10),
message: g[3]
});
} else {
parsedErrors.push({
line: null,
message: err
})
}
} else if(err.line) {
parsedErrors.push(err);
}
}
}
// sort by line
parsedErrors.sort(function(a, b) { return a.line - b.line; });
parsedErrors = _.uniq(parsedErrors, true, function(a) { return a.line + a.message; });
return parsedErrors;
},
_showErrorFromServer: function(err) {
this._showError(this._parseError(err));
},
_showError: function(err) {
var parsedErrors = err;
if(parsedErrors.length > 0) {
var errors = _(parsedErrors).map(function(e) {
if(e.line) {
return "line " + e.line + ": " + e.message;
}
return e.error || e.message;
})
this._renderError(errors.join('</br>'));
}
},
_renderError: function(errors) {
this.trigger('hasErrors');
// Get actions block height
var actions_h = this.$('.actions').outerHeight();
// Add error text
this.$('.info')
.addClass('error')
.html("<p>" + errors + "</p>")
// If layer is not visible, we need to move error message
.css({
bottom: actions_h + (!this.model.get('visible') ? 57 : 0)
})
.show();
this._adjustCodeEditorSize();
},
_adjustCodeEditorSize: function() {
// Fit editor with the error
var info_h = this.$('.info').is(':visible') ? this.$('.info').outerHeight() : 0;
var help_h = this.$('.help-tip').is(':visible') ? 36 : 0 ;
// If the layer is not visible, take the hidden-layer notice height into account
var vis_msg_h = !this.model.get('visible') ? 57 : 0 ;
this.$('.CodeMirror-wrap').css({
bottom: info_h + vis_msg_h + 80, /* the space we need to show the action buttons */
top: help_h
});
},
_checkLocalErrors: function() {
var style = this.model.get('tile_style');
var cartoParser = new cdb.admin.CartoParser(style);
if(cartoParser.errors().length) {
this._showError(this._parseError(cartoParser.errors()));
} else {
// check variables used
var err = this.checkVariables(cartoParser.variablesUsed());
if(err.length) {
this._showError(err);
return;
}
}
this._clearErrors();
},
_clearErrors: function() {
this.trigger('clearError');
// Hide info
this.$('.info')
.html('')
.removeClass('error')
.hide();
this._adjustCodeEditorSize();
},
_do: function(e) {
e.preventDefault();
var newCarto = this.model.redoHistory('tile_style');
if(this.codeEditor) this.codeEditor.setValue(newCarto);
this._checkDoButtons();
return false;
},
_undo: function(e) {
e.preventDefault();
var newCarto = this.model.undoHistory('tile_style');
if(this.codeEditor) this.codeEditor.setValue(newCarto);
this._checkDoButtons();
return false;
},
/**
* checks that variables used in cartocss exist in the schema
*/
checkVariables: function(vars) {
var columns = this.model.table.columnNames();
var err = [];
for(var i in vars) {
if(!_.contains(columns, vars[i])) {
err.push({
error: "sql/table must contain " + vars[i] + " variable"
});
}
}
return err;
},
applyStyle: function() {
this._clearErrors();
var style = this.codeEditor.getValue();
var cartoParser = new cdb.admin.CartoParser(style);
if(cartoParser.errors().length) {
var errors = this._parseError(cartoParser.errors());
if(errors) this._showError(errors);
} else {
// check variables used
var err = this.checkVariables(cartoParser.variablesUsed());
if(err.length) {
this._showError(err)
return;
}
this.model.addToHistory('tile_style', style);
//TODO: check if the style has been changed
this.model.save({
tile_style: style,
tile_style_custom: true
});
// we save the new applied query on the history array
this.trigger('applyStyle', style);
}
// Event tracking "Applied CartoCSS style manually"
cdb.god.trigger('metrics', 'cartocss_manually', {
email: window.user_data.email
});
},
/**
* Check if the editor is different from the saved value
* @return {Boolean}
*/
hasChanges: function() {
return this.model.get('tile_style') != this.codeEditor.getValue();
},
_checkDoButtons: function() {
// Redo
if (!this.model.isHistoryAtLastPosition('tile_style')) {
this.$el.find('a.next').removeClass("disabled")
} else {
this.$el.find('a.next').addClass("disabled")
}
// Undo
if (!this.model.isHistoryAtFirstPosition('tile_style')) {
this.$el.find('a.back').removeClass("disabled")
} else {
this.$el.find('a.back').addClass("disabled")
}
},
_showDoc: function(ev) {
ev.preventDefault();
cdb.editor.ViewFactory.createDialogByTemplate('common/dialogs/help/carto_css').appendToBody();
}
}); | this.$el.bind('keydown', keymap, function(ev) {
if (((so=="mac" && ev.metaKey) || (so=="rest" && ev.ctrlKey)) && ev.keyCode == 83 ) {
ev.preventDefault(); |
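_parseError in the editor above leans on a single regex to split Windshaft messages like 'style.mss:7:2 Invalid code: asdasdasda' into a line number and a message. The same idea as a standalone Python sketch; the function name is made up, and the error format is the one quoted in the doc comment:

import re

ERROR_RE = re.compile(r".*:(\d+):(\d+)\s*(.*)")

def parse_carto_errors(errors):
    parsed = []
    for err in errors:
        m = ERROR_RE.match(err)
        if m:
            # group 1 is the line, group 2 the column, group 3 the message
            parsed.append({"line": int(m.group(1)), "message": m.group(3)})
        else:
            parsed.append({"line": None, "message": err})
    return sorted(parsed, key=lambda e: e["line"] or 0)

print(parse_carto_errors(["style.mss:7:2 Invalid code: asdasdasda"]))
# [{'line': 7, 'message': 'Invalid code: asdasdasda'}]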
controller.js | "use strict";
var path = require("path");
var shell = require('shelljs');
var assert = require('assert');
var helper = require("../helper.js");
var extKey = 'aa39b5490c4a4ed0e56d7ec1232a428f7ad78ebb7347db3fc9875cb10c2bce39bbf8aabacf9e00420afb580b15698c04ce10d659d1972ebc53e76b6bbae0c113bee1e23062800bc830e4c329ca913fefebd1f1222295cf2eb5486224044b4d0c';
var soajs = helper.requireModule('index.js');
describe("Proxy Tests", function () {
let controller;
before(function (done) {
controller = new soajs.server.controller();
controller.init(function() {
console.log("**** start controller init");
controller.start(done);
});
});
after(function (done) {
controller.stop(done);
});
it("Get permissions", function (done) {
var options = {
uri: 'http://127.0.0.1:4000/key/permission/get',
headers: {
'Content-Type': 'application/json',
key: extKey
}
}; | helper.requester('get', options, function (error, body) {
// assert.ifError(error);
// assert.ok(body);
done();
});
});
}); | |
fizzy.rs | use crate::matcher::Matcher;
#[derive(Default)]
pub struct Fizzy<'a, T> {
matchers: Vec<Matcher<'a, T>>,
}
impl<'a, T> Fizzy<'a, T> {
pub fn new() -> Self {
Self {
matchers: Vec::new(),
}
}
pub fn add_matcher(mut self, matcher: Matcher<'a, T>) -> Self {
self.matchers.push(matcher);
self
}
}
impl<T: Copy + ToString> Fizzy<'_, T> {
pub fn apply<'a, I>(&'a self, values: I) -> impl 'a + Iterator<Item = String>
where
I: 'a + Iterator<Item = T>,
{
values.map(move |v| self.word_for(v))
} |
fn word_for(&self, value: T) -> String {
let compound_word = self
.matchers
.iter()
.filter_map(|m| m.matching_word(value))
.collect::<String>();
if compound_word.is_empty() {
value.to_string()
} else {
compound_word
}
}
} | |
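word_for above concatenates the word from every matcher that fires and falls back to the stringified value when none do, which is the classic extensible-fizzbuzz shape. A throwaway Python sketch of the same pipeline, with invented names for illustration:

class Fizzy:
    def __init__(self):
        self.matchers = []            # (predicate, word) pairs

    def add_matcher(self, predicate, word):
        self.matchers.append((predicate, word))
        return self                   # chainable, like the Rust builder

    def apply(self, values):
        for v in values:
            word = "".join(w for p, w in self.matchers if p(v))
            yield word or str(v)      # fall back to the raw value

fizzy = Fizzy().add_matcher(lambda n: n % 3 == 0, "fizz") \
               .add_matcher(lambda n: n % 5 == 0, "buzz")
print(list(fizzy.apply(range(1, 16))))   # ends with 'fizzbuzz' for 15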
App.js | import React from 'react';
import { BrowserRouter as Router, Switch, Route } from 'react-router-dom';
import SearchBooks from './pages/SearchBooks';
import SavedBooks from './pages/SavedBooks';
import Navbar from './components/Navbar';
import { ApolloProvider } from '@apollo/react-hooks';
import ApolloClient from 'apollo-boost';
const client = new ApolloClient({
request: operation => {
const token = localStorage.getItem('id_token');
operation.setContext({
headers: {
authorization: token ? `Bearer ${token}` : ''
}
});
},
uri: '/graphql'
});
function | () {
return (
<ApolloProvider client={client}>
<Router>
<>
<Navbar />
<Switch>
<Route exact path='/' component={SearchBooks} />
<Route exact path='/saved' component={SavedBooks} />
<Route render={() => <h1 className='display-2'>Wrong page!</h1>} />
</Switch>
</>
</Router>
</ApolloProvider>
);
}
export default App;
| App |
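The request hook in the Apollo client above does one job: read a token from local storage and stamp it onto every outgoing request as a bearer header. Sketched below is the same pattern with a plain Python HTTP client; the URL and token source are placeholders, not part of this app:

import requests

def make_client(get_token):
    session = requests.Session()

    def bearer_auth(request):
        token = get_token()                              # e.g. read from a local store
        if token:
            request.headers["Authorization"] = f"Bearer {token}"
        return request

    session.auth = bearer_auth                           # requests invokes this per request
    return session

client = make_client(lambda: "dummy-token")
# client.post("https://example.invalid/graphql", json={"query": "{ me { username } }"})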
ffm.py | import subprocess
import warnings
import os.path as osp
import os
import numpy as np | # Note: libffm doesn't handle relative paths very well, hence abspath used.
class FFM:
def __init__(self, train_binary_path, predict_binary_path, model_path=None):
self.train_binary_path = osp.abspath(train_binary_path)
self.predict_binary_path = osp.abspath(predict_binary_path)
self.model_path = osp.abspath(model_path) if model_path is not None else None
def fit(self, X, model_path='model', l=0.00002, k=4, t=15, r=0.2, s=1):
"""
-l <lambda>: regularization parameter (default 0.00002)
-k <factor>: number of latent factors (default 4)
-t <iteration>: number of iterations (default 15)
-r <eta>: learning rate (default 0.2)
-s <nr_threads>: number of threads (default 1)
"""
# validation support?
warnings.warn('Please note that unix newline format (LF) is required for libffm binaries to work correctly.' +
' Windows (CR LF) will cause issues.')
if not isinstance(X, str):
raise ValueError(f'Improper input type {type(X)}. X must be a path to an ffm file.')
self.model_path = osp.abspath(model_path)
train_data_abspath = osp.abspath(X)
cmd = f'{self.train_binary_path} -l {l} -k {k} -t {t} -r {r} -s {s} {train_data_abspath} {self.model_path}'
proc = subprocess.Popen(cmd)
proc.wait()
os.remove(f'{train_data_abspath}.bin')
def predict(self, X, output_file):
warnings.warn('Please note that unix newline format (LF) is required for libffm binaries to work correctly.' +
' Windows (CR LF) will cause issues.')
if self.model_path is None:
raise RuntimeError('Model must be fitted first!')
if not isinstance(X, str):
raise ValueError(f'Improper input type {type(X)}. X must be a path to an ffm file.')
predicted_data_abspath = osp.abspath(X)
output_file_abspath = osp.abspath(output_file)
cmd = f'{self.predict_binary_path} {predicted_data_abspath} {self.model_path} {output_file_abspath}'
proc = subprocess.Popen(cmd)
proc.wait()
@classmethod
def pred_file_to_numpy(cls, preds_file):
return np.loadtxt(preds_file)
@classmethod
def ground_truth_from_ffm_file(cls, ffm_file):
with open(ffm_file, 'r') as f:
labels = [line.split(' ')[0] for line in f]
return np.array(labels).astype(float) | |
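Given the wrapper above, a training and prediction session looks roughly like the sketch below. The binary and data paths are placeholders, and the input files must already be in libffm's 'label field:index:value' text format with unix (LF) line endings, exactly as the warnings in fit and predict insist:

# hypothetical paths: point these at real libffm binaries and data
ffm = FFM(train_binary_path='libffm/ffm-train.exe',
          predict_binary_path='libffm/ffm-predict.exe')

ffm.fit('data/train.ffm', model_path='model', k=8, t=20)   # trains and writes 'model'
ffm.predict('data/test.ffm', output_file='preds.txt')      # one probability per line

preds = FFM.pred_file_to_numpy('preds.txt')
y_true = FFM.ground_truth_from_ffm_file('data/test.ffm')
print(preds.shape, y_true.shape)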
peer_id.rs | // Copyright 2018 Parity Technologies (UK) Ltd.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
use crate::PublicKey;
use bs58;
use quick_error::quick_error;
use multihash;
use std::{convert::TryFrom, fmt, str::FromStr};
/// Identifier of a peer of the network.
///
/// The data is a multihash of the public key of the peer.
// TODO: maybe keep things in decoded version?
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct PeerId {
multihash: multihash::Multihash,
}
impl fmt::Debug for PeerId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("PeerId")
.field(&self.to_base58())
.finish()
}
}
impl fmt::Display for PeerId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.to_base58().fmt(f)
}
}
impl PeerId {
/// Builds a `PeerId` from a public key.
#[inline]
pub fn from_public_key(key: PublicKey) -> PeerId {
let key_enc = key.into_protobuf_encoding();
let multihash = multihash::encode(multihash::Hash::SHA2256, &key_enc)
.expect("sha2-256 is always supported");
PeerId { multihash }
}
/// Checks whether `data` is a valid `PeerId`. If so, returns the `PeerId`. If not, returns
/// back the data as an error.
#[inline]
pub fn from_bytes(data: Vec<u8>) -> Result<PeerId, Vec<u8>> {
match multihash::Multihash::from_bytes(data) {
Ok(multihash) => {
if multihash.algorithm() == multihash::Hash::SHA2256 {
Ok(PeerId { multihash })
} else {
Err(multihash.into_bytes())
}
},
Err(err) => Err(err.data),
}
}
/// Turns a `Multihash` into a `PeerId`. If the multihash doesn't use the correct algorithm,
/// returns back the data as an error.
#[inline]
pub fn from_multihash(data: multihash::Multihash) -> Result<PeerId, multihash::Multihash> {
if data.algorithm() == multihash::Hash::SHA2256 {
Ok(PeerId { multihash: data })
} else {
Err(data)
}
}
/// Generates a random peer ID from a cryptographically secure PRNG.
///
/// This is useful for randomly walking on a DHT, or for testing purposes.
#[inline]
pub fn random() -> PeerId {
PeerId {
multihash: multihash::Multihash::random(multihash::Hash::SHA2256)
}
}
/// Returns a raw bytes representation of this `PeerId`.
///
/// Note that this is not the same as the public key of the peer.
#[inline]
pub fn into_bytes(self) -> Vec<u8> {
self.multihash.into_bytes()
}
/// Returns a raw bytes representation of this `PeerId`.
///
/// Note that this is not the same as the public key of the peer.
#[inline]
pub fn as_bytes(&self) -> &[u8] {
self.multihash.as_bytes()
}
/// Returns a base-58 encoded string of this `PeerId`.
#[inline]
pub fn | (&self) -> String {
bs58::encode(self.multihash.as_bytes()).into_string()
}
/// Returns the raw bytes of the hash of this `PeerId`.
#[inline]
pub fn digest(&self) -> &[u8] {
self.multihash.digest()
}
/// Checks whether the public key passed as parameter matches the public key of this `PeerId`.
///
/// Returns `None` if this `PeerId`s hash algorithm is not supported when encoding the
/// given public key, otherwise `Some` boolean as the result of an equality check.
pub fn is_public_key(&self, public_key: &PublicKey) -> Option<bool> {
let alg = self.multihash.algorithm();
let enc = public_key.clone().into_protobuf_encoding();
match multihash::encode(alg, &enc) {
Ok(h) => Some(h == self.multihash),
Err(multihash::EncodeError::UnsupportedType) => None
}
}
}
impl From<PublicKey> for PeerId {
#[inline]
fn from(key: PublicKey) -> PeerId {
PeerId::from_public_key(key)
}
}
impl TryFrom<Vec<u8>> for PeerId {
type Error = Vec<u8>;
fn try_from(value: Vec<u8>) -> Result<Self, Self::Error> {
PeerId::from_bytes(value)
}
}
impl TryFrom<multihash::Multihash> for PeerId {
type Error = multihash::Multihash;
fn try_from(value: multihash::Multihash) -> Result<Self, Self::Error> {
PeerId::from_multihash(value)
}
}
impl PartialEq<multihash::Multihash> for PeerId {
#[inline]
fn eq(&self, other: &multihash::Multihash) -> bool {
&self.multihash == other
}
}
impl PartialEq<PeerId> for multihash::Multihash {
#[inline]
fn eq(&self, other: &PeerId) -> bool {
self == &other.multihash
}
}
impl AsRef<multihash::Multihash> for PeerId {
#[inline]
fn as_ref(&self) -> &multihash::Multihash {
&self.multihash
}
}
impl AsRef<[u8]> for PeerId {
#[inline]
fn as_ref(&self) -> &[u8] {
self.as_bytes()
}
}
impl Into<multihash::Multihash> for PeerId {
#[inline]
fn into(self) -> multihash::Multihash {
self.multihash
}
}
quick_error! {
#[derive(Debug)]
pub enum ParseError {
B58(e: bs58::decode::DecodeError) {
display("base-58 decode error: {}", e)
cause(e)
from()
}
MultiHash {
display("decoding multihash failed")
}
}
}
impl FromStr for PeerId {
type Err = ParseError;
#[inline]
fn from_str(s: &str) -> Result<Self, Self::Err> {
let bytes = bs58::decode(s).into_vec()?;
PeerId::from_bytes(bytes).map_err(|_| ParseError::MultiHash)
}
}
#[cfg(test)]
mod tests {
use crate::{PeerId, identity};
#[test]
fn peer_id_is_public_key() {
let key = identity::Keypair::generate_ed25519().public();
let peer_id = key.clone().into_peer_id();
assert_eq!(peer_id.is_public_key(&key), Some(true));
}
#[test]
fn peer_id_into_bytes_then_from_bytes() {
let peer_id = identity::Keypair::generate_ed25519().public().into_peer_id();
let second = PeerId::from_bytes(peer_id.clone().into_bytes()).unwrap();
assert_eq!(peer_id, second);
}
#[test]
fn peer_id_to_base58_then_back() {
let peer_id = identity::Keypair::generate_ed25519().public().into_peer_id();
let second: PeerId = peer_id.to_base58().parse().unwrap();
assert_eq!(peer_id, second);
}
#[test]
fn random_peer_id_is_valid() {
for _ in 0 .. 5000 {
let peer_id = PeerId::random();
assert_eq!(peer_id, PeerId::from_bytes(peer_id.clone().into_bytes()).unwrap());
}
}
}
| to_base58 |
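Everything above reduces to one formula: a PeerId is base58(multihash(sha2-256(protobuf-encoded public key))). Below is a self-contained Python sketch of that derivation with the two multihash header bytes written out by hand (0x12 is the sha2-256 code, 0x20 the 32-byte digest length); it is illustrative only, not the libp2p wire code:

import hashlib

ALPHABET = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"

def base58_encode(data):
    n = int.from_bytes(data, "big")
    out = ""
    while n:
        n, rem = divmod(n, 58)
        out = ALPHABET[rem] + out
    pad = len(data) - len(data.lstrip(b"\x00"))   # leading zero bytes become '1's
    return "1" * pad + out

def peer_id(pubkey_protobuf):
    digest = hashlib.sha256(pubkey_protobuf).digest()
    multihash = bytes([0x12, 0x20]) + digest      # algorithm code + digest length
    return base58_encode(multihash)

print(peer_id(b"example-encoded-key"))            # prints a familiar 'Qm...' identifier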
messageListener.js | import { MSG_START, MSG_IS_STARTED, MSG_STOP } from "utils/msgTypes";
import { delay } from "./../utils/helper.js";
let is_started = false;
const onRequest = (message, sender, reply) => {
switch (message.type) {
case MSG_START: {
is_started = true;
startCheckOut();
reply();
break;
}
case MSG_IS_STARTED: {
reply(is_started);
break;
}
case MSG_STOP: {
is_started = false; | }
}
return true;
};
const startCheckOut = async () => {
if (!is_started) {
console.log("~~~~~~~~~~~~~~~ stopped");
return;
}
const form = document.querySelector("form[name='mainf']");
if (!form) return;
const imgContainer = form.firstElementChild;
const allImages = imgContainer.querySelectorAll("img");
if (!allImages) return;
const allNumbers = Array.from(allImages).map(img => {
const url = img.src;
const splitUrl = url.split("/");
console.log("~~~~~~~~~~~~~~ img", url);
const fileName = splitUrl[splitUrl.length - 1];
console.log("~~~~~~~~~~~~~~ filename", fileName);
const splitFileName = fileName.split(".");
return splitFileName[0];
});
console.log("~~~~~~~~~~~~ all numbers", allNumbers.join());
const allInputs = Array.from(form.querySelectorAll("input"));
allInputs[0].value = allNumbers.join("");
allInputs[1].click();
await delay(200);
startCheckOut();
};
export default onRequest; | reply();
break; |
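startCheckOut above is a stop-flag-guarded polling loop: do one pass over the page, wait about 200 ms, recurse, and bail out once MSG_STOP clears the flag. Here is the same control shape as a small Python asyncio sketch, with the DOM scraping replaced by a placeholder step:

import asyncio

is_started = False

async def poll(step, interval=0.2):
    while is_started:                # cleared by the stop handler
        await step()
        await asyncio.sleep(interval)
    print("~~~~~~~~~~~~~~~ stopped")

async def main():
    global is_started
    is_started = True
    task = asyncio.create_task(poll(lambda: asyncio.sleep(0)))  # placeholder step
    await asyncio.sleep(1)
    is_started = False               # what the MSG_STOP branch does
    await task

asyncio.run(main())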