prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>commands.go<|end_file_name|><|fim▁begin|>package main
import (
"fmt"
"log"
)
// ListCommand carries the options of the "list" sub-command.
type ListCommand struct {
	// All additionally lists every version available for installation,
	// not just the locally installed ones.
	All bool `short:"a" long:"available" description:"also prints all available version for installation"`
}

// InitCommand is the option-less "init" sub-command.
type InitCommand struct{}

// InstallCommand carries the options of the "install" sub-command.
type InstallCommand struct {
	// Use forces activation of the freshly installed version.
	Use bool `short:"u" long:"use" description:"force use of this new version after installation"`
}

// UseCommand is the option-less "use" sub-command.
type UseCommand struct{}

// Interactor bundles the remote version archive, the local instance
// manager and the template manager that every command operates on.
type Interactor struct {
	archive   WebotsArchive
	manager   WebotsInstanceManager
	templates TemplateManager
}
// NewInteractor builds an Interactor wired to the public Cyberbotics HTTP
// archive and a symlink-based local instance manager. The template manager
// is taken from the symlink manager so both share the same state.
func NewInteractor() (*Interactor, error) {
	archive, err := NewWebotsHttpArchive("http://www.cyberbotics.com/archive/")
	if err != nil {
		return nil, err
	}
	manager, err := NewSymlinkManager(archive)
	if err != nil {
		return nil, err
	}
	return &Interactor{
		archive:   archive,
		manager:   manager,
		templates: manager.templates,
	}, nil
}
// Execute implements the "list" command: it prints every installed webots
// version (marking the one currently in use with an asterisk) and then, when
// the --available flag is set, the full list of versions available for
// installation; otherwise only the most recent available version.
func (x *ListCommand) Execute(args []string) error {
	interactor, err := NewInteractor()
	if err != nil {
		return err
	}

	installed := interactor.manager.Installed()
	if len(installed) == 0 {
		fmt.Printf("No webots version installed.\n")
	} else {
		for _, v := range installed {
			// Mark the currently active version with an asterisk.
			if interactor.manager.IsUsed(v) {
				fmt.Printf(" -* %s\n", v)
			} else {
				fmt.Printf(" - %s\n", v)
			}
		}
	}

	if x.All {
		fmt.Println("List of all available versions:")
		for _, v := range interactor.archive.AvailableVersions() {
			fmt.Printf(" - %s\n", v)
		}
	} else {
		vers := interactor.archive.AvailableVersions()
		if len(vers) == 0 {
			// Lowercased and grammatically fixed (was "No version are available").
			return fmt.Errorf("no versions are available")
		}
		// Versions are assumed sorted ascending by the archive — the last
		// element is the most recent one.
		fmt.Printf("Last available version is %s\n",
			vers[len(vers)-1])
	}
	return nil
}
// Execute performs the one-time system setup required by webots_manager by
// delegating to the symlink manager's system initialisation.
func (x *InitCommand) Execute(args []string) error {
	return SymlinkManagerSystemInit()
}
// Execute installs the webots version named in args[0]. If no installed
// version is currently active, or if the --use flag was given, the freshly
// installed version is activated afterwards.
func (x *InstallCommand) Execute(args []string) error {
	if len(args) != 1 {
		return fmt.Errorf("Missing version to install")
	}
	version, err := ParseWebotsVersion(args[0])
	if err != nil {
		return err
	}
	interactor, err := NewInteractor()
	if err != nil {
		return err
	}
	if err = interactor.manager.Install(version); err != nil {
		return err
	}

	// Determine whether any installed version is currently active.
	inUse := false
	for _, installed := range interactor.manager.Installed() {
		if interactor.manager.IsUsed(installed) {
			inUse = true
			break
		}
	}

	if !inUse || x.Use {
		if err = interactor.manager.Use(version); err != nil {
			return err
		}
		log.Printf("Using now version %s", version)
	}
	return nil
}
// Execute switches the system to the webots version named in args[0].
func (x *UseCommand) Execute(args []string) error {
	if len(args) != 1 {
		return fmt.Errorf("Missing version to use")
	}
	version, err := ParseWebotsVersion(args[0])
	if err != nil {
		return err
	}
	interactor, err := NewInteractor()
	if err != nil {
		return err
	}
	return interactor.manager.Use(version)
}
// AddTemplateCommand carries the options of the "add-template" sub-command.
type AddTemplateCommand struct {
	// Only restricts the template to the listed versions (whitelist).
	Only []string `short:"o" long:"only" description:"apply template only for these versions"`
	// Except excludes the listed versions (blacklist).
	Except []string `short:"e" long:"except" description:"do not apply template on these versions"`
}
func (x *AddTemplateCommand) Execute(args []string) error {
if len(args) != 2 {
return fmt.Errorf("Need file to read and where to install")
}
<|fim▁hole|> var white, black []WebotsVersion
for _, w := range x.Only {
v, err := ParseWebotsVersion(w)
if err != nil {
return err
}
white = append(white, v)
}
for _, w := range x.Except {
v, err := ParseWebotsVersion(w)
if err != nil {
return err
}
black = append(black, v)
}
xx, err := NewInteractor()
if err != nil {
return err
}
err = xx.templates.RegisterTemplate(args[0], args[1])
if err != nil {
return err
}
err = xx.templates.WhiteList(args[1], white)
if err != nil {
return err
}
err = xx.templates.BlackList(args[1], black)
if err != nil {
return err
}
return xx.manager.ApplyAllTemplates()
}
type RemoveTemplateCommand struct{}
// Execute removes the template installed at args[0] from the template
// registry and re-applies the remaining templates to every version.
func (x *RemoveTemplateCommand) Execute(args []string) error {
	if len(args) != 1 {
		return fmt.Errorf("Need install path to remove template from")
	}
	interactor, err := NewInteractor()
	if err != nil {
		return err
	}
	if err := interactor.templates.RemoveTemplate(args[0]); err != nil {
		return err
	}
	return interactor.manager.ApplyAllTemplates()
}
func init() {
parser.AddCommand("list",
"Prints all the available version of webots",
"Prints all installed version, and current version in use. Can also prinst all available version for installation",
&ListCommand{})
parser.AddCommand("init",
"Initialiaze the system for webots_manager",
"Initialiaze the system with all requirement for webots_manager",
&InitCommand{})
parser.AddCommand("install",
"Install a new webots version on the system",
"Installs a new webots version on the system",
&InstallCommand{})
parser.AddCommand("use",
"Use a webots version on the system",
"Use a webots version on the system. If it is not installed, it will first install it",
&UseCommand{})
parser.AddCommand("add-template",
"Adds a template file to all version",
"Install a file to all version of webots. -o and -e can be used to explicitely whitelist or blacklist a version",
&AddTemplateCommand{})
parser.AddCommand("remove-template",
"Removes a template file from all version",
"Removes a previously installed template from all version of webots.",
&RemoveTemplateCommand{})
}<|fim▁end|> | |
<|file_name|>logout.component.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>import { logoutAction } from '../../../rx/auth';
@Component({
selector: 'dng-logout',
templateUrl: './logout.component.html'
})
export class LogoutComponent implements OnInit {
constructor(private store: Store<any>) { }
ngOnInit() {
this.store.dispatch(logoutAction());
}
}<|fim▁end|> | import { Component, OnInit } from '@angular/core';
import { Store } from '@ngrx/store';
|
<|file_name|>content_disposition.rs<|end_file_name|><|fim▁begin|>// # References
//
// "The Content-Disposition Header Field" https://www.ietf.org/rfc/rfc2183.txt
// "The Content-Disposition Header Field in the Hypertext Transfer Protocol (HTTP)" https://www.ietf.org/rfc/rfc6266.txt
// "Returning Values from Forms: multipart/form-data" https://www.ietf.org/rfc/rfc2388.txt
// Browser conformance tests at: http://greenbytes.de/tech/tc2231/
// IANA assignment: http://www.iana.org/assignments/cont-disp/cont-disp.xhtml
use language_tags::LanguageTag;
use std::fmt;
use std::str::FromStr;
use unicase::UniCase;
use url::percent_encoding;
use header::{Header, HeaderFormat, parsing};
use header::shared::Charset;
/// The implied disposition of the content of the HTTP body
#[derive(Clone, Debug, PartialEq)]
pub enum DispositionType {
    /// Inline implies default processing
    Inline,
    /// Attachment implies that the recipient should prompt the user to save the response locally,
    /// rather than process it normally (as per its media type).
    Attachment,
    /// Extension type. Should be handled by recipients the same way as Attachment.
    /// Stores the raw disposition token as parsed (trimmed, case preserved).
    Ext(String)
}
/// A parameter to the disposition type
#[derive(Clone, Debug, PartialEq)]
pub enum DispositionParam {
/// A Filename consisting of a Charset, an optional LanguageTag, and finally a sequence of
/// bytes representing the filename
Filename(Charset, Option<LanguageTag>, Vec<u8>),<|fim▁hole|>
/// A `Content-Disposition` header, (re)defined in [RFC6266](https://tools.ietf.org/html/rfc6266)
///
/// The Content-Disposition response header field is used to convey
/// additional information about how to process the response payload, and
/// also can be used to attach additional metadata, such as the filename
/// to use when saving the response payload locally.
///
/// # ABNF
/// ```plain
/// content-disposition = "Content-Disposition" ":"
/// disposition-type *( ";" disposition-parm )
///
/// disposition-type = "inline" | "attachment" | disp-ext-type
/// ; case-insensitive
///
/// disp-ext-type = token
///
/// disposition-parm = filename-parm | disp-ext-parm
///
/// filename-parm = "filename" "=" value
/// | "filename*" "=" ext-value
///
/// disp-ext-parm = token "=" value
/// | ext-token "=" ext-value
///
/// ext-token = <the characters in token, followed by "*">
/// ```
///
/// # Example
/// ```
/// use hyper::header::{Headers, ContentDisposition, DispositionType, DispositionParam, Charset};
///
/// let mut headers = Headers::new();
/// headers.set(ContentDisposition {
/// disposition: DispositionType::Attachment,
/// parameters: vec![DispositionParam::Filename(
/// Charset::Iso_8859_1, // The character set for the bytes of the filename
/// None, // The optional language tag (see `language-tag` crate)
/// b"\xa9 Copyright 1989.txt".to_vec() // the actual bytes of the filename
/// )]
/// });
/// ```
#[derive(Clone, Debug, PartialEq)]
pub struct ContentDisposition {
    /// The disposition type (inline, attachment, or an extension token).
    pub disposition: DispositionType,
    /// Disposition parameters (e.g. `filename` / `filename*`), kept in the
    /// order in which they were parsed.
    pub parameters: Vec<DispositionParam>,
}
impl Header for ContentDisposition {
    fn header_name() -> &'static str {
        "Content-Disposition"
    }

    /// Parses `disposition-type *( ";" disposition-parm )` from a single raw
    /// header value.
    fn parse_header(raw: &[Vec<u8>]) -> ::Result<ContentDisposition> {
        parsing::from_one_raw_str(raw).and_then(|s: String| {
            let mut sections = s.split(';');
            // First section: the disposition type, matched case-insensitively.
            let disposition = match sections.next() {
                Some(s) => s.trim(),
                None => return Err(::Error::Header),
            };
            let mut cd = ContentDisposition {
                disposition: if UniCase(&*disposition) == UniCase("inline") {
                    DispositionType::Inline
                } else if UniCase(&*disposition) == UniCase("attachment") {
                    DispositionType::Attachment
                } else {
                    DispositionType::Ext(disposition.to_owned())
                },
                parameters: Vec::new(),
            };
            // Remaining sections are "key=value" parameters.
            for section in sections {
                let mut parts = section.splitn(2, '=');
                let key = if let Some(key) = parts.next() {
                    key.trim()
                } else {
                    return Err(::Error::Header);
                };
                let val = if let Some(val) = parts.next() {
                    val.trim()
                } else {
                    return Err(::Error::Header);
                };
                cd.parameters.push(
                    if UniCase(&*key) == UniCase("filename") {
                        // Plain `filename`: bytes taken verbatim with the
                        // surrounding quotes stripped, labelled as UTF-8.
                        DispositionParam::Filename(
                            Charset::Ext("UTF-8".to_owned()), None,
                            val.trim_matches('"').as_bytes().to_owned())
                    } else if UniCase(&*key) == UniCase("filename*") {
                        // RFC 5987 extended `filename*` value.
                        let (charset, opt_language, value) = try!(parse_ext_value(val));
                        DispositionParam::Filename(charset, opt_language, value)
                    } else {
                        // Anything else is kept as an extension parameter.
                        DispositionParam::Ext(key.to_owned(), val.trim_matches('"').to_owned())
                    }
                );
            }
            Ok(cd)
        })
    }
}
impl HeaderFormat for ContentDisposition {
    /// Serialization is delegated to the `Display` implementation below.
    #[inline]
    fn fmt_header(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Display::fmt(&self, f)
    }
}
impl fmt::Display for ContentDisposition {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Emit the disposition type first.
        match self.disposition {
            DispositionType::Inline => try!(write!(f, "inline")),
            DispositionType::Attachment => try!(write!(f, "attachment")),
            DispositionType::Ext(ref s) => try!(write!(f, "{}", s)),
        }
        for param in self.parameters.iter() {
            match param {
                &DispositionParam::Filename(ref charset, ref opt_lang, ref bytes) => {
                    // A UTF-8 filename with no language tag can use the simple
                    // `filename="..."` form; anything else needs the extended
                    // `filename*=charset'lang'percent-encoded` form.
                    let mut use_simple_format: bool = false;
                    if opt_lang.is_none() {
                        if let Charset::Ext(ref ext) = *charset {
                            if UniCase(&**ext) == UniCase("utf-8") {
                                use_simple_format = true;
                            }
                        }
                    }
                    if use_simple_format {
                        // The bytes must actually be valid UTF-8 here,
                        // otherwise formatting fails.
                        try!(write!(f, "; filename=\"{}\"",
                                    match String::from_utf8(bytes.clone()) {
                                        Ok(s) => s,
                                        Err(_) => return Err(fmt::Error),
                                    }));
                    } else {
                        try!(write!(f, "; filename*={}'", charset));
                        if let Some(ref lang) = *opt_lang {
                            try!(write!(f, "{}", lang));
                        };
                        try!(write!(f, "'"));
                        try!(f.write_str(
                            &*percent_encoding::percent_encode(
                                bytes, percent_encoding::HTTP_VALUE_ENCODE_SET)))
                    }
                },
                // Extension parameters are always written quoted.
                &DispositionParam::Ext(ref k, ref v) => try!(write!(f, "; {}=\"{}\"", k, v)),
            }
        }
        Ok(())
    }
}
/// Parsing of `ext-value`
/// https://tools.ietf.org/html/rfc5987#section-3.2
///
/// # ABNF
/// ```plain
/// ext-value = charset "'" [ language ] "'" value-chars
/// ; like RFC 2231's <extended-initial-value>
/// ; (see [RFC2231], Section 7)
///
/// charset = "UTF-8" / "ISO-8859-1" / mime-charset
///
/// mime-charset = 1*mime-charsetc
/// mime-charsetc = ALPHA / DIGIT
/// / "!" / "#" / "$" / "%" / "&"
/// / "+" / "-" / "^" / "_" / "`"
/// / "{" / "}" / "~"
/// ; as <mime-charset> in Section 2.3 of [RFC2978]
/// ; except that the single quote is not included
/// ; SHOULD be registered in the IANA charset registry
///
/// language = <Language-Tag, defined in [RFC5646], Section 2.1>
///
/// value-chars = *( pct-encoded / attr-char )
///
/// pct-encoded = "%" HEXDIG HEXDIG
/// ; see [RFC3986], Section 2.1
///
/// attr-char = ALPHA / DIGIT
/// / "!" / "#" / "$" / "&" / "+" / "-" / "."
/// / "^" / "_" / "`" / "|" / "~"
/// ; token except ( "*" / "'" / "%" )
/// ```
fn parse_ext_value(val: &str) -> ::Result<(Charset, Option<LanguageTag>, Vec<u8>)> {
    // The three components (charset, optional language, payload) are
    // delimited by single quotes.
    let mut pieces = val.splitn(3, '\'');

    // First component: the MIME charset name.
    let charset: Charset = match pieces.next() {
        None => return Err(::Error::Header),
        Some(name) => try!(FromStr::from_str(name)),
    };

    // Second component: an optional RFC 5646 language tag; an empty field
    // means "no language".
    let lang: Option<LanguageTag> = match pieces.next() {
        None => return Err(::Error::Header),
        Some("") => None,
        Some(tag) => match tag.parse() {
            Ok(parsed) => Some(parsed),
            Err(_) => return Err(::Error::Header),
        }
    };

    // Third component: the percent-encoded payload, decoded to raw bytes.
    let value: Vec<u8> = match pieces.next() {
        None => return Err(::Error::Header),
        Some(encoded) => percent_encoding::percent_decode(encoded.as_bytes()),
    };

    Ok((charset, lang, value))
}
#[cfg(test)]
mod tests {
use super::{ContentDisposition,DispositionType,DispositionParam};
use ::header::Header;
use ::header::shared::Charset;
#[test]
fn test_parse_header() {
assert!(ContentDisposition::parse_header([b"".to_vec()].as_ref()).is_err());
let a = [b"form-data; dummy=3; name=upload;\r\n filename=\"sample.png\"".to_vec()];
let a: ContentDisposition = ContentDisposition::parse_header(a.as_ref()).unwrap();
let b = ContentDisposition {
disposition: DispositionType::Ext("form-data".to_owned()),
parameters: vec![
DispositionParam::Ext("dummy".to_owned(), "3".to_owned()),
DispositionParam::Ext("name".to_owned(), "upload".to_owned()),
DispositionParam::Filename(
Charset::Ext("UTF-8".to_owned()),
None,
"sample.png".bytes().collect()) ]
};
assert_eq!(a, b);
let a = [b"attachment; filename=\"image.jpg\"".to_vec()];
let a: ContentDisposition = ContentDisposition::parse_header(a.as_ref()).unwrap();
let b = ContentDisposition {
disposition: DispositionType::Attachment,
parameters: vec![
DispositionParam::Filename(
Charset::Ext("UTF-8".to_owned()),
None,
"image.jpg".bytes().collect()) ]
};
assert_eq!(a, b);
let a = [b"attachment; filename*=UTF-8''%c2%a3%20and%20%e2%82%ac%20rates".to_vec()];
let a: ContentDisposition = ContentDisposition::parse_header(a.as_ref()).unwrap();
let b = ContentDisposition {
disposition: DispositionType::Attachment,
parameters: vec![
DispositionParam::Filename(
Charset::Ext("UTF-8".to_owned()),
None,
vec![0xc2, 0xa3, 0x20, b'a', b'n', b'd', 0x20,
0xe2, 0x82, 0xac, 0x20, b'r', b'a', b't', b'e', b's']) ]
};
assert_eq!(a, b);
}
#[test]
fn test_display() {
let a = [b"attachment; filename*=UTF-8'en'%C2%A3%20and%20%E2%82%AC%20rates".to_vec()];
let as_string = ::std::str::from_utf8(&(a[0])).unwrap();
let a: ContentDisposition = ContentDisposition::parse_header(a.as_ref()).unwrap();
let display_rendered = format!("{}",a);
assert_eq!(as_string, display_rendered);
let a = [b"attachment; filename*=UTF-8''black%20and%20white.csv".to_vec()];
let a: ContentDisposition = ContentDisposition::parse_header(a.as_ref()).unwrap();
let display_rendered = format!("{}",a);
assert_eq!("attachment; filename=\"black and white.csv\"".to_owned(), display_rendered);
let a = [b"attachment; filename=colourful.csv".to_vec()];
let a: ContentDisposition = ContentDisposition::parse_header(a.as_ref()).unwrap();
let display_rendered = format!("{}",a);
assert_eq!("attachment; filename=\"colourful.csv\"".to_owned(), display_rendered);
}
}<|fim▁end|> | /// Extension type consisting of token and value. Recipients should ignore unrecognized
/// parameters.
Ext(String, String)
} |
<|file_name|>tap_config_base.cc<|end_file_name|><|fim▁begin|>#include "extensions/common/tap/tap_config_base.h"
#include "envoy/config/tap/v3/common.pb.h"
#include "envoy/data/tap/v3/common.pb.h"
#include "envoy/data/tap/v3/wrapper.pb.h"
#include "common/common/assert.h"
#include "common/common/fmt.h"
#include "common/config/version_converter.h"
#include "common/protobuf/utility.h"
#include "extensions/common/matcher/matcher.h"
#include "absl/container/fixed_array.h"
namespace Envoy {
namespace Extensions {
namespace Common {
namespace Tap {
using namespace Matcher;
// Appends up to max_buffered_bytes of the range
// [buffer_start_offset, buffer_start_offset + buffer_length_to_copy) from
// `data` to the body's bytes field. Returns true when the copy had to be
// truncated because the byte budget was exhausted.
//
// Note that max_buffered_bytes is assumed to include any data already
// contained in output_body; this accounts for callers tracking the budget
// across multiple body objects.
//
// TODO(mattklein123): Figure out if we can use the buffer API here directly
// in some way. This is not trivial if we want to avoid extra copies since we
// end up appending to the existing protobuf string.
bool Utility::addBufferToProtoBytes(envoy::data::tap::v3::Body& output_body,
                                    uint32_t max_buffered_bytes, const Buffer::Instance& data,
                                    uint32_t buffer_start_offset, uint32_t buffer_length_to_copy) {
  ASSERT(buffer_start_offset + buffer_length_to_copy <= data.length());
  const uint32_t bytes_to_copy = std::min(max_buffered_bytes, buffer_length_to_copy);

  Buffer::RawSliceVector slices = data.getRawSlices();
  trimSlices(slices, buffer_start_offset, bytes_to_copy);
  for (const Buffer::RawSlice& slice : slices) {
    output_body.mutable_as_bytes()->append(static_cast<const char*>(slice.mem_), slice.len_);
  }

  const bool truncated = bytes_to_copy < buffer_length_to_copy;
  if (truncated) {
    output_body.set_truncated(true);
  }
  return truncated;
}
// Builds the base tap configuration: reads the rx/tx buffer limits (with
// defaults), requires exactly one configured sink, wires that sink (either
// the admin streamer or an owned file-per-tap sink), and compiles the match
// predicate into matchers_.
TapConfigBaseImpl::TapConfigBaseImpl(envoy::config::tap::v3::TapConfig&& proto_config,
                                     Common::Tap::Sink* admin_streamer)
    : max_buffered_rx_bytes_(PROTOBUF_GET_WRAPPED_OR_DEFAULT(
          proto_config.output_config(), max_buffered_rx_bytes, DefaultMaxBufferedBytes)),
      max_buffered_tx_bytes_(PROTOBUF_GET_WRAPPED_OR_DEFAULT(
          proto_config.output_config(), max_buffered_tx_bytes, DefaultMaxBufferedBytes)),
      streaming_(proto_config.output_config().streaming()) {
  // Only a single sink is currently supported.
  ASSERT(proto_config.output_config().sinks().size() == 1);
  // TODO(mattklein123): Add per-sink checks to make sure format makes sense. I.e., when using
  // streaming, we should require the length delimited version of binary proto, etc.
  sink_format_ = proto_config.output_config().sinks()[0].format();
  switch (proto_config.output_config().sinks()[0].output_sink_type_case()) {
  case envoy::config::tap::v3::OutputSink::OutputSinkTypeCase::kStreamingAdmin:
    ASSERT(admin_streamer != nullptr, "admin output must be configured via admin");
    // TODO(mattklein123): Graceful failure, error message, and test if someone specifies an
    // admin stream output with the wrong format.
    RELEASE_ASSERT(sink_format_ == envoy::config::tap::v3::OutputSink::JSON_BODY_AS_BYTES ||
                       sink_format_ == envoy::config::tap::v3::OutputSink::JSON_BODY_AS_STRING,
                   "admin output only supports JSON formats");
    // The admin streamer is externally owned; we only borrow it.
    sink_to_use_ = admin_streamer;
    break;
  case envoy::config::tap::v3::OutputSink::OutputSinkTypeCase::kFilePerTap:
    // The file sink is owned by this config; sink_to_use_ aliases it.
    sink_ =
        std::make_unique<FilePerTapSink>(proto_config.output_config().sinks()[0].file_per_tap());
    sink_to_use_ = sink_.get();
    break;
  default:
    NOT_REACHED_GCOVR_EXCL_LINE;
  }

  envoy::config::common::matcher::v3::MatchPredicate match;
  if (proto_config.has_match()) {
    // Use the match field whenever it is set.
    match = proto_config.match();
  } else if (proto_config.has_match_config()) {
    // Fallback to use the deprecated match_config field and upgrade (wire cast) it to the new
    // MatchPredicate which is backward compatible with the old MatchPredicate originally
    // introduced in the Tap filter.
    Config::VersionConverter::upgrade(proto_config.match_config(), match);
  } else {
    throw EnvoyException(fmt::format("Neither match nor match_config is set in TapConfig: {}",
                                     proto_config.DebugString()));
  }
  buildMatcher(match, matchers_);
}
// Returns the root (first) compiled matcher; the constructor's call to
// buildMatcher() guarantees matchers_ is non-empty.
const Matcher& TapConfigBaseImpl::rootMatcher() const {
  ASSERT(!matchers_.empty());
  return *matchers_[0];
}
namespace {
// Moves the body payload from the "as_bytes" oneof member into "as_string"
// without copying, so that JSON serialization emits a plain string instead
// of base64-encoded bytes.
void swapBytesToString(envoy::data::tap::v3::Body& body) {
  body.set_allocated_as_string(body.release_as_bytes());
}
} // namespace
void Utility::bodyBytesToString(envoy::data::tap::v3::TraceWrapper& trace,
envoy::config::tap::v3::OutputSink::Format sink_format) {
// Swap the "bytes" string into the "string" string. This is done purely so that JSON
// serialization will serialize as a string vs. doing base64 encoding.
if (sink_format != envoy::config::tap::v3::OutputSink::JSON_BODY_AS_STRING) {
return;
}
switch (trace.trace_case()) {
case envoy::data::tap::v3::TraceWrapper::TraceCase::kHttpBufferedTrace: {
auto* http_trace = trace.mutable_http_buffered_trace();
if (http_trace->has_request() && http_trace->request().has_body()) {
swapBytesToString(*http_trace->mutable_request()->mutable_body());
}
if (http_trace->has_response() && http_trace->response().has_body()) {
swapBytesToString(*http_trace->mutable_response()->mutable_body());
}
break;
}
case envoy::data::tap::v3::TraceWrapper::TraceCase::kHttpStreamedTraceSegment: {
auto* http_trace = trace.mutable_http_streamed_trace_segment();<|fim▁hole|> if (http_trace->has_response_body_chunk()) {
swapBytesToString(*http_trace->mutable_response_body_chunk());
}
break;
}
case envoy::data::tap::v3::TraceWrapper::TraceCase::kSocketBufferedTrace: {
auto* socket_trace = trace.mutable_socket_buffered_trace();
for (auto& event : *socket_trace->mutable_events()) {
if (event.has_read()) {
swapBytesToString(*event.mutable_read()->mutable_data());
} else {
ASSERT(event.has_write());
swapBytesToString(*event.mutable_write()->mutable_data());
}
}
break;
}
case envoy::data::tap::v3::TraceWrapper::TraceCase::kSocketStreamedTraceSegment: {
auto& event = *trace.mutable_socket_streamed_trace_segment()->mutable_event();
if (event.has_read()) {
swapBytesToString(*event.mutable_read()->mutable_data());
} else if (event.has_write()) {
swapBytesToString(*event.mutable_write()->mutable_data());
}
break;
}
case envoy::data::tap::v3::TraceWrapper::TraceCase::TRACE_NOT_SET:
NOT_REACHED_GCOVR_EXCL_LINE;
}
}
// Normalizes body bytes for the configured sink format (bytes -> string for
// JSON-as-string output) and forwards the trace to the per-tap sink handle.
void TapConfigBaseImpl::PerTapSinkHandleManagerImpl::submitTrace(TraceWrapperPtr&& trace) {
  Utility::bodyBytesToString(*trace, parent_.sink_format_);
  handle_->submitTrace(std::move(trace), parent_.sink_format_);
}
// Writes one trace to this tap's output file, lazily opening the file on
// first use. The file name is "<configured prefix>_<trace id>" plus an
// extension derived from the output format; the serialization method is
// then chosen per format.
void FilePerTapSink::FilePerTapSinkHandle::submitTrace(
    TraceWrapperPtr&& trace, envoy::config::tap::v3::OutputSink::Format format) {
  if (!output_file_.is_open()) {
    std::string path = fmt::format("{}_{}", parent_.config_.path_prefix(), trace_id_);
    // Pick the file extension that matches the serialization format.
    switch (format) {
    case envoy::config::tap::v3::OutputSink::PROTO_BINARY:
      path += MessageUtil::FileExtensions::get().ProtoBinary;
      break;
    case envoy::config::tap::v3::OutputSink::PROTO_BINARY_LENGTH_DELIMITED:
      path += MessageUtil::FileExtensions::get().ProtoBinaryLengthDelimited;
      break;
    case envoy::config::tap::v3::OutputSink::PROTO_TEXT:
      path += MessageUtil::FileExtensions::get().ProtoText;
      break;
    case envoy::config::tap::v3::OutputSink::JSON_BODY_AS_BYTES:
    case envoy::config::tap::v3::OutputSink::JSON_BODY_AS_STRING:
      path += MessageUtil::FileExtensions::get().Json;
      break;
    default:
      NOT_REACHED_GCOVR_EXCL_LINE;
    }
    ENVOY_LOG_MISC(debug, "Opening tap file for [id={}] to {}", trace_id_, path);
    // When reading and writing binary files, we need to be sure std::ios_base::binary
    // is set, otherwise we will not get the expected results on Windows
    output_file_.open(path, std::ios_base::binary);
  }

  ENVOY_LOG_MISC(trace, "Tap for [id={}]: {}", trace_id_, trace->DebugString());
  switch (format) {
  case envoy::config::tap::v3::OutputSink::PROTO_BINARY:
    trace->SerializeToOstream(&output_file_);
    break;
  case envoy::config::tap::v3::OutputSink::PROTO_BINARY_LENGTH_DELIMITED: {
    // Length-delimited framing: varint message size, then the message.
    Protobuf::io::OstreamOutputStream stream(&output_file_);
    Protobuf::io::CodedOutputStream coded_stream(&stream);
    coded_stream.WriteVarint32(trace->ByteSize());
    trace->SerializeWithCachedSizes(&coded_stream);
    break;
  }
  case envoy::config::tap::v3::OutputSink::PROTO_TEXT:
    output_file_ << trace->DebugString();
    break;
  case envoy::config::tap::v3::OutputSink::JSON_BODY_AS_BYTES:
  case envoy::config::tap::v3::OutputSink::JSON_BODY_AS_STRING:
    output_file_ << MessageUtil::getJsonStringFromMessage(*trace, true, true);
    break;
  default:
    NOT_REACHED_GCOVR_EXCL_LINE;
  }
}
} // namespace Tap
} // namespace Common
} // namespace Extensions
} // namespace Envoy<|fim▁end|> | if (http_trace->has_request_body_chunk()) {
swapBytesToString(*http_trace->mutable_request_body_chunk());
} |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""<|fim▁hole|>Created on Mon Jul 23 13:23:20 2018
@author: BallBlueMeercat
"""
from distutils.core import setup
from Cython.Build import cythonize
setup(ext_modules = cythonize('firstderivs_cython.pyx'))<|fim▁end|> | |
<|file_name|>configure_websites.py<|end_file_name|><|fim▁begin|>import os
import re
import sys
"""
* Perform initial configuration to ensure that the server is set up to work with Burton's format
sudo chown -R ubuntu:ubuntu /var/www<|fim▁hole|> mkdir -p /var/www/default/public_html
mv /var/www/html/index.html /var/www/default/public_html # Ubuntu >=14.04
mv /var/www/index.html /var/www/default/public_html # Ubuntu <14.04
rm -rf /var/www/html
sudo vim /etc/apache2/sites-available/000-default.conf # Ubuntu >=14.04
sudo vim /etc/apache2/sites-available/default # Ubuntu <14.04
sudo a2enmod ssl
sudo service apache2 restart
* Enable / disable .htaccess for a site
* PHP configuration
"""
environment = ''
def main(env):
    """Show the website-configuration menu and dispatch the user's choice.

    Stores *env* in the module-level ``environment`` so the helper
    functions can reuse its prompt. Loops until the user chooses to go
    back ('0', returns True) or exit ('-', terminates the process).
    """
    global environment
    environment = env
    actions = {'1': restart_apache, '2': add_website, '3': add_ssl}
    while True:
        print("\nConfigure Websites\n")
        print("Please select an operation:")
        print(" 1. Restart Apache")
        print(" 2. Add a new website")
        print(" 3. Add SSL to website")
        print(" 0. Go Back")
        print(" -. Exit")
        operation = input(environment.prompt)
        if operation == '0':
            return True
        if operation == '-':
            sys.exit()
        action = actions.get(operation)
        if action is None:
            print("Invalid input.")
        else:
            action()
def restart_apache():
    """Restart the Apache service via sudo and echo the exit status."""
    print("\nAttempting to restart Apache:")
    # TODO: Print an error when the user does not have permissions to perform the action.
    print(os.system("sudo service apache2 restart"))
    return True
def add_website():
    """Create and enable a new Apache virtual host.

    Prompts for the site name, renders the example vhost config, installs
    it under /etc/apache2/sites-available, creates the document root and
    log directories, enables the site and restarts Apache.
    """
    global environment
    print('\nAdd website.\n')

    with open('./example-files/apache-site', 'r') as input_file:
        input_file_text = input_file.read()

    site_name = input('Website name (without www or http)' + environment.prompt)
    new_filename = '/etc/apache2/sites-available/%s.conf' % (site_name,)
    tmp_filename = '/tmp/%s.conf' % (site_name,)
    # TODO: Check that site_name is legal for both a domain name and a filename.
    while os.path.isfile(new_filename):
        print('Site exists! Please choose another.')
        site_name = input('Website name (without www or http)' + environment.prompt)
        new_filename = '/etc/apache2/sites-available/%s.conf' % (site_name,)
        tmp_filename = '/tmp/%s.conf' % (site_name,)

    new_config = re.sub('SITE', site_name, input_file_text)
    # BUG FIX: tmp_move was previously unbound when the write raised
    # PermissionError, crashing the "if tmp_move != 0" check below.
    tmp_move = 1  # assume failure until the sudo mv below succeeds
    try:
        with open(tmp_filename, 'w') as output_file:
            output_file.write(new_config)
        tmp_move = os.system("sudo mv %s %s" % (tmp_filename, new_filename))
    except PermissionError:
        print('\n\nError!')
        print('The current user does not have permission to perform this action.')
    if tmp_move != 0:
        print('\n\nError!')
        print('The current user does not have permission to perform this action.')

    current_user = str(os.getuid())
    result = os.system('sudo mkdir -p /var/www/%s/public_html/' % (site_name,))
    result = os.system('sudo mkdir -p /var/www/%s/logs/' % (site_name,))
    # BUG FIX: the original format string had three %s placeholders but only
    # two arguments, which raised TypeError at runtime; site_name added.
    result = os.system('sudo chown -R %s:%s /var/www/%s/'
                       % (current_user, current_user, site_name))
    result = os.system('sudo a2ensite %s.conf' % (site_name,))
    restart_apache()
    return True
def add_ssl():
    """Print an openssl command generating a key and CSR for a site.

    Asks for the site URL and whether a wildcard certificate is wanted,
    then prints the corresponding non-interactive openssl invocation.
    """
    global environment
    print("\nAdd SSL to website.\n")
    print("Please enter the URL of the website.\n")
    site_name = input(environment.prompt)
    print("Is this a wildcard certificate? (y/N)\n")
    is_wildcard = input(environment.prompt).lower() == 'y'
    if is_wildcard:
        print("Generating wildcard cert for *.%s" % (site_name,))
        wildcard = '*.'
    else:
        print("Generating cert for %s" % (site_name,))
        wildcard = ''
    # http://serverfault.com/questions/649990/non-interactive-creation-of-ssl-certificate-requests
    command_template = ("openssl req -new -newkey rsa:2048 -nodes -sha256 "
                        "-keyout %s.key -out %s.csr -subj \"/CN=%s%s\"")
    print(command_template % (site_name, site_name, wildcard, site_name))
    return True
<|file_name|>Register.cpp<|end_file_name|><|fim▁begin|>// Register.cpp : implementation file
//
#include "stdafx.h"
#include "Portal.h"
#include "Register.h"
#include "afxdialogex.h"
// Register dialog
IMPLEMENT_DYNAMIC(Register, CDialogEx)
// Constructs the registration dialog and zero-initializes every bound
// field (ID, password, enter year, phone, address, name).
Register::Register(CWnd* pParent /*=NULL*/)
	: CDialogEx(Register::IDD, pParent)
	, m_ID(_T(""))
	, m_Password(_T(""))
	, m_EnterYear(0)
	, m_Phone(_T(""))
	, m_Address(_T(""))
	, m_Name(_T(""))
{
#ifndef _WIN32_WCE
	EnableActiveAccessibility();
#endif
	// The dialog is also exposed as an OLE automation object.
	EnableAutomation();
}
// Destructor: nothing to release explicitly.
Register::~Register()
{
}
// Called when the last automation reference to this object is released.
void Register::OnFinalRelease()
{
	// When the last reference for an automation object is released
	// OnFinalRelease is called. The base class will automatically
	// deletes the object. Add additional cleanup required for your
	// object before calling the base class.
	CDialogEx::OnFinalRelease();
}
// Binds dialog controls to member variables (MFC DDX). The numeric control
// IDs come from the dialog resource — TODO confirm against the .rc file.
void Register::DoDataExchange(CDataExchange* pDX)
{
	CDialogEx::DoDataExchange(pDX);
	DDX_Text(pDX, 1007, m_ID);        // student ID
	DDX_Text(pDX, 1009, m_Password);  // password
	DDX_Control(pDX, 1025, m_ComboMajor);  // major selection combo
	DDX_Text(pDX, 1013, m_EnterYear); // enrollment year
	DDX_Text(pDX, 1015, m_Phone);
	DDX_Text(pDX, 1012, m_Address);
	DDX_Text(pDX, 1008, m_Name);
	DDX_Control(pDX, IDC_COMBO1, m_ComboGender);  // gender selection combo
}
// MFC message map: routes the two button clicks to their handlers
// (1026 = confirm/OK, 1027 = cancel — see the handlers below).
BEGIN_MESSAGE_MAP(Register, CDialogEx)
	ON_BN_CLICKED(1026, &Register::OnBnClicked1026)
	ON_BN_CLICKED(1027, &Register::OnBnClicked1027)
END_MESSAGE_MAP()

// Empty dispatch map: no automation methods/properties are exposed yet.
BEGIN_DISPATCH_MAP(Register, CDialogEx)
END_DISPATCH_MAP()

// Note: we add support for IID_IRegister to support typesafe binding
// from VBA. This IID must match the GUID that is attached to the
// dispinterface in the .IDL file.
// {26007234-3268-44D1-AEA7-4EB880885DB9}
static const IID IID_IRegister =
{ 0x26007234, 0x3268, 0x44D1, { 0xAE, 0xA7, 0x4E, 0xB8, 0x80, 0x88, 0x5D, 0xB9 } };

BEGIN_INTERFACE_MAP(Register, CDialogEx)
	INTERFACE_PART(Register, IID_IRegister, Dispatch)
END_INTERFACE_MAP()
<|fim▁hole|>
// Register message handlers
// 100835 한승환 — author tag (original bytes were CP949 Korean mis-decoded as Latin-1; reconstruction unverified)
// Dialog initialization: pre-fills the enrollment year with the current
// system year and populates the major combo box from the database for the
// current year/semester. (The original Korean comments were mojibake-encoded;
// translations below are best-effort — TODO confirm with the original author.)
BOOL Register::OnInitDialog()
{
	CDialogEx::OnInitDialog();

	// Get the current system time.
	SYSTEMTIME curTime;
	GetSystemTime(&curTime);
	int semester = 0;
	if (curTime.wMonth > 2 && curTime.wMonth < 7)
		semester = 1; // months 3-6 -> first semester
	else if (curTime.wMonth > 7 && curTime.wMonth <= 12)
		semester = 2; // months 8-12 -> second semester
	// NOTE(review): months 1, 2 and 7 leave semester == 0 — confirm intended.
	m_EnterYear = curTime.wYear;
	UpdateData(FALSE);

	// Query the majors offered in the current year/semester and fill the combo.
	CSUBJECT_info<CSUBJECT_GETMAJOR> info;
	UpdateData(TRUE);
	info.m_YEAR = curTime.wYear;
	info.m_dwYEARLength = sizeof(LONG);
	info.m_dwYEARStatus = DBSTATUS_S_OK;
	info.m_SEMESTER = semester;
	info.m_dwSEMESTERLength = sizeof(LONG);
	info.m_dwSEMESTERStatus = DBSTATUS_S_OK;
	if (info.OpenAll() == S_OK)
	{
		while (info.MoveNext() == S_OK)
			m_ComboMajor.AddString(info.m_MAJOR);
		m_ComboMajor.SetCurSel(0); // select the first entry by default
	}
	return TRUE;  // return TRUE unless you set the focus to a control
	// EXCEPTION: OCX Property Pages should return FALSE
}
// È®ÀÎ ¹öưÀ» ´·¶À» ¶§
// Handler for the confirm (OK) button: reads the form fields and inserts
// the new student record into the database, then closes the dialog.
// (Message-box strings are stored in the source's original non-UTF-8
// encoding and must be kept byte-identical.)
void Register::OnBnClicked1026()
{
	CRegister_Student<CRegister_StudentAccessor> info;
	UpdateData(TRUE);
	info.m_SID = atoi(m_ID);
	info.m_PASSWORD = atoi(m_Password);
	info.m_Curri_Year = m_EnterYear;
	CString major;
	m_ComboMajor.GetLBText(m_ComboMajor.GetCurSel(), major);
	CString Gender;
	m_ComboGender.GetLBText(m_ComboGender.GetCurSel(), Gender);
	strcpy_s(info.m_MAJOR, major);
	strcpy_s(info.m_Phone_Number, m_Phone);
	strcpy_s(info.m_SNAME, m_Name);
	strcpy_s(info.m_ADDERSS, m_Address);
	strcpy_s(info.m_GENDER, Gender);
	// OpenAll() == S_OK -> success message box; otherwise failure message box.
	if (info.OpenAll() == S_OK)
		AfxMessageBox("ȸ¿ø µî·Ï ¿Ï·á");
	else
		AfxMessageBox("µî·Ï ½ÇÆÐ");
	OnOK();
}
// Cancel button handler: dismiss the dialog without registering.
void Register::OnBnClicked1027()
{
	// TODO: Add your control notification handler code here
	OnCancel();
}
// 100835 Han Seung-hwan
<|file_name|>list.js<|end_file_name|><|fim▁begin|>var async = require('async');
var settings = require('../settings/settings.js');
var Post = require('../models/post.js');
var List = require('../models/list.js');
module.exports = function(app){
// GET /getArchive — return the post archive for the sidebar.
// Responds with the archive array on success, {status:404} on failure,
// and an empty response when no session ID is present.
app.get('/getArchive', function (req, res, next) {
    if (!req.sessionID) {
        res.end();
        return;
    }
    var archiveList = new List({
        pageIndex: 1,
        pageSize: settings.pageSize,
        queryObj: {}
    });
    archiveList.getArchive(function (err, archives) {
        if (err || !archives) {
            res.json({status:404,message:''});
            res.end();
            return;
        }
        res.json(archives);
        res.end();
    });
});
// GET /getPageCount — report how many pages of posts exist.
// Responds with ceil(totalPosts / pageSize), {status:404} when the count
// is zero or the query fails, and empty when no session ID is present.
app.get('/getPageCount', function (req, res, next) {
    if (!req.sessionID) {
        res.end();
        return;
    }
    var counter = new List({
        pageIndex: 1,
        pageSize: settings.pageSize,
        queryObj: {}
    });
    counter.getCount(function (err, total) {
        if (err || total == 0) {
            res.json({status:404,message:''});
            res.end();
            return;
        }
        res.json(Math.ceil(total / settings.pageSize));
        res.end();
    });
});
app.get('/',function(req,res,next){
if(req.sessionID){
var list = new List({
pageIndex:1,
pageSize:settings.pageSize,
queryObj:{}
});
list.getList(function(err,docs){
if(!(err)&&docs){
res.json(docs);
res.end();
}else{
res.json({status:404,message:''});
res.end();
}<|fim▁hole|> }
});
}<|fim▁end|> | });
}else{
res.end(); |
<|file_name|>MetaMmCorpusWrapper.py<|end_file_name|><|fim▁begin|>from gensim.corpora import MmCorpus
from gensim.utils import unpickle
class MetaMmCorpusWrapper:
    """Iterate a Matrix Market corpus together with its pickled per-document metadata."""

    def __init__(self, filename):
        # Load the MM corpus plus the side-car metadata pickle that gensim
        # writes next to it (``<filename>.metadata.cpickle``).
        self.corpus = MmCorpus(filename)
        self.metadata = unpickle(filename + ".metadata.cpickle")

    def __iter__(self):
        # Pair each document with the metadata stored under its position.
        for position, document in enumerate(self.corpus):
            yield document, self.metadata[position]
<|file_name|>h2o.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
"""
h2o -- module for using H2O services.
:copyright: (c) 2016 H2O.ai
:license: Apache License Version 2.0 (see LICENSE for details)
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import os
import warnings
import webbrowser
import types
from h2o.backend import H2OConnection
from h2o.backend import H2OConnectionConf
from h2o.backend import H2OLocalServer
from h2o.exceptions import H2OConnectionError, H2OValueError
from h2o.utils.config import H2OConfigReader
from h2o.utils.shared_utils import check_frame_id, deprecated, gen_header, py_tmp_key, quoted, urlopen
from h2o.utils.typechecks import assert_is_type, assert_satisfies, BoundInt, BoundNumeric, I, is_type, numeric, U
from .estimators.deeplearning import H2OAutoEncoderEstimator
from .estimators.deeplearning import H2ODeepLearningEstimator
from .estimators.deepwater import H2ODeepWaterEstimator
from .estimators.estimator_base import H2OEstimator
from .estimators.xgboost import H2OXGBoostEstimator
from .estimators.gbm import H2OGradientBoostingEstimator
from .estimators.glm import H2OGeneralizedLinearEstimator
from .estimators.glrm import H2OGeneralizedLowRankEstimator
from .estimators.kmeans import H2OKMeansEstimator
from .estimators.naive_bayes import H2ONaiveBayesEstimator
from .estimators.pca import H2OPrincipalComponentAnalysisEstimator
from .estimators.random_forest import H2ORandomForestEstimator
from .estimators.stackedensemble import H2OStackedEnsembleEstimator
from .estimators.word2vec import H2OWord2vecEstimator
from .estimators.isolation_forest import H2OIsolationForestEstimator
from .expr import ExprNode
from .frame import H2OFrame
from .grid.grid_search import H2OGridSearch
from .job import H2OJob
from .model.model_base import ModelBase
from .transforms.decomposition import H2OSVD
from .utils.debugging import * # NOQA
from .utils.compatibility import * # NOQA
from .utils.compatibility import PY3
logging.basicConfig()
# An IPython deprecation warning is triggered after h2o.init(). Remove this once the deprecation has been resolved
warnings.filterwarnings('ignore', category=DeprecationWarning, module='.*/IPython/.*')
h2oconn = None # type: H2OConnection
def connect(server=None, url=None, ip=None, port=None, https=None, verify_ssl_certificates=None, auth=None,
            proxy=None, cookies=None, verbose=True, config=None):
    """
    Connect to an existing H2O server, remote or local.

    There are two ways to connect to a server: either pass a `server` parameter containing an instance of
    an H2OLocalServer, or specify `ip` and `port` of the server that you want to connect to.

    :param server: An H2OLocalServer instance to connect to (optional).
    :param url: Full URL of the server to connect to (can be used instead of `ip` + `port` + `https`).
    :param ip: The ip address (or host name) of the server where H2O is running.
    :param port: Port number that H2O service is listening to.
    :param https: Set to True to connect via https:// instead of http://.
    :param verify_ssl_certificates: When using https, setting this to False will disable SSL certificates verification.
    :param auth: Either a (username, password) pair for basic authentication, an instance of h2o.auth.SpnegoAuth
                 or one of the requests.auth authenticator objects.
    :param proxy: Proxy server address.
    :param cookies: Cookie (or list of) to add to request
    :param verbose: Set to False to disable printing connection status messages.
    :param config: Connection configuration mapping: either the connection parameters directly, or a dict
                   with a "connect_params" key holding them. When given, all other arguments are ignored.

    :returns: the new :class:`H2OConnection` object.
    """
    global h2oconn
    if config:
        if "connect_params" in config:
            h2oconn = _connect_with_conf(config["connect_params"])
        else:
            h2oconn = _connect_with_conf(config)
    else:
        h2oconn = H2OConnection.open(server=server, url=url, ip=ip, port=port, https=https,
                                     auth=auth, verify_ssl_certificates=verify_ssl_certificates,
                                     proxy=proxy, cookies=cookies,
                                     verbose=verbose)
    if verbose:
        h2oconn.cluster.show_status()
    return h2oconn
def api(endpoint, data=None, json=None, filename=None, save_to=None):
    """
    Perform a REST API request to a previously connected server.

    This function is mostly for internal purposes, but may occasionally be useful for direct access to
    the backend H2O server. It has same parameters as :meth:`H2OConnection.request <h2o.backend.H2OConnection.request>`.

    :param endpoint: HTTP method and URL path, e.g. ``"POST /3/ImportFilesMulti"``.
    :param data: form-encoded request payload.
    :param json: JSON request payload.
    :param filename: file to upload with the request.
    :param save_to: path where the response body should be written.
    """
    # type checks are performed in H2OConnection class
    _check_connection()
    return h2oconn.request(endpoint, data=data, json=json, filename=filename, save_to=save_to)
def connection():
    """Return the current :class:`H2OConnection` handler (None if no connection has been made yet)."""
    return h2oconn
def version_check():
    """
    Used to verify that h2o-python module and the H2O server are compatible with each other.

    :raises H2OConnectionError: if there is no active connection, or if the client and server
        versions differ (the message depends on whether the server build is unknown, a developer
        build ("99999"), or a regular release).
    """
    from .__init__ import __version__ as ver_pkg
    ci = h2oconn.cluster
    if not ci:
        raise H2OConnectionError("Connection not initialized. Did you run h2o.connect()?")
    ver_h2o = ci.version
    # "SUBST_PROJECT_VERSION" is the unreplaced build-time placeholder; treat as unknown.
    if ver_pkg == "SUBST_PROJECT_VERSION": ver_pkg = "UNKNOWN"
    if str(ver_h2o) != str(ver_pkg):
        branch_name_h2o = ci.branch_name
        build_number_h2o = ci.build_number
        if build_number_h2o is None or build_number_h2o == "unknown":
            raise H2OConnectionError(
                "Version mismatch. H2O is version {0}, but the h2o-python package is version {1}. "
                "Upgrade H2O and h2o-Python to latest stable version - "
                "http://h2o-release.s3.amazonaws.com/h2o/latest_stable.html"
                "".format(ver_h2o, ver_pkg))
        elif build_number_h2o == "99999":
            raise H2OConnectionError(
                "Version mismatch. H2O is version {0}, but the h2o-python package is version {1}. "
                "This is a developer build, please contact your developer."
                "".format(ver_h2o, ver_pkg))
        else:
            raise H2OConnectionError(
                "Version mismatch. H2O is version {0}, but the h2o-python package is version {1}. "
                "Install the matching h2o-Python version from - "
                "http://h2o-release.s3.amazonaws.com/h2o/{2}/{3}/index.html."
                "".format(ver_h2o, ver_pkg, branch_name_h2o, build_number_h2o))
    # Check age of the install
    if ci.build_too_old:
        print("Warning: Your H2O cluster version is too old ({})! Please download and install the latest "
              "version from http://h2o.ai/download/".format(ci.build_age))
def init(url=None, ip=None, port=None, name=None, https=None, insecure=None, username=None, password=None,
         cookies=None, proxy=None, start_h2o=True, nthreads=-1, ice_root=None, log_dir=None, log_level=None,
         enable_assertions=True, max_mem_size=None, min_mem_size=None, strict_version_check=None, ignore_config=False,
         extra_classpath=None, jvm_custom_args=None, bind_to_localhost=True, **kwargs):
    """
    Attempt to connect to a local server, or if not successful start a new server and connect to it.

    :param url: Full URL of the server to connect to (can be used instead of `ip` + `port` + `https`).
    :param ip: The ip address (or host name) of the server where H2O is running.
    :param port: Port number that H2O service is listening to.
    :param name: cloud name. If None while connecting to an existing cluster it will not check the cloud name.
        If set then will connect only if the target cloud name matches. If no instance is found and decides to start a local
        one then this will be used as the cloud name or a random one will be generated if set to None.
    :param https: Set to True to connect via https:// instead of http://.
    :param insecure: When using https, setting this to True will disable SSL certificates verification.
    :param username: Username and
    :param password: Password for basic authentication.
    :param cookies: Cookie (or list of) to add to each request.
    :param proxy: Proxy server address.
    :param start_h2o: If False, do not attempt to start an h2o server when connection to an existing one failed.
    :param nthreads: "Number of threads" option when launching a new h2o server.
    :param ice_root: Directory for temporary files for the new h2o server.
    :param log_dir: Directory for H2O logs to be stored if a new instance is started. Ignored if connecting to an existing node.
    :param log_level: The logger level for H2O if a new instance is started. One of TRACE,DEBUG,INFO,WARN,ERRR,FATA. Default is INFO. Ignored if connecting to an existing node.
    :param enable_assertions: Enable assertions in Java for the new h2o server.
    :param max_mem_size: Maximum memory to use for the new h2o server. Integer input will be evaluated as gigabytes. Other units can be specified by passing in a string (e.g. "160M" for 160 megabytes).
    :param min_mem_size: Minimum memory to use for the new h2o server. Integer input will be evaluated as gigabytes. Other units can be specified by passing in a string (e.g. "160M" for 160 megabytes).
    :param strict_version_check: If True, an error will be raised if the client and server versions don't match.
    :param ignore_config: Indicates whether a processing of a .h2oconfig file should be conducted or not. Default value is False.
    :param extra_classpath: List of paths to libraries that should be included on the Java classpath when starting H2O from Python.
    :param jvm_custom_args: Custom, user-defined arguments for the JVM H2O is instantiated in. Ignored if there is an instance of H2O already running and the client connects to it.
    :param bind_to_localhost: If True (default), a newly launched local server only accepts connections from
        the local machine; set to False to allow access from other hosts on the network.
    :param kwargs: (all other deprecated attributes)
    """
    global h2oconn
    assert_is_type(url, str, None)
    assert_is_type(ip, str, None)
    assert_is_type(port, int, str, None)
    assert_is_type(name, str, None)
    assert_is_type(https, bool, None)
    assert_is_type(insecure, bool, None)
    assert_is_type(username, str, None)
    assert_is_type(password, str, None)
    assert_is_type(cookies, str, [str], None)
    assert_is_type(proxy, {str: str}, None)
    assert_is_type(start_h2o, bool, None)
    assert_is_type(nthreads, int)
    assert_is_type(ice_root, str, None)
    assert_is_type(log_dir, str, None)
    assert_is_type(log_level, str, None)
    assert_satisfies(log_level, log_level in [None, "TRACE", "DEBUG", "INFO", "WARN", "ERRR", "FATA"])
    assert_is_type(enable_assertions, bool)
    assert_is_type(max_mem_size, int, str, None)
    assert_is_type(min_mem_size, int, str, None)
    assert_is_type(strict_version_check, bool, None)
    assert_is_type(extra_classpath, [str], None)
    assert_is_type(jvm_custom_args, [str], None)
    assert_is_type(bind_to_localhost, bool)
    assert_is_type(kwargs, {"proxies": {str: str}, "max_mem_size_GB": int, "min_mem_size_GB": int,
                            "force_connect": bool, "as_port": bool})

    def get_mem_size(mmint, mmgb):
        # Normalize a memory-size argument to bytes: plain ints < 1000 are
        # interpreted as gigabytes; strings take an M/G/T suffix.
        if not mmint:  # treat 0 and "" as if they were None
            if mmgb is None: return None
            return mmgb << 30
        if is_type(mmint, int):
            # If the user gives some small number just assume it's in Gigabytes...
            if mmint < 1000: return mmint << 30
            return mmint
        if is_type(mmint, str):
            last = mmint[-1].upper()
            num = mmint[:-1]
            if not (num.isdigit() and last in "MGT"):
                raise H2OValueError("Wrong format for a *_memory_size argument: %s (should be a number followed by "
                                    "a suffix 'M', 'G' or 'T')" % mmint)
            if last == "T": return int(num) << 40
            if last == "G": return int(num) << 30
            if last == "M": return int(num) << 20

    scheme = "https" if https else "http"
    proxy = proxy[scheme] if proxy is not None and scheme in proxy else \
        kwargs["proxies"][scheme] if "proxies" in kwargs and scheme in kwargs["proxies"] else None
    mmax = get_mem_size(max_mem_size, kwargs.get("max_mem_size_GB"))
    mmin = get_mem_size(min_mem_size, kwargs.get("min_mem_size_GB"))
    auth = (username, password) if username and password else None
    check_version = True
    verify_ssl_certificates = True
    # Apply the config file if ignore_config=False
    if not ignore_config:
        config = H2OConfigReader.get_config()
        if url is None and ip is None and port is None and https is None and "init.url" in config:
            url = config["init.url"]
        if proxy is None and "init.proxy" in config:
            proxy = config["init.proxy"]
        if cookies is None and "init.cookies" in config:
            cookies = config["init.cookies"].split(";")
        if auth is None and "init.username" in config and "init.password" in config:
            auth = (config["init.username"], config["init.password"])
        if strict_version_check is None:
            if "init.check_version" in config:
                check_version = config["init.check_version"].lower() != "false"
            elif os.environ.get("H2O_DISABLE_STRICT_VERSION_CHECK"):
                check_version = False
        else:
            check_version = strict_version_check
        if insecure is None:
            if "init.verify_ssl_certificates" in config:
                verify_ssl_certificates = config["init.verify_ssl_certificates"].lower() != "false"
        else:
            verify_ssl_certificates = not insecure

    if not start_h2o:
        print("Warning: if you don't want to start local H2O server, then use of `h2o.connect()` is preferred.")
    try:
        h2oconn = H2OConnection.open(url=url, ip=ip, port=port, name=name, https=https,
                                     verify_ssl_certificates=verify_ssl_certificates,
                                     auth=auth, proxy=proxy, cookies=cookies, verbose=True,
                                     _msgs=("Checking whether there is an H2O instance running at {url} ",
                                            "connected.", "not found."))
    except H2OConnectionError:
        # Backward compatibility: in init() port parameter really meant "baseport" when starting a local server...
        if port and not str(port).endswith("+") and not kwargs.get("as_port", False):
            port = str(port) + "+"
        if not start_h2o: raise
        if ip and not (ip == "localhost" or ip == "127.0.0.1"):
            raise H2OConnectionError('Can only start H2O launcher if IP address is localhost.')
        hs = H2OLocalServer.start(nthreads=nthreads, enable_assertions=enable_assertions, max_mem_size=mmax,
                                  min_mem_size=mmin, ice_root=ice_root, log_dir=log_dir, log_level=log_level,
                                  port=port, name=name,
                                  extra_classpath=extra_classpath, jvm_custom_args=jvm_custom_args,
                                  bind_to_localhost=bind_to_localhost)
        # Use the config-aware verify_ssl_certificates computed above. Previously this passed
        # `not insecure`, which silently ignored the .h2oconfig setting when `insecure` was None.
        h2oconn = H2OConnection.open(server=hs, https=https, verify_ssl_certificates=verify_ssl_certificates,
                                     auth=auth, proxy=proxy, cookies=cookies, verbose=True)
    if check_version:
        version_check()
    h2oconn.cluster.timezone = "UTC"
    h2oconn.cluster.show_status()
def lazy_import(path, pattern=None):
    """
    Import a single file or collection of files without parsing them.

    :param path: A path to a data file (remote or local), or a list of such paths.
    :param pattern: Character string containing a regular expression to match file(s) in the folder.
    :returns: either a :class:`H2OFrame` with the content of the provided file, or a list of such frames if
        importing multiple files.
    """
    assert_is_type(path, str, [str])
    assert_is_type(pattern, str, None)
    # Normalize a single path into a one-element list before delegating.
    if is_type(path, str):
        path_list = [path]
    else:
        path_list = path
    return _import_multi(path_list, pattern)
def _import_multi(paths, pattern):
    """
    Ask the backend to import the given files; return the destination frame keys.

    :param paths: list of file paths visible to the H2O cluster.
    :param pattern: optional regex used by the server to filter files in a folder.
    :raises ValueError: if the server reports any failed imports.
    """
    assert_is_type(paths, [str])
    assert_is_type(pattern, str, None)
    j = api("POST /3/ImportFilesMulti", {"paths": paths, "pattern": pattern})
    # Join with ", " (not ".") so multiple paths stay readable in the error message.
    if j["fails"]: raise ValueError("ImportFiles of '" + ", ".join(paths) + "' failed on " + str(j["fails"]))
    return j["destination_frames"]
def upload_file(path, destination_frame=None, header=0, sep=None, col_names=None, col_types=None,
                na_strings=None, skipped_columns=None):
    """
    Upload a dataset from the provided local path to the H2O cluster.

    Does a single-threaded push to H2O. Also see :meth:`import_file`.

    :param path: A path specifying the location of the data to upload.
    :param destination_frame: The unique hex key assigned to the imported file. If none is given, a key will
        be automatically generated.
    :param header: -1 means the first line is data, 0 means guess, 1 means first line is header.
    :param sep: The field separator character. Values on each line of the file are separated by
        this character. If not provided, the parser will automatically detect the separator.
    :param col_names: A list of column names for the file.
    :param col_types: A list of types or a dictionary of column names to types to specify whether columns
        should be forced to a certain type upon import parsing. If a list, the types for elements that are
        one will be guessed. The possible types a column may have are:

        - "unknown" - this will force the column to be parsed as all NA
        - "uuid" - the values in the column must be true UUID or will be parsed as NA
        - "string" - force the column to be parsed as a string
        - "numeric" - force the column to be parsed as numeric. H2O will handle the compression of the numeric
          data in the optimal manner.
        - "enum" - force the column to be parsed as a categorical column.
        - "time" - force the column to be parsed as a time column. H2O will attempt to parse the following
          list of date time formats: (date) "yyyy-MM-dd", "yyyy MM dd", "dd-MMM-yy", "dd MMM yy", (time)
          "HH:mm:ss", "HH:mm:ss:SSS", "HH:mm:ss:SSSnnnnnn", "HH.mm.ss" "HH.mm.ss.SSS", "HH.mm.ss.SSSnnnnnn".
          Times can also contain "AM" or "PM".

    :param na_strings: A list of strings, or a list of lists of strings (one list per column), or a dictionary
        of column names to strings which are to be interpreted as missing values.
    :param skipped_columns: an integer list of column indices to skip and not parsed into the final frame from the import file.

    :returns: a new :class:`H2OFrame` instance.

    :examples:
        >>> frame = h2o.upload_file("/path/to/local/data")
    """
    coltype = U(None, "unknown", "uuid", "string", "float", "real", "double", "int", "numeric",
                "categorical", "factor", "enum", "time")
    natype = U(str, [str])
    assert_is_type(path, str)
    assert_is_type(destination_frame, str, None)
    assert_is_type(header, -1, 0, 1)
    assert_is_type(sep, None, I(str, lambda s: len(s) == 1))
    assert_is_type(col_names, [str], None)
    assert_is_type(col_types, [coltype], {str: coltype}, None)
    assert_is_type(na_strings, [natype], {str: natype}, None)
    # Use identity comparison with None (PEP 8); the message matches the documented
    # contract of a list of column *indices*, not names.
    assert (skipped_columns is None) or isinstance(skipped_columns, list), \
        "The skipped_columns should be a list of column indices!"
    check_frame_id(destination_frame)
    if path.startswith("~"):
        path = os.path.expanduser(path)
    return H2OFrame()._upload_parse(path, destination_frame, header, sep, col_names, col_types, na_strings, skipped_columns)
def import_file(path=None, destination_frame=None, parse=True, header=0, sep=None, col_names=None, col_types=None,
                na_strings=None, pattern=None, skipped_columns=None):
    """
    Import a dataset that is already on the cluster.

    The path to the data must be a valid path for each node in the H2O cluster. If some node in the H2O cluster
    cannot see the file, then an exception will be thrown by the H2O cluster. Does a parallel/distributed
    multi-threaded pull of the data. The main difference between this method and :func:`upload_file` is that
    the latter works with local files, whereas this method imports remote files (i.e. files local to the server).
    If you running H2O server on your own maching, then both methods behave the same.

    :param path: path(s) specifying the location of the data to import or a path to a directory of files to import
    :param destination_frame: The unique hex key assigned to the imported file. If none is given, a key will be
        automatically generated.
    :param parse: If True, the file should be parsed after import. If False, then a list is returned containing the file path.
    :param header: -1 means the first line is data, 0 means guess, 1 means first line is header.
    :param sep: The field separator character. Values on each line of the file are separated by
        this character. If not provided, the parser will automatically detect the separator.
    :param col_names: A list of column names for the file.
    :param col_types: A list of types or a dictionary of column names to types to specify whether columns
        should be forced to a certain type upon import parsing. If a list, the types for elements that are
        one will be guessed. The possible types a column may have are:

        - "unknown" - this will force the column to be parsed as all NA
        - "uuid" - the values in the column must be true UUID or will be parsed as NA
        - "string" - force the column to be parsed as a string
        - "numeric" - force the column to be parsed as numeric. H2O will handle the compression of the numeric
          data in the optimal manner.
        - "enum" - force the column to be parsed as a categorical column.
        - "time" - force the column to be parsed as a time column. H2O will attempt to parse the following
          list of date time formats: (date) "yyyy-MM-dd", "yyyy MM dd", "dd-MMM-yy", "dd MMM yy", (time)
          "HH:mm:ss", "HH:mm:ss:SSS", "HH:mm:ss:SSSnnnnnn", "HH.mm.ss" "HH.mm.ss.SSS", "HH.mm.ss.SSSnnnnnn".
          Times can also contain "AM" or "PM".

    :param na_strings: A list of strings, or a list of lists of strings (one list per column), or a dictionary
        of column names to strings which are to be interpreted as missing values.
    :param pattern: Character string containing a regular expression to match file(s) in the folder if `path` is a
        directory.
    :param skipped_columns: an integer list of column indices to skip and not parsed into the final frame from the import file.

    :returns: a new :class:`H2OFrame` instance.

    :examples:
        >>> # Single file import
        >>> iris = import_file("h2o-3/smalldata/iris.csv")
        >>> # Return all files in the folder iris/ matching the regex r"iris_.*\\.csv"
        >>> iris_pattern = h2o.import_file(path = "h2o-3/smalldata/iris",
        ...                                pattern = r"iris_.*\\.csv")
    """
    coltype = U(None, "unknown", "uuid", "string", "float", "real", "double", "int", "numeric",
                "categorical", "factor", "enum", "time")
    natype = U(str, [str])
    assert_is_type(path, str, [str])
    assert_is_type(pattern, str, None)
    assert_is_type(destination_frame, str, None)
    assert_is_type(parse, bool)
    assert_is_type(header, -1, 0, 1)
    assert_is_type(sep, None, I(str, lambda s: len(s) == 1))
    assert_is_type(col_names, [str], None)
    assert_is_type(col_types, [coltype], {str: coltype}, None)
    assert_is_type(na_strings, [natype], {str: natype}, None)
    # Message matches the documented contract (column *indices*), consistent with upload_file.
    assert isinstance(skipped_columns, (type(None), list)), "The skipped_columns should be a list of column indices!"
    check_frame_id(destination_frame)
    patharr = path if isinstance(path, list) else [path]
    if any(os.path.split(p)[0] == "~" for p in patharr):
        raise H2OValueError("Paths relative to a current user (~) are not valid in the server environment. "
                            "Please use absolute paths if possible.")
    if not parse:
        return lazy_import(path, pattern)
    else:
        return H2OFrame()._import_parse(path, pattern, destination_frame, header, sep, col_names, col_types, na_strings, skipped_columns)
def import_sql_table(connection_url, table, username, password, columns=None, optimize=True, fetch_mode=None):
    """
    Import SQL table to H2OFrame in memory.

    Assumes that the SQL table is not being updated and is stable.
    Runs multiple SELECT SQL queries concurrently for parallel ingestion.
    Be sure to start the h2o.jar in the terminal with your downloaded JDBC driver in the classpath::

        java -cp <path_to_h2o_jar>:<path_to_jdbc_driver_jar> water.H2OApp

    Also see :func:`import_sql_select`.
    Currently supported SQL databases are MySQL, PostgreSQL, MariaDB, and Netezza. Support for Oracle 12g and Microsoft SQL
    Server is forthcoming.

    :param connection_url: URL of the SQL database connection as specified by the Java Database Connectivity (JDBC)
        Driver. For example, "jdbc:mysql://localhost:3306/menagerie?&useSSL=false"
    :param table: name of SQL table
    :param columns: a list of column names to import from SQL table. Default is to import all columns.
    :param username: username for SQL server
    :param password: password for SQL server
    :param optimize: DEPRECATED. Ignored - use fetch_mode instead. Optimize import of SQL table for faster imports.
    :param fetch_mode: Set to DISTRIBUTED to enable distributed import. Set to SINGLE to force a sequential read by a single node
        from the database.

    :returns: an :class:`H2OFrame` containing data of the specified SQL table.

    :examples:
        >>> conn_url = "jdbc:mysql://172.16.2.178:3306/ingestSQL?&useSSL=false"
        >>> table = "citibike20k"
        >>> username = "root"
        >>> password = "abc123"
        >>> my_citibike_data = h2o.import_sql_table(conn_url, table, username, password)
    """
    assert_is_type(connection_url, str)
    assert_is_type(table, str)
    assert_is_type(username, str)
    assert_is_type(password, str)
    assert_is_type(columns, [str], None)
    assert_is_type(optimize, bool)
    assert_is_type(fetch_mode, str, None)
    # Build the request payload; "columns" is only sent when explicitly requested.
    params = {
        "connection_url": connection_url,
        "table": table,
        "username": username,
        "password": password,
        "fetch_mode": fetch_mode,
    }
    if columns:
        params["columns"] = ", ".join(columns)
    job = H2OJob(api("POST /99/ImportSQLTable", data=params), "Import SQL Table").poll()
    return get_frame(job.dest_key)
def import_sql_select(connection_url, select_query, username, password, optimize=True, fetch_mode=None):
    """
    Import the SQL table that is the result of the specified SQL query to H2OFrame in memory.

    Creates a temporary SQL table from the specified sql_query.
    Runs multiple SELECT SQL queries on the temporary table concurrently for parallel ingestion, then drops the table.
    Be sure to start the h2o.jar in the terminal with your downloaded JDBC driver in the classpath::

        java -cp <path_to_h2o_jar>:<path_to_jdbc_driver_jar> water.H2OApp

    Also see h2o.import_sql_table. Currently supported SQL databases are MySQL, PostgreSQL, and MariaDB. Support
    for Oracle 12g and Microsoft SQL Server is forthcoming.

    :param connection_url: URL of the SQL database connection as specified by the Java Database Connectivity (JDBC)
        Driver. For example, "jdbc:mysql://localhost:3306/menagerie?&useSSL=false"
    :param select_query: SQL query starting with `SELECT` that returns rows from one or more database tables.
    :param username: username for SQL server
    :param password: password for SQL server
    :param optimize: DEPRECATED. Ignored - use fetch_mode instead. Optimize import of SQL table for faster imports.
    :param fetch_mode: Set to DISTRIBUTED to enable distributed import. Set to SINGLE to force a sequential read by a single node
        from the database.

    :returns: an :class:`H2OFrame` containing data of the specified SQL query.

    :examples:
        >>> conn_url = "jdbc:mysql://172.16.2.178:3306/ingestSQL?&useSSL=false"
        >>> select_query = "SELECT bikeid from citibike20k"
        >>> username = "root"
        >>> password = "abc123"
        >>> my_citibike_data = h2o.import_sql_select(conn_url, select_query,
        ...                                          username, password, fetch_mode)
    """
    assert_is_type(connection_url, str)
    assert_is_type(select_query, str)
    assert_is_type(username, str)
    assert_is_type(password, str)
    assert_is_type(optimize, bool)
    assert_is_type(fetch_mode, str, None)
    # Build the request payload for the backend import endpoint.
    params = {
        "connection_url": connection_url,
        "select_query": select_query,
        "username": username,
        "password": password,
        "fetch_mode": fetch_mode,
    }
    job = H2OJob(api("POST /99/ImportSQLTable", data=params), "Import SQL Table").poll()
    return get_frame(job.dest_key)
def parse_setup(raw_frames, destination_frame=None, header=0, separator=None, column_names=None,
                column_types=None, na_strings=None, skipped_columns=None):
    """
    Retrieve H2O's best guess as to what the structure of the data file is.

    During parse setup, the H2O cluster will make several guesses about the attributes of
    the data. This method allows a user to perform corrective measures by updating the
    returning dictionary from this method. This dictionary is then fed into `parse_raw` to
    produce the H2OFrame instance.

    :param raw_frames: a collection of imported file frames
    :param destination_frame: The unique hex key assigned to the imported file. If none is given, a key will
        automatically be generated.
    :param header: -1 means the first line is data, 0 means guess, 1 means first line is header.
    :param separator: The field separator character. Values on each line of the file are separated by
        this character. If not provided, the parser will automatically detect the separator.
    :param column_names: A list of column names for the file. If skipped_columns are specified, only list column names
        of columns that are not skipped.
    :param column_types: A list of types or a dictionary of column names to types to specify whether columns
        should be forced to a certain type upon import parsing. If a list, the types for elements that are
        one will be guessed. If skipped_columns are specified, only list column types of columns that are not skipped.
        The possible types a column may have are:

        - "unknown" - this will force the column to be parsed as all NA
        - "uuid" - the values in the column must be true UUID or will be parsed as NA
        - "string" - force the column to be parsed as a string
        - "numeric" - force the column to be parsed as numeric. H2O will handle the compression of the numeric
          data in the optimal manner.
        - "enum" - force the column to be parsed as a categorical column.
        - "time" - force the column to be parsed as a time column. H2O will attempt to parse the following
          list of date time formats: (date) "yyyy-MM-dd", "yyyy MM dd", "dd-MMM-yy", "dd MMM yy", (time)
          "HH:mm:ss", "HH:mm:ss:SSS", "HH:mm:ss:SSSnnnnnn", "HH.mm.ss" "HH.mm.ss.SSS", "HH.mm.ss.SSSnnnnnn".
          Times can also contain "AM" or "PM".

    :param na_strings: A list of strings, or a list of lists of strings (one list per column), or a dictionary
        of column names to strings which are to be interpreted as missing values.
    :param skipped_columns: an integer lists of column indices to skip and not parsed into the final frame from the import file.

    :returns: a dictionary containing parse parameters guessed by the H2O backend.
    """
    # Validator objects used by assert_is_type for the column_types / na_strings arguments.
    coltype = U(None, "unknown", "uuid", "string", "float", "real", "double", "int", "numeric",
                "categorical", "factor", "enum", "time")
    natype = U(str, [str])
    assert_is_type(raw_frames, str, [str])
    assert_is_type(destination_frame, None, str)
    assert_is_type(header, -1, 0, 1)
    assert_is_type(separator, None, I(str, lambda s: len(s) == 1))
    assert_is_type(column_names, [str], None)
    assert_is_type(column_types, [coltype], {str: coltype}, None)
    assert_is_type(na_strings, [natype], {str: natype}, None)
    check_frame_id(destination_frame)
    # The H2O backend only accepts things that are quoted
    if is_type(raw_frames, str): raw_frames = [raw_frames]
    # temporary dictionary just to pass the following information to the parser: header, separator
    kwargs = {"check_header": header, "source_frames": [quoted(frame_id) for frame_id in raw_frames]}
    if separator:
        kwargs["separator"] = ord(separator)
    # Ask the backend to guess the parse attributes; the rest of this function patches
    # the returned dictionary `j` with the user's overrides.
    j = api("POST /3/ParseSetup", data=kwargs)
    if "warnings" in j and j["warnings"]:
        for w in j["warnings"]:
            warnings.warn(w)
    # TODO: really should be url encoding...
    if destination_frame:
        j["destination_frame"] = destination_frame
    # Number of columns that will actually be parsed (excludes skipped columns).
    parse_column_len = len(j["column_types"]) if skipped_columns is None else (len(j["column_types"])-len(skipped_columns))
    tempColumnNames = j["column_names"] if j["column_names"] is not None else gen_header(j["number_columns"])
    # useType[i] is False for columns the user asked to skip; user-supplied names/types are
    # applied only to positions where useType is True.
    useType = [True]*len(tempColumnNames)
    if skipped_columns is not None:
        useType = [True]*len(tempColumnNames)
        for ind in range(len(tempColumnNames)):
            if ind in skipped_columns:
                useType[ind]=False
    if column_names is not None:
        if not isinstance(column_names, list): raise ValueError("col_names should be a list")
        # With skipped columns the user lists names only for parsed columns; otherwise for all columns.
        if (skipped_columns is not None) and len(skipped_columns)>0:
            if (len(column_names)) != parse_column_len:
                raise ValueError(
                    "length of col_names should be equal to the number of columns parsed: %d vs %d"
                    % (len(column_names), parse_column_len))
        else:
            if len(column_names) != len(j["column_types"]): raise ValueError(
                "length of col_names should be equal to the number of columns: %d vs %d"
                % (len(column_names), len(j["column_types"])))
        j["column_names"] = column_names
        # Spread the user-supplied names over the non-skipped column positions, in order.
        counter = 0
        for ind in range(len(tempColumnNames)):
            if useType[ind]:
                tempColumnNames[ind]=column_names[counter]
                counter=counter+1
    if (column_types is not None):  # keep the column types to include all columns
        if isinstance(column_types, dict):
            # overwrite dictionary to ordered list of column types. if user didn't specify column type for all names,
            # use type provided by backend
            if j["column_names"] is None:  # no colnames discovered! (C1, C2, ...)
                j["column_names"] = gen_header(j["number_columns"])
            if not set(column_types.keys()).issubset(set(j["column_names"])): raise ValueError(
                "names specified in col_types is not a subset of the column names")
            idx = 0
            column_types_list = []
            for name in tempColumnNames:  # column_names may have already been changed
                if name in column_types:
                    column_types_list.append(column_types[name])
                else:
                    column_types_list.append(j["column_types"][idx])
                idx += 1
            column_types = column_types_list
        elif isinstance(column_types, list):
            if len(column_types) != parse_column_len: raise ValueError(
                "length of col_types should be equal to the number of parsed columns")
            # need to expand it out to all columns, not just the parsed ones
            column_types_list = j["column_types"]
            counter = 0
            for ind in range(len(j["column_types"])):
                if useType[ind] and (column_types[counter]!=None):
                    column_types_list[ind]=column_types[counter]
                    counter=counter+1
            column_types = column_types_list
        else:  # not dictionary or list
            raise ValueError("col_types should be a list of types or a dictionary of column names to types")
        j["column_types"] = column_types
    if na_strings is not None:
        if isinstance(na_strings, dict):
            # overwrite dictionary to ordered list of lists of na_strings
            if not j["column_names"]: raise ValueError("column names should be specified")
            if not set(na_strings.keys()).issubset(set(j["column_names"])): raise ValueError(
                "names specified in na_strings is not a subset of the column names")
            j["na_strings"] = [[] for _ in range(len(j["column_names"]))]
            for name, na in na_strings.items():
                idx = j["column_names"].index(name)
                if is_type(na, str): na = [na]
                for n in na: j["na_strings"][idx].append(quoted(n))
        elif is_type(na_strings, [[str]]):
            # One list of NA strings per column.
            if len(na_strings) != len(j["column_types"]):
                raise ValueError("length of na_strings should be equal to the number of columns")
            j["na_strings"] = [[quoted(na) for na in col] if col is not None else [] for col in na_strings]
        elif isinstance(na_strings, list):
            # A flat list applies the same NA strings to every column.
            j["na_strings"] = [[quoted(na) for na in na_strings]] * len(j["column_types"])
        else:  # not a dictionary or list
            raise ValueError(
                "na_strings should be a list, a list of lists (one list per column), or a dictionary of column "
                "names to strings which are to be interpreted as missing values")
    if skipped_columns is not None:
        if isinstance(skipped_columns, list):
            j["skipped_columns"] = []
            for colidx in skipped_columns:
                if (colidx < 0): raise ValueError("skipped column index cannot be negative")
                j["skipped_columns"].append(colidx)
    # quote column names and column types also when not specified by user
    if j["column_names"]: j["column_names"] = list(map(quoted, j["column_names"]))
    j["column_types"] = list(map(quoted, j["column_types"]))
    return j
def parse_raw(setup, id=None, first_line_is_header=0):
    """
    Parse dataset using the parse setup structure.

    :param setup: Result of ``h2o.parse_setup()``
    :param id: an id for the frame.
    :param first_line_is_header: -1, 0, 1 if the first line is to be used as the header

    :returns: an :class:`H2OFrame` object.
    """
    assert_is_type(setup, dict)
    assert_is_type(id, str, None)
    assert_is_type(first_line_is_header, -1, 0, 1)
    check_frame_id(id)
    if id:
        setup["destination_frame"] = id
    # BUG FIX: the previous guard `if first_line_is_header != (-1, 0, 1)` compared an int
    # against a tuple, which is always True, and the nested `not in (-1, 0, 1)` re-check is
    # unreachable because assert_is_type above already rejects any other value. The net
    # effect was always to assign check_header, so do that directly.
    setup["check_header"] = first_line_is_header
    fr = H2OFrame()
    fr._parse_raw(setup)
    return fr
def assign(data, xid):
    """
    (internal) Assign new id to the frame.

    :param data: an H2OFrame whose id should be changed
    :param xid: new id for the frame.
    :returns: the passed frame.
    """
    assert_is_type(data, H2OFrame)
    assert_is_type(xid, str)
    assert_satisfies(xid, xid != data.frame_id)
    check_frame_id(xid)
    # Evaluate a rapids "assign" expression that binds the frame to its new key,
    # then update the client-side cache to match.
    reassigned = ExprNode("assign", xid, data)._eval_driver(False)
    data._ex = reassigned
    data._ex._cache._id = xid
    data._ex._children = None
    return data
def deep_copy(data, xid):
    """
    Create a deep clone of the frame ``data``.

    :param data: an H2OFrame to be cloned
    :param xid: (internal) id to be assigned to the new frame.
    :returns: new :class:`H2OFrame` which is the clone of the passed frame.
    """
    assert_is_type(data, H2OFrame)
    assert_is_type(xid, str)
    assert_satisfies(xid, xid != data.frame_id)
    check_frame_id(xid)
    # Materialize a copy by applying the identity function, then bind it to the new key.
    clone = data.apply(lambda x: x)
    clone._ex = ExprNode("assign", xid, clone)._eval_driver(False)
    clone._ex._cache._id = xid
    clone._ex._children = None
    return clone
def get_model(model_id):
    """
    Load a model from the server.

    :param model_id: The model identification in H2O

    :returns: Model object, a subclass of H2OEstimator
    """
    assert_is_type(model_id, str)
    model_json = api("GET /3/Models/%s" % model_id)["models"][0]
    algo = model_json["algo"]
    # Dispatch table from algo name to estimator class; "deeplearning" is handled
    # separately because its class depends on the model category.
    estimator_classes = {
        "svd": H2OSVD,
        "pca": H2OPrincipalComponentAnalysisEstimator,
        "drf": H2ORandomForestEstimator,
        "naivebayes": H2ONaiveBayesEstimator,
        "kmeans": H2OKMeansEstimator,
        "glrm": H2OGeneralizedLowRankEstimator,
        "glm": H2OGeneralizedLinearEstimator,
        "gbm": H2OGradientBoostingEstimator,
        "deepwater": H2ODeepWaterEstimator,
        "xgboost": H2OXGBoostEstimator,
        "word2vec": H2OWord2vecEstimator,
        "stackedensemble": H2OStackedEnsembleEstimator,
        "isolationforest": H2OIsolationForestEstimator,
    }
    if algo == "deeplearning":
        if model_json["output"]["model_category"] == "AutoEncoder":
            m = H2OAutoEncoderEstimator()
        else:
            m = H2ODeepLearningEstimator()
    elif algo in estimator_classes:
        m = estimator_classes[algo]()
    else:
        raise ValueError("Unknown algo type: " + algo)
    m._resolve_model(model_id, model_json)
    return m
def get_grid(grid_id):
    """
    Return the specified grid.

    :param grid_id: The grid identification in h2o

    :returns: an :class:`H2OGridSearch` instance.
    """
    assert_is_type(grid_id, str)
    grid_json = api("GET /99/Grids/%s" % grid_id)
    models = [get_model(key["name"]) for key in grid_json["model_ids"]]
    # get first model returned in list of models from grid search to get model class (binomial, multinomial, etc)
    first_model_json = api("GET /3/Models/%s" % grid_json["model_ids"][0]["name"])["models"][0]
    gs = H2OGridSearch(None, {}, grid_id)
    gs._resolve_grid(grid_id, grid_json, first_model_json)
    gs.models = models
    # Collect the distinct values each hyper-parameter took across all models of the grid.
    hyper_params = {param: set() for param in gs.hyper_names}
    for param in gs.hyper_names:
        for model in models:
            # "actual_value" may itself be a list; only its first element is recorded then.
            if isinstance(model.full_parameters[param]["actual_value"], list):
                hyper_params[param].add(model.full_parameters[param]["actual_value"][0])
            else:
                hyper_params[param].add(model.full_parameters[param]["actual_value"])
    hyper_params = {str(param): list(vals) for param, vals in hyper_params.items()}
    gs.hyper_params = hyper_params
    # NOTE(review): `model` here is the loop variable leaked from the loop above (i.e. the last
    # model); this assumes the grid is non-empty and all its models share one estimator class --
    # TODO confirm, an empty grid would raise NameError here.
    gs.model = model.__class__()
    return gs
def get_frame(frame_id, **kwargs):
    """
    Obtain a handle to the frame in H2O with the frame_id key.

    :param str frame_id: id of the frame to retrieve.
    :param kwargs: extra arguments passed through to ``H2OFrame.get_frame``.
    :returns: an :class:`H2OFrame` object
    """
    assert_is_type(frame_id, str)
    return H2OFrame.get_frame(frame_id, **kwargs)
def no_progress():
    """
    Disable the progress bar from flushing to stdout.

    The completed progress bar is printed when a job is complete so as to demarcate a log file.
    """
    # Toggles the module-wide flag read by H2OJob when rendering progress.
    H2OJob.__PROGRESS_BAR__ = False
def show_progress():
    """Enable the progress bar (it is enabled by default)."""
    H2OJob.__PROGRESS_BAR__ = True
def enable_expr_optimizations(flag):
    """Enable expression tree local optimizations.

    :param flag: True to enable optimizations, False to disable them.
    """
    ExprNode.__ENABLE_EXPR_OPTIMIZATIONS__ = flag
def is_expr_optimizations_enabled():
    """Return whether expression tree local optimizations are currently enabled."""
    return ExprNode.__ENABLE_EXPR_OPTIMIZATIONS__
def log_and_echo(message=""):
    """
    Log a message on the server-side logs.

    This is helpful when running several pieces of work one after the other on a single H2O
    cluster and you want to make a notation in the H2O server side log where one piece of
    work ends and the next piece of work begins.

    Sends a message to H2O for logging. Generally used for debugging purposes.

    :param message: message to write to the log.
    """
    assert_is_type(message, str)
    api("POST /3/LogAndEcho", data={"message": str(message)})
def remove(x):
    """
    Remove object(s) from H2O.

    :param x: H2OFrame, H2OEstimator, or string, or a list of those things: the object(s) or unique id(s)
        pointing to the object(s) to be removed.
    """
    item_type = U(str, H2OFrame, H2OEstimator)
    assert_is_type(x, item_type, [item_type])
    if not isinstance(x, list): x = [x]
    for xi in x:
        if isinstance(xi, H2OFrame):
            xi_id = xi._ex._cache._id  # String or None
            # BUG FIX: previously this was `return`, which aborted the whole loop and
            # silently skipped removal of every remaining item in the list. A lazy frame
            # (never evaluated, nothing in cluster) should only skip itself.
            if xi_id is None: continue
            rapids("(rm {})".format(xi_id))
            xi._ex = None
        elif isinstance(xi, H2OEstimator):
            api("DELETE /3/DKV/%s" % xi.model_id)
            xi._id = None
        else:
            # string may be a Frame key name part of a rapids session... need to call rm thru rapids here
            try:
                rapids("(rm {})".format(xi))
            except Exception:
                # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt still propagate;
                # fall back to removing the raw DKV entry.
                api("DELETE /3/DKV/%s" % xi)
def remove_all():
    """Remove all objects from H2O (deletes every key in the cluster's DKV)."""
    api("DELETE /3/DKV")
def rapids(expr):
    """
    Execute a Rapids expression.

    :param expr: The rapids expression (ascii string).

    :returns: The JSON response (as a python dictionary) of the Rapids execution.
    """
    assert_is_type(expr, str)
    return ExprNode.rapids(expr)
def ls():
    """List keys on an H2O Cluster.

    :returns: a pandas DataFrame of the keys (one row per key).
    """
    return H2OFrame._expr(expr=ExprNode("ls")).as_data_frame(use_pandas=True)
def frame(frame_id):
    """
    Retrieve metadata for an id that points to a Frame.

    :param frame_id: the key of a Frame in H2O.

    :returns: dict containing the frame meta-information.
    """
    assert_is_type(frame_id, str)
    return api("GET /3/Frames/%s" % frame_id)
def frames():
    """
    Retrieve all the Frames.

    :returns: Meta information on the frames
    """
    return api("GET /3/Frames")
def download_pojo(model, path="", get_jar=True, jar_name=""):
    """
    Download the POJO for this model to the directory specified by path; if path is "", then dump to screen.

    :param model: the model whose scoring POJO should be retrieved.
    :param path: an absolute path to the directory where POJO should be saved.
    :param get_jar: retrieve the h2o-genmodel.jar also (will be saved to the same folder ``path``).
    :param jar_name: Custom name of genmodel jar.
    :returns: location of the downloaded POJO file, or None when dumping to screen.
    """
    assert_is_type(model, ModelBase)
    assert_is_type(path, str)
    assert_is_type(get_jar, bool)
    # Consistency fix: jar_name was the only parameter not validated like its siblings.
    assert_is_type(jar_name, str)
    if not model.have_pojo:
        raise H2OValueError("Export to POJO not supported")
    if path == "":
        # No destination directory: print the generated Java code to stdout instead.
        java_code = api("GET /3/Models.java/%s" % model.model_id)
        print(java_code)
        return None
    else:
        filename = api("GET /3/Models.java/%s" % model.model_id, save_to=path)
        if get_jar:
            if jar_name == "":
                api("GET /3/h2o-genmodel.jar", save_to=os.path.join(path, "h2o-genmodel.jar"))
            else:
                api("GET /3/h2o-genmodel.jar", save_to=os.path.join(path, jar_name))
        return filename
def download_csv(data, filename):
    """
    Download an H2O data set to a CSV file on the local disk.

    Warning: Files located on the H2O server may be very large! Make sure you have enough
    hard drive space to accommodate the entire file.

    :param data: an H2OFrame object to be downloaded.
    :param filename: name for the CSV file where the data should be saved to.
    """
    assert_is_type(data, H2OFrame)
    assert_is_type(filename, str)
    # Build the download endpoint URL for this frame, fetch the payload, write it out.
    query = "?frame_id={}&hex_string=false".format(data.frame_id)
    url = h2oconn.make_url("DownloadDataset", 3) + query
    payload = urlopen()(url).read()
    with open(filename, "wb") as f:
        f.write(payload)
def download_all_logs(dirname=".", filename=None):
    """
    Download H2O log files to disk.

    :param dirname: a character string indicating the directory that the log file should be saved in.
    :param filename: a string indicating the name that the CSV file should be. Note that the saved format is .zip, so the file name must include the .zip extension.

    :returns: path of logs written in a zip file.

    :examples: The following code will save the zip file `'autoh2o_log.zip'` in a directory that is one down from where you are currently working into a directory called `your_directory_name`. (Please note that `your_directory_name` should be replaced with the name of the directory that you've created and that already exists.)

        >>> h2o.download_all_logs(dirname='./your_directory_name/', filename = 'autoh2o_log.zip')
    """
    assert_is_type(dirname, str)
    assert_is_type(filename, str, None)
    url = "%s/3/Logs/download" % h2oconn.base_url
    opener = urlopen()
    response = opener(url)
    if not os.path.exists(dirname): os.mkdir(dirname)
    if filename is None:
        # Derive the file name from the "filename=" response header sent by the server.
        if PY3:
            headers = [h[1] for h in response.headers._headers]
        else:
            headers = response.headers.headers
        for h in headers:
            if "filename=" in h:
                filename = h.split("filename=")[1].strip()
                break
    path = os.path.join(dirname, filename)
    # BUG FIX: previously `opener(url).read()` issued a SECOND HTTP request and downloaded
    # the (potentially large) log archive twice; reuse the already-open response instead.
    content = response.read()
    print("Writing H2O logs to " + path)
    with open(path, "wb") as f:
        f.write(content)
    return path
def save_model(model, path="", force=False):
    """
    Save an H2O Model object to disk. (Note that ensemble binary models can now be saved using this method.)

    :param model: The model object to save.
    :param path: a path to save the model at (hdfs, s3, local)
    :param force: if True overwrite destination directory in case it exists, or throw exception if set to False.

    :returns: the path of the saved model

    :examples:
        >>> path = h2o.save_model(my_model, path=my_path)
    """
    assert_is_type(model, ModelBase)
    assert_is_type(path, str)
    assert_is_type(force, bool)
    # Default to the current working directory when no path is given; the model id
    # becomes the final path component.
    path = os.path.join(os.getcwd() if path == "" else path, model.model_id)
    return api("GET /99/Models.bin/%s" % model.model_id, data={"dir": path, "force": force})["dir"]
def load_model(path):
    """
    Load a saved H2O model from disk. (Note that ensemble binary models can now be loaded using this method.)

    :param path: the full path of the H2O Model to be imported.

    :returns: an :class:`H2OEstimator` object

    :examples:
        >>> path = h2o.save_model(my_model, path=my_path)
        >>> h2o.load_model(path)
    """
    assert_is_type(path, str)
    res = api("POST /99/Models.bin/%s" % "", data={"dir": path})
    return get_model(res["models"][0]["model_id"]["name"])
def export_file(frame, path, force=False, parts=1):
    """
    Export a given H2OFrame to a path on the machine this python session is currently connected to.

    :param frame: the Frame to save to disk.
    :param path: the path to the save point on disk.
    :param force: if True, overwrite any preexisting file with the same path
    :param parts: enables export to multiple 'part' files instead of just a single file.
        Convenient for large datasets that take too long to store in a single file.
        Use parts=-1 to instruct H2O to determine the optimal number of part files or
        specify your desired maximum number of part files. Path needs to be a directory
        when exporting to multiple files, also that directory must be empty.
        Default is ``parts = 1``, which is to export to a single file.
    """
    assert_is_type(frame, H2OFrame)
    assert_is_type(path, str)
    assert_is_type(force, bool)
    assert_is_type(parts, int)
    # Kick off the export job on the backend and block until it completes.
    payload = {"path": path, "num_parts": parts, "force": force}
    export_job = api("POST /3/Frames/%s/export" % (frame.frame_id), data=payload)
    H2OJob(export_job, "Export File").poll()
def cluster():
    """Return :class:`H2OCluster` object describing the backend H2O cloud, or None when not connected."""
    return h2oconn.cluster if h2oconn else None
def create_frame(frame_id=None, rows=10000, cols=10, randomize=True,
                 real_fraction=None, categorical_fraction=None, integer_fraction=None,
                 binary_fraction=None, time_fraction=None, string_fraction=None,
                 value=0, real_range=100, factors=100, integer_range=100,
                 binary_ones_fraction=0.02, missing_fraction=0.01,
                 has_response=False, response_factors=2, positive_response=False,
                 seed=None, seed_for_column_types=None):
    """
    Create a new frame with random data.

    Creates a data frame in H2O with real-valued, categorical, integer, and binary columns specified by the user.

    :param frame_id: the destination key. If empty, this will be auto-generated.
    :param rows: the number of rows of data to generate.
    :param cols: the number of columns of data to generate. Excludes the response column if has_response is True.
    :param randomize: If True, data values will be randomly generated. This must be True if either
        categorical_fraction or integer_fraction is non-zero.
    :param value: if randomize is False, then all real-valued entries will be set to this value.
    :param real_range: the range of randomly generated real values.
    :param real_fraction: the fraction of columns that are real-valued.
    :param categorical_fraction: the fraction of total columns that are categorical.
    :param factors: the number of (unique) factor levels in each categorical column.
    :param integer_fraction: the fraction of total columns that are integer-valued.
    :param integer_range: the range of randomly generated integer values.
    :param binary_fraction: the fraction of total columns that are binary-valued.
    :param binary_ones_fraction: the fraction of values in a binary column that are set to 1.
    :param time_fraction: the fraction of randomly created date/time columns.
    :param string_fraction: the fraction of randomly created string columns.
    :param missing_fraction: the fraction of total entries in the data frame that are set to NA.
    :param has_response: A logical value indicating whether an additional response column should be prepended to the
        final H2O data frame. If set to True, the total number of columns will be ``cols + 1``.
    :param response_factors: if has_response is True, then this variable controls the type of the "response" column:
        setting response_factors to 1 will generate real-valued response, any value greater or equal than 2 will
        create categorical response with that many categories.
    :param positive_response: when response variable is present and of real type, this will control whether it
        contains positive values only, or both positive and negative.
    :param seed: a seed used to generate random values when ``randomize`` is True.
    :param seed_for_column_types: a seed used to generate random column types when ``randomize`` is True.

    :returns: an :class:`H2OFrame` object
    """
    # Validator for every "fraction" argument: either unset or a number in [0, 1].
    t_fraction = U(None, BoundNumeric(0, 1))
    assert_is_type(frame_id, str, None)
    assert_is_type(rows, BoundInt(1))
    assert_is_type(cols, BoundInt(1))
    assert_is_type(randomize, bool)
    assert_is_type(value, numeric)
    assert_is_type(real_range, BoundNumeric(0))
    assert_is_type(real_fraction, t_fraction)
    assert_is_type(categorical_fraction, t_fraction)
    assert_is_type(integer_fraction, t_fraction)
    assert_is_type(binary_fraction, t_fraction)
    assert_is_type(time_fraction, t_fraction)
    assert_is_type(string_fraction, t_fraction)
    assert_is_type(missing_fraction, t_fraction)
    assert_is_type(binary_ones_fraction, t_fraction)
    assert_is_type(factors, BoundInt(1))
    assert_is_type(integer_range, BoundInt(1))
    assert_is_type(has_response, bool)
    assert_is_type(response_factors, None, BoundInt(1))
    assert_is_type(positive_response, bool)
    assert_is_type(seed, int, None)
    assert_is_type(seed_for_column_types, int, None)
    check_frame_id(frame_id)
    if randomize and value:
        raise H2OValueError("Cannot set data to a `value` if `randomize` is true")
    if (categorical_fraction or integer_fraction) and not randomize:
        raise H2OValueError("`randomize` should be True when either categorical or integer columns are used.")
    # The total column fraction that the user has specified explicitly. This sum should not exceed 1. We will respect
    # all explicitly set fractions, and will auto-select the remaining fractions.
    frcs = [real_fraction, categorical_fraction, integer_fraction, binary_fraction, time_fraction, string_fraction]
    wgts = [0.5, 0.2, 0.2, 0.1, 0.0, 0.0]
    sum_explicit_fractions = sum(0 if f is None else f for f in frcs)
    count_explicit_fractions = sum(0 if f is None else 1 for f in frcs)
    remainder = 1 - sum_explicit_fractions
    if sum_explicit_fractions >= 1 + 1e-10:
        raise H2OValueError("Fractions of binary, integer, categorical, time and string columns should add up "
                            "to a number less than 1.")
    elif sum_explicit_fractions >= 1 - 1e-10:
        # The fractions already add up to almost 1. No need to do anything (the server will absorb the tiny
        # remainder into the real_fraction column).
        pass
    else:
        # sum_explicit_fractions < 1 => distribute the remainder among the columns that were not set explicitly
        if count_explicit_fractions == 6:
            raise H2OValueError("Fraction of binary, integer, categorical, time and string columns add up to a "
                                "number less than 1.")
        # Each column type receives a certain part (proportional to column's "weight") of the remaining fraction.
        sum_implicit_weights = sum(wgts[i] if frcs[i] is None else 0 for i in range(6))
        for i, f in enumerate(frcs):
            if frcs[i] is not None: continue
            if sum_implicit_weights == 0:
                frcs[i] = remainder
            else:
                frcs[i] = remainder * wgts[i] / sum_implicit_weights
            remainder -= frcs[i]
            sum_implicit_weights -= wgts[i]
    # Any fraction still unset at this point contributes nothing.
    for i, f in enumerate(frcs):
        if f is None:
            frcs[i] = 0
    real_fraction, categorical_fraction, integer_fraction, binary_fraction, time_fraction, string_fraction = frcs
    parms = {"dest": frame_id if frame_id else py_tmp_key(append=h2oconn.session_id),
             "rows": rows,
             "cols": cols,
             "randomize": randomize,
             "categorical_fraction": categorical_fraction,
             "integer_fraction": integer_fraction,
             "binary_fraction": binary_fraction,
             "time_fraction": time_fraction,
             "string_fraction": string_fraction,
             # "real_fraction" is not provided, the backend computes it as 1 - sum(5 other fractions)
             "value": value,
             "real_range": real_range,
             "factors": factors,
             "integer_range": integer_range,
             "binary_ones_fraction": binary_ones_fraction,
             "missing_fraction": missing_fraction,
             "has_response": has_response,
             "response_factors": response_factors,
             "positive_response": positive_response,
             # -1 signals the backend to pick its own seed.
             "seed": -1 if seed is None else seed,
             "seed_for_column_types": -1 if seed_for_column_types is None else seed_for_column_types,
             }
    H2OJob(api("POST /3/CreateFrame", data=parms), "Create Frame").poll()
    return get_frame(parms["dest"])
def interaction(data, factors, pairwise, max_factors, min_occurrence, destination_frame=None):
    """
    Categorical Interaction Feature Creation in H2O.

    Creates a frame in H2O with n-th order interaction features between categorical columns, as specified by
    the user.

    :param data: the H2OFrame that holds the target categorical columns.
    :param factors: factor columns (either indices or column names).
    :param pairwise: If True, create pairwise interactions between factors (otherwise create one
        higher-order interaction). Only applicable if there are 3 or more factors.
    :param max_factors: Max. number of factor levels in pair-wise interaction terms (if enforced, one extra
        catch-all factor will be made).
    :param min_occurrence: Min. occurrence threshold for factor levels in pair-wise interaction terms
    :param destination_frame: a string indicating the destination key. If empty, this will be auto-generated by H2O.

    :returns: :class:`H2OFrame`
    """
    assert_is_type(data, H2OFrame)
    assert_is_type(factors, [str, int])
    assert_is_type(pairwise, bool)
    assert_is_type(max_factors, int)
    assert_is_type(min_occurrence, int)
    assert_is_type(destination_frame, str, None)
    # Resolve any numeric column indices to their column names.
    factor_names = [data.names[f] if is_type(f, int) else f for f in factors]
    if destination_frame is None:
        dest = py_tmp_key(append=h2oconn.session_id)
    else:
        dest = destination_frame
    payload = {"dest": dest,
               "source_frame": data.frame_id,
               "factor_columns": [quoted(name) for name in factor_names],
               "pairwise": pairwise,
               "max_factors": max_factors,
               "min_occurrence": min_occurrence,
               }
    H2OJob(api("POST /3/Interaction", data=payload), "Interactions").poll()
    return get_frame(dest)
def as_list(data, use_pandas=True, header=True):
    """
    Convert an H2O data object into a python-specific object.

    WARNING! This will pull all data local!

    If Pandas is available (and use_pandas is True), then pandas will be used to parse the
    data frame. Otherwise, a list-of-lists populated by character data will be returned (so
    the types of data will all be str).

    :param data: an H2O data object.
    :param use_pandas: If True, try to use pandas for reading in the data.
    :param header: If True, return column names as first element in list

    :returns: List of lists (Rows x Columns).
    """
    assert_is_type(data, H2OFrame)
    assert_is_type(use_pandas, bool)
    assert_is_type(header, bool)
    return H2OFrame.as_data_frame(data, use_pandas=use_pandas, header=header)
def demo(funcname, interactive=True, echo=True, test=False):
    """
    H2O built-in demo facility.

    :param funcname: A string that identifies the h2o python function to demonstrate.
    :param interactive: If True, the user will be prompted to continue the demonstration after every segment.
    :param echo: If True, the python commands that are executed will be displayed.
    :param test: If True, `h2o.init()` will not be called (used for pyunit testing).

    :example:
        >>> import h2o
        >>> h2o.demo("gbm")
    """
    import h2o.demos as h2odemo
    assert_is_type(funcname, str)
    assert_is_type(interactive, bool)
    assert_is_type(echo, bool)
    assert_is_type(test, bool)
    demo_function = getattr(h2odemo, funcname, None)
    # `type(demo)` is the plain function type, so this ensures the looked-up attribute
    # is a real function before calling it.
    if demo_function and type(demo_function) is type(demo):
        demo_function(interactive, echo, test)
    else:
        print("Demo for %s is not available." % funcname)
def load_dataset(relative_path):
    """Imports a data file within the 'h2o_data' folder.

    :param relative_path: file name or path relative to the package's 'h2o_data' directory.
    :returns: the uploaded frame.
    :raises H2OValueError: if the file cannot be located.
    """
    assert_is_type(relative_path, str)
    h2o_dir = os.path.split(__file__)[0]
    # Search order: exact relative path, then inside h2o_data/, then with a .csv suffix.
    candidates = [os.path.join(h2o_dir, relative_path),
                  os.path.join(h2o_dir, "h2o_data", relative_path),
                  os.path.join(h2o_dir, "h2o_data", relative_path + ".csv")]
    for candidate in candidates:
        if os.path.exists(candidate):
            return upload_file(candidate)
    # File not found -- raise an error!
    raise H2OValueError("Data file %s cannot be found" % relative_path)
def make_metrics(predicted, actual, domain=None, distribution=None):
    """
    Create Model Metrics from predicted and actual values in H2O.

    :param H2OFrame predicted: an H2OFrame containing predictions.
    :param H2OFrame actual: an H2OFrame containing actual values (exactly one column).
    :param domain: list of response factors for classification.
    :param distribution: distribution for regression.
    :returns: the computed model metrics.
    """
    assert_is_type(predicted, H2OFrame)
    assert_is_type(actual, H2OFrame)
    # assert predicted.ncol == 1, "`predicted` frame should have exactly 1 column"
    assert actual.ncol == 1, "`actual` frame should have exactly 1 column"
    assert_is_type(distribution, str, None)
    assert_satisfies(actual.ncol, actual.ncol == 1)
    # When no domain is given and the actuals are categorical, use their factor levels.
    if domain is None and any(actual.isfactor()):
        domain = actual.levels()[0]
    res = api("POST /3/ModelMetrics/predictions_frame/%s/actuals_frame/%s" % (predicted.frame_id, actual.frame_id),
              data={"domain": domain, "distribution": distribution})
    return res["model_metrics"]
def flow():
    """
    Open H2O Flow in your browser.
    """
    # new=1 asks webbrowser to open a new browser window when possible.
    webbrowser.open(connection().base_url, new = 1)
def _put_key(file_path, dest_key=None, overwrite=True):
    """
    Upload given file into DKV and save it under give key as raw object.

    :param dest_key: name of destination key in DKV; when None, an empty destination_key is
        sent (presumably letting the server choose a key -- TODO confirm).
    :param file_path: path to file to upload
    :param overwrite: whether to overwrite an existing entry under the same key.
    :return: key name if object was uploaded successfully
    """
    ret = api("POST /3/PutKey?destination_key={}&overwrite={}".format(dest_key if dest_key else '', overwrite),
              filename=file_path)
    return ret["destination_key"]
def _create_zip_file(dest_filename, *content_list):
    """
    Create a zip archive at ``dest_filename`` from in-memory content.

    :param dest_filename: path of the zip file to create.
    :param content_list: (filename, file_content) pairs to store inside the archive.
    :return: the destination file name.
    """
    from .utils.shared_utils import InMemoryZipArch
    with InMemoryZipArch(dest_filename) as zip_arch:
        for filename, file_content in content_list:
            zip_arch.append(filename, file_content)
    return dest_filename
def _default_source_provider(obj):
import inspect
# First try to get source code via inspect
try:
return ' '.join(inspect.getsourcelines(obj)[0])
except (OSError, TypeError):
# It seems like we are in interactive shell and
# we do not have access to class source code directly
# At this point we can:
# (1) get IPython history and find class definition, or
# (2) compose body of class from methods, since it is still possible to get
# method body
class_def = "class {}:\n".format(obj.__name__)
for name, member in inspect.getmembers(obj):
if inspect.ismethod(member):
class_def += inspect.getsource(member)
return class_def
def upload_custom_metric(func, func_file="metrics.py", func_name=None, class_name=None, source_provider=None):
    """
    Upload given metrics function into H2O cluster.
    The metrics can have different representation:
      - method
      - class: needs to inherit from water.udf.CFunc2 and implement method apply(actual, predict)
        returning double
      - string: the same as in class case, but the class is given as a string
    :param func: metrics representation: string, class, function
    :param func_file: internal name of file to save given metrics representation
    :param func_name: name for h2o key under which the given metric is saved
    :param class_name: name of class wrapping the metrics function
    :param source_provider: a function which provides a source code for given function
    :return: reference to uploaded metrics function
    """
    import tempfile
    import inspect
    # Use default source provider
    if not source_provider:
        source_provider = _default_source_provider
    # The template wraps given metrics representation
    _CFUNC_CODE_TEMPLATE = """# Generated code
import water.udf.CMetricFunc as MetricFunc
# User given metric function as a class implementing
# 3 methods defined by interface CMetricFunc
{}
# Generated user metric which satisfies the interface
# of Java MetricFunc
class {}Wrapper({}, MetricFunc, object):
    pass
"""
    assert_satisfies(func, inspect.isclass(func) or isinstance(func, str),
                     "The argument func needs to be string or class !")
    assert_satisfies(func_file, func_file is not None,
                     "The argument func_file is missing!")
    assert_satisfies(func_file, func_file.endswith('.py'),
                     "The argument func_file needs to end with '.py'")
    code = None
    derived_func_name = None
    module_name = func_file[:-3]
    if isinstance(func, str):
        assert_satisfies(class_name, class_name is not None,
                         "The argument class_name is missing! " +
                         "It needs to reference the class in given string!")
        derived_func_name = "metrics_{}".format(class_name)
        # BUG FIX: this previously read ``code = str``, which stored the
        # builtin ``str`` *type* instead of the user's source string, so the
        # uploaded archive never contained the metric definition.  The given
        # string is expected to already contain the fully wrapped class that
        # ``class_name`` refers to, so it is used verbatim.
        code = func
    else:
        assert_satisfies(func, inspect.isclass(func), "The parameter `func` should be str or class")
        for method in ['map', 'reduce', 'metric']:
            assert_satisfies(func, method in func.__dict__, "The class `func` needs to define method `{}`".format(method))
        assert_satisfies(class_name, class_name is None,
                         "If class is specified then class_name parameter needs to be None")
        # The loadable class is the generated "<module>.<Class>Wrapper".
        class_name = "{}.{}Wrapper".format(module_name, func.__name__)
        derived_func_name = "metrics_{}".format(func.__name__)
        code = _CFUNC_CODE_TEMPLATE.format(source_provider(func), func.__name__, func.__name__)
    # If the func name is not given, use whatever we can derived from given definition
    if not func_name:
        func_name = derived_func_name
    # Saved into jar file
    tmpdir = tempfile.mkdtemp(prefix="h2o-func")
    func_arch_file = _create_zip_file("{}/func.jar".format(tmpdir), (func_file, code))
    # Upload into K/V
    dest_key = _put_key(func_arch_file, dest_key=func_name)
    # Reference
    return "python:{}={}".format(dest_key, class_name)
#-----------------------------------------------------------------------------------------------------------------------
# Private
#-----------------------------------------------------------------------------------------------------------------------
def _check_connection():
    """Raise H2OConnectionError unless a live cluster connection exists."""
    if not (h2oconn and h2oconn.cluster):
        raise H2OConnectionError("Not connected to a cluster. Did you run `h2o.connect()`?")
def _connect_with_conf(conn_conf):
    """Open a connection described by an H2OConnectionConf (or an equivalent dict)."""
    if isinstance(conn_conf, dict):
        conf = H2OConnectionConf(config=conn_conf)
    else:
        conf = conn_conf
    assert_is_type(conf, H2OConnectionConf)
    return connect(url=conf.url, verify_ssl_certificates=conf.verify_ssl_certificates,
                   auth=conf.auth, proxy=conf.proxy, cookies=conf.cookies, verbose=conf.verbose)
#-----------------------------------------------------------------------------------------------------------------------
# ALL DEPRECATED METHODS BELOW
#-----------------------------------------------------------------------------------------------------------------------
# NOTE(review): these shims only delegate to the modern h2o.cluster() API;
# they should not gain new behavior.
# Deprecated since 2015-10-08
@deprecated("Deprecated, use ``h2o.import_file()``.")
def import_frame():
    """Deprecated."""
    # NOTE(review): the result of import_file() is discarded, so this shim
    # returns None -- callers migrating should call h2o.import_file() directly
    # to obtain the frame.
    import_file()
# Deprecated since 2015-10-08
@deprecated("Deprecated (converted to a private method).")
def parse():
    """Deprecated."""
    pass
# Deprecated since 2016-08-04
@deprecated("Deprecated, use ``h2o.cluster().show_status()``.")
def cluster_info():
    """Deprecated."""
    _check_connection()
    cluster().show_status()
# Deprecated since 2016-08-04
@deprecated("Deprecated, use ``h2o.cluster().show_status(True)``.")
def cluster_status():
    """Deprecated."""
    _check_connection()
    cluster().show_status(True)
# Deprecated since 2016-08-04
@deprecated("Deprecated, use ``h2o.cluster().shutdown()``.")
def shutdown(prompt=False):
    """Deprecated."""
    _check_connection()
    cluster().shutdown(prompt)
# Deprecated since 2016-08-04
@deprecated("Deprecated, use ``h2o.cluster().network_test()``.")
def network_test():
    """Deprecated."""
    _check_connection()
    cluster().network_test()
# Deprecated since 2016-08-04
@deprecated("Deprecated, use ``h2o.cluster().timezone``.")
def get_timezone():
    """Deprecated."""
    _check_connection()
    return cluster().timezone
# Deprecated since 2016-08-04
@deprecated("Deprecated, set ``h2o.cluster().timezone`` instead.")
def set_timezone(value):
    """Deprecated."""
    _check_connection()
    cluster().timezone = value
# Deprecated since 2016-08-04
@deprecated("Deprecated, use ``h2o.cluster().list_timezones()``.")
def list_timezones():
    """Deprecated."""
    _check_connection()
    return cluster().list_timezones()
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The compiler code necessary to implement the `#[derive]` extensions.
//!
//! FIXME (#2810): hygiene. Search for "__" strings (in other files too). We also assume "extra" is
//! the standard library, and "std" is the core library.
use ast::{Item, MetaItem, MetaWord};
use attr::AttrMetaMethods;
use ext::base::{ExtCtxt, SyntaxEnv, Decorator, ItemDecorator, Modifier};
use ext::build::AstBuilder;
use feature_gate;
use codemap::Span;
use parse::token::{intern, intern_and_get_ident};
use ptr::P;
// Builds a Vec<&'static str> of path segments from `a::b::c` syntax,
// e.g. `pathvec!(std::clone::Clone)` => vec!["std", "clone", "Clone"].
macro_rules! pathvec {
    ($($x:ident)::+) => (
        vec![ $( stringify!($x) ),+ ]
    )
}
// Wraps the segments produced by `pathvec!` in a deriving `Path`.
macro_rules! path {
    ($($x:tt)*) => (
        ::ext::deriving::generic::ty::Path::new( pathvec!( $($x)* ) )
    )
}
// A `Path` consisting of a single local (non-global) identifier.
macro_rules! path_local {
    ($x:ident) => (
        ::ext::deriving::generic::ty::Path::new_local(stringify!($x))
    )
}
// Like `pathvec!`, but rewrites the leading crate segment to `std` when the
// expansion context links the standard library (`$cx.use_std`); otherwise the
// given facade crate (e.g. `core`) is kept, so derives work under `#![no_std]`.
macro_rules! pathvec_std {
    ($cx:expr, $first:ident :: $($rest:ident)::+) => (
        if $cx.use_std {
            pathvec!(std :: $($rest)::+)
        } else {
            pathvec!($first :: $($rest)::+)
        }
    )
}
<|fim▁hole|>}
pub mod bounds;
pub mod clone;
pub mod encodable;
pub mod decodable;
pub mod hash;
pub mod rand;
pub mod show;
pub mod default;
pub mod primitive;
#[path="cmp/eq.rs"]
pub mod eq;
#[path="cmp/totaleq.rs"]
pub mod totaleq;
#[path="cmp/ord.rs"]
pub mod ord;
#[path="cmp/totalord.rs"]
pub mod totalord;
pub mod generic;
// Emits a hard error for the pre-1.0 `#[deriving(...)]` attribute, which was
// renamed to `#[derive(...)]`.  Registered as a decorator that produces no
// items, so expansion continues after the error is reported.
fn expand_deprecated_deriving(cx: &mut ExtCtxt,
                              span: Span,
                              _: &MetaItem,
                              _: &Item,
                              _: &mut FnMut(P<Item>)) {
    cx.span_err(span, "`deriving` has been renamed to `derive`");
}
// Item modifier for `#[derive(...)]`: validates the attribute and desugars it
// by pushing one synthetic `#[derive_Foo]` attribute per requested trait onto
// the item; each of those is later handled by a registered decorator.
fn expand_derive(cx: &mut ExtCtxt,
                 _: Span,
                 mitem: &MetaItem,
                 item: P<Item>) -> P<Item> {
    item.map(|mut item| {
        // `#[derive = "..."]` is malformed; only a list form is accepted.
        if mitem.value_str().is_some() {
            cx.span_err(mitem.span, "unexpected value in `derive`");
        }
        let traits = mitem.meta_item_list().unwrap_or(&[]);
        if traits.is_empty() {
            cx.span_warn(mitem.span, "empty trait list in `derive`");
        }
        // Iterate in reverse so the generated attributes end up applied in
        // the order the user wrote the traits.
        for titem in traits.iter().rev() {
            let tname = match titem.node {
                MetaWord(ref tname) => tname,
                _ => {
                    // e.g. `#[derive(Foo = "bar")]` or a nested list.
                    cx.span_err(titem.span, "malformed `derive` entry");
                    continue;
                }
            };
            // Non-builtin derives require the `custom_derive` feature gate.
            if !(is_builtin_trait(tname) || cx.ecfg.enable_custom_derive()) {
                feature_gate::emit_feature_err(&cx.parse_sess.span_diagnostic,
                                               "custom_derive",
                                               titem.span,
                                               feature_gate::EXPLAIN_CUSTOM_DERIVE);
                continue;
            }
            // #[derive(Foo, Bar)] expands to #[derive_Foo] #[derive_Bar]
            item.attrs.push(cx.attribute(titem.span, cx.meta_word(titem.span,
                intern_and_get_ident(&format!("derive_{}", tname)))));
        }
        item
    })
}
// Generates, from a `"TraitName" => expansion_fn` table:
//   * `register_all`, which installs one `#[derive_Trait]` decorator per
//     entry plus the `derive`/`deriving` attribute handlers, and
//   * `is_builtin_trait`, a name lookup used by the feature-gate check in
//     `expand_derive`.
macro_rules! derive_traits {
    ($( $name:expr => $func:path, )*) => {
        pub fn register_all(env: &mut SyntaxEnv) {
            // Define the #[derive_*] extensions.
            $({
                struct DeriveExtension;
                impl ItemDecorator for DeriveExtension {
                    fn expand(&self,
                              ecx: &mut ExtCtxt,
                              sp: Span,
                              mitem: &MetaItem,
                              item: &Item,
                              push: &mut FnMut(P<Item>)) {
                        // Warn on renamed derives (Show, Encodable, ...).
                        warn_if_deprecated(ecx, sp, $name);
                        $func(ecx, sp, mitem, item, |i| push(i));
                    }
                }
                env.insert(intern(concat!("derive_", $name)),
                           Decorator(Box::new(DeriveExtension)));
            })*
            env.insert(intern("derive"),
                       Modifier(Box::new(expand_derive)));
            env.insert(intern("deriving"),
                       Decorator(Box::new(expand_deprecated_deriving)));
        }
        fn is_builtin_trait(name: &str) -> bool {
            match name {
                $( $name )|* => true,
                _ => false,
            }
        }
    }
}
derive_traits! {
"Clone" => clone::expand_deriving_clone,
"Hash" => hash::expand_deriving_hash,
"RustcEncodable" => encodable::expand_deriving_rustc_encodable,
"RustcDecodable" => decodable::expand_deriving_rustc_decodable,
"PartialEq" => eq::expand_deriving_eq,
"Eq" => totaleq::expand_deriving_totaleq,
"PartialOrd" => ord::expand_deriving_ord,
"Ord" => totalord::expand_deriving_totalord,
"Rand" => rand::expand_deriving_rand,
"Debug" => show::expand_deriving_show,
"Default" => default::expand_deriving_default,
"FromPrimitive" => primitive::expand_deriving_from_primitive,
"Send" => bounds::expand_deriving_unsafe_bound,
"Sync" => bounds::expand_deriving_unsafe_bound,
"Copy" => bounds::expand_deriving_copy,
// deprecated
"Show" => show::expand_deriving_show,
"Encodable" => encodable::expand_deriving_encodable,
"Decodable" => decodable::expand_deriving_decodable,
}
#[inline] // because `name` is a compile-time constant
fn warn_if_deprecated(ecx: &mut ExtCtxt, sp: Span, name: &str) {
if let Some(replacement) = match name {
"Show" => Some("Debug"),
"Encodable" => Some("RustcEncodable"),
"Decodable" => Some("RustcDecodable"),
_ => None,
} {
ecx.span_warn(sp, &format!("derive({}) is deprecated in favor of derive({})",
name, replacement));
}
}<|fim▁end|> | macro_rules! path_std {
($($x:tt)*) => (
::ext::deriving::generic::ty::Path::new( pathvec_std!( $($x)* ) )
) |
<|file_name|>day_4.rs<|end_file_name|><|fim▁begin|>use std::iter::Peekable;
use std::str::Chars;
#[derive(PartialEq)]
enum SymbolGroup {
AlphaNumeric,
WhiteSpace,
Else
}
pub struct Lexer<'a> {
iter: Peekable<Chars<'a>>
}
impl <'a> Lexer<'a> {
pub fn new(line: &'a str) -> Lexer {
Lexer { iter: line.chars().peekable() }
}
pub fn next_lexem(&mut self) -> Option<String> {
let mut value = vec![];
let expected = self.define_symbol_group();
if expected == SymbolGroup::Else {
return None;
}
loop {
let actual = self.define_symbol_group();
let symbol = self.peek_next_symbol();
if expected == actual {
self.iter.next();
value.push(symbol.unwrap());
}
else {<|fim▁hole|> }
}
Some(value.iter().cloned().collect::<String>())
}
fn define_symbol_group(&mut self) -> SymbolGroup {
match self.peek_next_symbol() {
Some('a' ...'z') | Some('A'...'Z') |
Some('_') | Some('0'...'9') => SymbolGroup::AlphaNumeric,
Some(' ') => SymbolGroup::WhiteSpace,
Some(_) | None => SymbolGroup::Else,
}
}
fn peek_next_symbol(&mut self) -> Option<char> {
self.iter.peek().cloned()
}
}<|fim▁end|> | break; |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>import asyncio
import os
from urllib.parse import urlparse
import aiohttp
def damerau_levenshtein(first_string, second_string):
    """Returns the Damerau-Levenshtein edit distance between two strings.

    This is the optimal-string-alignment variant: a transposed pair cannot be
    edited again, so e.g. distance("ca", "abc") == 3.
    """
    len_a, len_b = len(first_string), len(second_string)
    # Full DP table: table[i][j] is the distance between the i-char prefix of
    # first_string and the j-char prefix of second_string.
    table = [list(range(len_b + 1))]
    for i in range(1, len_a + 1):
        table.append([i] + [0] * len_b)
        for j in range(1, len_b + 1):
            cost = 0 if first_string[i - 1] == second_string[j - 1] else 1
            best = min(table[i - 1][j] + 1,          # deletion
                       table[i][j - 1] + 1,          # insertion
                       table[i - 1][j - 1] + cost)   # substitution
            if (i > 1 and j > 1
                    and first_string[i - 1] == second_string[j - 2]
                    and first_string[i - 2] == second_string[j - 1]
                    and first_string[i - 1] != second_string[j - 1]):
                # Transposition of two adjacent characters.
                best = min(best, table[i - 2][j - 2] + cost)
            table[i][j] = best
    return table[len_a][len_b]
def complete(value):
    """asyncio equivalent to `twisted.internet.defer.succeed`: return a
    Future that is already resolved with ``value``."""
    future = asyncio.Future()
    future.set_result(value)
    return future
roman_numeral_table = [
('M', 1000),
('CM', 900),
('D', 500),
('CD', 400),
('C', 100),
('XC', 90),
('L', 50),
('XL', 40),
('X', 10),
('IX', 9),
('V', 5),
('IV', 4),
('I', 1)
]
def int_to_roman(num):
def parts():
nonlocal num<|fim▁hole|> num -= value
yield letter
return ''.join(parts())
class RequestManager:
    """Gross class for managing active requests.
    The only thing it really does is make sure that anything using `get()`
    won't send out duplicate requests. This is useful when trying to download
    metadata for new series.
    """
    # FIXME: make this connection map configurable.
    # Per-host cap on simultaneous requests; hosts not listed here get 50.
    connection_map = {
        'www.omdbapi.com': 20,
    }
    # Maps de-duplication key -> in-flight RequestManager (maintained by get()).
    current_requests = {}
    # Lazily created per-host BoundedSemaphore instances (see get_pool).
    limits = {}
    # Single shared aiohttp connection pool for every request.
    CONN_POOL = aiohttp.TCPConnector()
    # Total number of RequestManager instances ever created (debug counter).
    count = 0
    @classmethod
    def get_pool(cls, key):
        # Create the host's semaphore on first use; `key` is the URL netloc.
        if key not in cls.limits:
            limit = cls.connection_map.get(key, 50)
            cls.limits[key] = asyncio.BoundedSemaphore(limit)
        return cls.limits[key]
    def __init__(self, url, **kwargs):
        self.url = url
        self.kwargs = kwargs
        # Futures to resolve with (response, json) when the request finishes.
        self.callbacks = []
        RequestManager.count += 1
    @asyncio.coroutine
    def run(self):
        # Throttle per host so a single domain cannot exhaust the pool.
        key = urlparse(self.url).netloc
        p = self.get_pool(key)
        with (yield from p):
            response = yield from aiohttp.request('GET', self.url, connector=self.CONN_POOL, **self.kwargs)
            try:
                json = yield from response.json()
            except Exception as e:
                # Fan the failure out to every waiter.
                for cb in self.callbacks:
                    cb.set_exception(e)
            else:
                for cb in self.callbacks:
                    cb.set_result((response, json))
    def wait_for(self):
        # Each caller gets its own fresh future tied to this request.
        self.callbacks.append(asyncio.Future())
        return self.callbacks[-1]
def get(url, **kwargs):
    """Issue a GET through RequestManager, de-duplicating concurrent requests.

    Returns an asyncio Future resolving to ``(response, json)``.
    """
    # Key includes the sorted query params so logically identical requests
    # collapse onto one in-flight RequestManager.
    # NOTE(review): params are appended without a '?' separator -- the key is
    # only used for de-duplication, never as a real URL.
    full_url = url + '&'.join(sorted('='.join(kv) for kv in kwargs.get('params', {}).items()))
    if full_url in RequestManager.current_requests:
        # Piggy-back on the request already in flight.
        return RequestManager.current_requests[full_url].wait_for()
    r = RequestManager(url, **kwargs)
    RequestManager.current_requests[full_url] = r
    asyncio.async(r.run())
    cb = r.wait_for()
    # Done-callbacks receive the finished future (the `result` parameter);
    # this one only drops the de-duplication entry once the request is done.
    @cb.add_done_callback
    def callback(result):
        del RequestManager.current_requests[full_url]
    # A second, separate future is handed to the caller so the cleanup
    # callback above stays private to this function.
    return r.wait_for()
def setup_logging(name, level):
    """Route logbook records at/above ``level`` to ~/.config/aesop/<name>.log."""
    from logbook import NullHandler, RotatingFileHandler, lookup_level
    log_path = os.path.expanduser('~/.config/aesop/{}.log'.format(name))
    # Install a do-nothing handler first so records the rotating file handler
    # does not accept are swallowed instead of leaking to stderr.
    NullHandler().push_application()
    RotatingFileHandler(log_path, level=lookup_level(level)).push_application()
def get_language(path):
    """Best-effort language detection from a media file's name.

    ``path`` is assumed to be a pathlib path (``.suffixes`` is used), e.g.
    ``movie.eng.srt`` tries "eng" and "srt" as language tags -- TODO confirm
    callers always pass pathlib paths.  Returns an ISO 639 three-letter code
    (or a full name from ``isoname``), or None when nothing matches.
    """
    from aesop import isocodes
    for suffix in path.suffixes:
        # Drop the leading dot of the suffix.
        suffix = suffix[1:]
        try:
            # First try the suffix as a capitalized language name.
            isoname = isocodes.isoname(suffix.title())
        except KeyError:
            pass
        else:
            return isoname
        # Only 2- or 3-letter tags can be ISO 639 codes.
        if len(suffix) not in {2, 3}:
            continue
        suffix = suffix.lower()
        if len(suffix) == 2:
            try:
                # Normalize ISO 639-1 two-letter codes to the 639-2/3 form.
                suffix = isocodes.iso2to3(suffix)
            except KeyError:
                continue
        try:
            # Keep the code only if it names a known language.
            isocodes.nicename(suffix)
        except KeyError:
            pass
        else:
            return suffix
while value <= num: |
<|file_name|>0014_auto_20150726_1411.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('agency', '0013_auto_20150726_0001'),
]
operations = [
migrations.AlterField(
model_name='feedinfo',
name='feed_publisher_name',
field=models.CharField(max_length=50, verbose_name='Name', choices=[(b'EPTTC', 'EPTTC')]),
),<|fim▁hole|><|fim▁end|> | ] |
<|file_name|>propagate-approximated-shorter-to-static-no-bound.rs<|end_file_name|><|fim▁begin|>// Test a case where we are trying to prove `'x: 'y` and are forced to
// approximate the shorter end-point (`'y`) to with `'static`. This is
// because `'y` is higher-ranked but we know of no relations to other
// regions. Note that `'static` shows up in the stderr output as `'0`.
// compile-flags:-Zborrowck=mir -Zverbose
#![feature(rustc_attrs)]
use std::cell::Cell;
// Callee knows that:
//
// 'x: 'a
//
// so the only way we can ensure that `'x: 'y` is to show that
// `'a: 'static`.
fn establish_relationships<'a, 'b, F>(_cell_a: &Cell<&'a u32>, _cell_b: &Cell<&'b u32>, _closure: F)
where
F: for<'x, 'y> FnMut(
&Cell<&'a &'x u32>, // shows that 'x: 'a
&Cell<&'x u32>,
&Cell<&'y u32>,
),<|fim▁hole|>fn demand_y<'x, 'y>(_cell_x: &Cell<&'x u32>, _cell_y: &Cell<&'y u32>, _y: &'y u32) {}
#[rustc_regions]
fn supply<'a, 'b>(cell_a: Cell<&'a u32>, cell_b: Cell<&'b u32>) {
establish_relationships(&cell_a, &cell_b, |_outlives, x, y| {
//~^ ERROR borrowed data escapes outside of function
// Only works if 'x: 'y:
demand_y(x, y, x.get())
});
}
fn main() {}<|fim▁end|> | {
}
|
<|file_name|>testfileutils.py<|end_file_name|><|fim▁begin|>import os
import shutil
import biicode.common.test
from biicode.common.utils import file_utils as common_file_utils
def load(filepath):
    """Return binary load of given test resource."""
    with open(file_path(filepath), "rb") as handle:
        return handle.read()
def read(filepath):
    """Return system text content of given test resource."""
    with open(file_path(filepath), "r") as handle:
        return handle.read()
def write(file_, content):
    """Write binary ``content`` to ``file_``, creating parent dirs as needed.

    :return: number of bytes written (the result of ``file.write``).
    """
    directory = os.path.split(file_)[0]
    # Only create a directory when the path actually has one.  The previous
    # code wrapped makedirs in a bare ``except: pass``, which also hid real
    # errors such as permission problems.
    if directory and not os.path.isdir(directory):
        try:
            os.makedirs(directory)
        except OSError:
            # Directory appeared concurrently between the check and the call;
            # any genuine failure will surface when the file is opened below.
            pass
    with open(file_, "wb") as f:
        return f.write(content)
test_resources = os.path.join(os.path.dirname(biicode.common.test.__file__),
"resources/")
def append(content, dest):
    """Append ``content`` to the file at ``dest`` (text mode, created if absent)."""
    with open(dest, "a") as handle:
        handle.write(content)
def get_dir_files(path):
    """Returns a list of files within given test folder
    Paths are relative to test/resources/path"""
    root = os.path.join(test_resources, path)
    return [os.path.relpath(abs_path, root)
            for abs_path in common_file_utils.get_visible_files_recursive(file_path(path))]
def file_path(name):
    """Return full path to given test resource. """
    # Resolves relative to the package-level ``test_resources`` directory.
    return os.path.join(test_resources, name)
def copyFiles(container, dest_folder, files=None):
    '''Copies files from container to dst_folder, filtering by files if provided
    '''
    # Destination paths of everything that was copied, in input order.
    new_files = []
    if not os.path.exists(dest_folder):
        os.makedirs(dest_folder)
    if not files:
        # Default to every visible file below the test-resource container.
        files = get_dir_files(container)
    for f in files:
        srcpath = file_path(os.path.join(container, f))
        dest = os.path.join(dest_folder, f)
        # Re-create intermediate sub-folders for nested relative paths.
        dst_subfolder = os.path.join(dest_folder, os.path.dirname(f))
        if not os.path.isdir(dst_subfolder):
            os.makedirs(dst_subfolder)
        if os.path.isdir(srcpath):
            # copytree requires that `dest` does not exist yet.
            shutil.copytree(srcpath, dest)
        else:
            shutil.copyfile(srcpath, dest)
        new_files.append(dest)
    return new_files
def copyFile(src, dst_folder, dst_name=None):
'''Copies src file from test/resources folder to dst_folder
renamed to dst_name if provided
'''
srcpath = file_path(src)<|fim▁hole|> dst_name = os.path.split(src)[1]
if not os.path.exists(dst_folder):
os.makedirs(dst_folder)
dst = os.path.join(dst_folder, dst_name)
shutil.copyfile(srcpath, dst)
return dst
def createFile(name, dst_folder, content):
    """Create ``dst_folder``/``name`` containing ``content`` and return its path."""
    target = os.path.join(dst_folder, name)
    if not os.path.exists(dst_folder):
        os.makedirs(dst_folder)
    with open(target, 'w+') as handle:
        handle.write(content)
    return target
def removeFolderContents(path):
    '''Recursively deletes all content in given directory'''
    for root, subdirs, filenames in os.walk(path):
        # Delete plain files first, then whole sub-trees; walk() tolerates
        # the directories disappearing underneath it.
        for filename in filenames:
            os.remove(os.path.join(root, filename))
        for subdir in subdirs:
            shutil.rmtree(os.path.join(root, subdir))
def search_pattern_and_replace(path, pattern, replacement):
    '''Performs inline search and replace in given file'''
    import fileinput, re
    # FileInput(inplace=1) redirects stdout into the file being edited, so
    # the trailing-comma print below *is* the write-back (Python 2 syntax).
    for line in fileinput.FileInput(path, inplace=1):
        line = re.sub(pattern, replacement, line)
        print line, # DO NOT REMOVE THIS PRINT, it is necessary for replace to work
def copy_directory(origin, dest):
    """Recursively copy ``origin`` to a new directory ``dest`` and return ``dest``."""
    shutil.copytree(origin, dest)
    return dest
import filecmp
import os.path
def are_dir_trees_equal(dir1, dir2):
    """Compare two directory trees recursively.

    Files are considered equal when both their names and contents match.
    :param dir1: First directory path
    :param dir2: Second directory path
    :return: True when the trees are identical and no access errors occurred,
        False otherwise.
    """
    comparison = filecmp.dircmp(dir1, dir2)
    # Anything present on only one side, or uncomparable, means inequality.
    if comparison.left_only or comparison.right_only or comparison.funny_files:
        return False
    # Deep (content) comparison of the files both trees share.
    _, mismatch, errors = filecmp.cmpfiles(
        dir1, dir2, comparison.common_files, shallow=False)
    if mismatch or errors:
        return False
    # Recurse into every shared sub-directory.
    return all(
        are_dir_trees_equal(os.path.join(dir1, sub), os.path.join(dir2, sub))
        for sub in comparison.common_dirs
    )
def replace_content(folder, file_name, tag, tag_content):
    """ Replace content from folder/file_name of tag with tag content."""
    target = os.path.join(folder, file_name)
    updated = read(target).replace(tag, tag_content)
    return write(target, updated)
<|file_name|>PagerTrigger.java<|end_file_name|><|fim▁begin|>package com.veaer.glass.viewpager;
import android.support.v4.view.ViewPager;
import com.veaer.glass.trigger.Trigger;
/**
* Created by Veaer on 15/11/18.
*/
public class PagerTrigger extends Trigger implements ViewPager.OnPageChangeListener {
private ColorProvider colorProvider;
private int startPosition, endPosition, maxLimit;
public static Trigger addTrigger(ViewPager viewPager, ColorProvider colorProvider) {
PagerTrigger viewPagerTrigger = new PagerTrigger(colorProvider);
viewPager.addOnPageChangeListener(viewPagerTrigger);
viewPagerTrigger.onPageSelected(0);<|fim▁hole|> PagerTrigger(ColorProvider colorProvider) {
this.colorProvider = colorProvider;
maxLimit = colorProvider.getCount() - 1;
}
    @Override
    public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
        // Pick the interpolation endpoints based on swipe direction, clamping
        // to the last page index so scrolls at the end stay in range.
        if (isScrollingRight(position)) {
            startPosition = position;
            endPosition = Math.min(maxLimit, position + 1);
        } else {
            startPosition = Math.min(maxLimit, position + 1);
            endPosition = position;
        }
        initColorGenerator();
        // positionOffset (0..1) drives the blend between the two page colors.
        setColor(ColorGenerator.getColor(position, positionOffset));
    }
@Override
public void onPageScrollStateChanged(int state) {
//do nothing
}
@Override
public void onPageSelected(int position) {
endPosition = position;
startPosition = position;
initColorGenerator();
}
private boolean isScrollingRight(int position) {
return position == startPosition;
}
private void initColorGenerator() {
ColorGenerator.init(startPosition, endPosition, colorProvider);
}
}<|fim▁end|> | return viewPagerTrigger;
}
|
<|file_name|>yui2-button-debug.js<|end_file_name|><|fim▁begin|>YUI.add('yui2-button', function(Y) {
var YAHOO = Y.YUI2;
/*
Copyright (c) 2010, Yahoo! Inc. All rights reserved.
Code licensed under the BSD License:
http://developer.yahoo.com/yui/license.html
version: 2.8.2r1
*/
/**
* @module button
* @description <p>The Button Control enables the creation of rich, graphical
* buttons that function like traditional HTML form buttons. <em>Unlike</em>
* traditional HTML form buttons, buttons created with the Button Control can have
* a label that is different from its value. With the inclusion of the optional
* <a href="module_menu.html">Menu Control</a>, the Button Control can also be
* used to create menu buttons and split buttons, controls that are not
* available natively in HTML. The Button Control can also be thought of as a
* way to create more visually engaging implementations of the browser's
* default radio-button and check-box controls.</p>
* <p>The Button Control supports the following types:</p>
* <dl>
* <dt>push</dt>
* <dd>Basic push button that can execute a user-specified command when
* pressed.</dd>
* <dt>link</dt>
* <dd>Navigates to a specified url when pressed.</dd>
* <dt>submit</dt>
* <dd>Submits the parent form when pressed.</dd>
* <dt>reset</dt>
* <dd>Resets the parent form when pressed.</dd>
* <dt>checkbox</dt>
* <dd>Maintains a "checked" state that can be toggled on and off.</dd>
* <dt>radio</dt>
* <dd>Maintains a "checked" state that can be toggled on and off. Use with
* the ButtonGroup class to create a set of controls that are mutually
* exclusive; checking one button in the set will uncheck all others in
* the group.</dd>
* <dt>menu</dt>
* <dd>When pressed will show/hide a menu.</dd>
* <dt>split</dt>
* <dd>Can execute a user-specified command or display a menu when pressed.</dd>
* </dl>
* @title Button
* @namespace YAHOO.widget
* @requires yahoo, dom, element, event
* @optional container, menu
*/
(function () {
/**
* The Button class creates a rich, graphical button.
* @param {String} p_oElement String specifying the id attribute of the
* <code><input></code>, <code><button></code>,
* <code><a></code>, or <code><span></code> element to
* be used to create the button.
* @param {<a href="http://www.w3.org/TR/2000/WD-DOM-Level-1-20000929/level-
* one-html.html#ID-6043025">HTMLInputElement</a>|<a href="http://www.w3.org
* /TR/2000/WD-DOM-Level-1-20000929/level-one-html.html#ID-34812697">
* HTMLButtonElement</a>|<a href="
* http://www.w3.org/TR/2000/WD-DOM-Level-1-20000929/level-one-html.html#
* ID-33759296">HTMLElement</a>} p_oElement Object reference for the
* <code><input></code>, <code><button></code>,
* <code><a></code>, or <code><span></code> element to be
* used to create the button.
* @param {Object} p_oElement Object literal specifying a set of
* configuration attributes used to create the button.
* @param {Object} p_oAttributes Optional. Object literal specifying a set
* of configuration attributes used to create the button.
* @namespace YAHOO.widget
* @class Button
* @constructor
* @extends YAHOO.util.Element
*/
// Shorthard for utilities
var Dom = YAHOO.util.Dom,
Event = YAHOO.util.Event,
Lang = YAHOO.lang,
UA = YAHOO.env.ua,
Overlay = YAHOO.widget.Overlay,
Menu = YAHOO.widget.Menu,
// Private member variables
m_oButtons = {}, // Collection of all Button instances
m_oOverlayManager = null, // YAHOO.widget.OverlayManager instance
m_oSubmitTrigger = null, // The button that submitted the form
m_oFocusedButton = null; // The button that has focus
// Private methods
/**
* @method createInputElement
* @description Creates an <code><input></code> element of the
* specified type.
* @private
* @param {String} p_sType String specifying the type of
* <code><input></code> element to create.
* @param {String} p_sName String specifying the name of
* <code><input></code> element to create.
* @param {String} p_sValue String specifying the value of
* <code><input></code> element to create.
* @param {String} p_bChecked Boolean specifying if the
* <code><input></code> element is to be checked.
* @return {<a href="http://www.w3.org/TR/2000/WD-DOM-Level-1-20000929/level-
* one-html.html#ID-6043025">HTMLInputElement</a>}
*/
function createInputElement(p_sType, p_sName, p_sValue, p_bChecked) {
var oInput,
sInput;
if (Lang.isString(p_sType) && Lang.isString(p_sName)) {
if (UA.ie) {
/*
For IE it is necessary to create the element with the
"type," "name," "value," and "checked" properties set all
at once.
*/
sInput = "<input type=\"" + p_sType + "\" name=\"" +
p_sName + "\"";
if (p_bChecked) {
sInput += " checked";
}
sInput += ">";
oInput = document.createElement(sInput);
}
else {
oInput = document.createElement("input");
oInput.name = p_sName;
oInput.type = p_sType;
if (p_bChecked) {
oInput.checked = true;
}
}
oInput.value = p_sValue;
}
return oInput;
}
/**
* @method setAttributesFromSrcElement
* @description Gets the values for all the attributes of the source element
* (either <code><input></code> or <code><a></code>) that
* map to Button configuration attributes and sets them into a collection
* that is passed to the Button constructor.
* @private
* @param {<a href="http://www.w3.org/TR/2000/WD-DOM-Level-1-20000929/level-
* one-html.html#ID-6043025">HTMLInputElement</a>|<a href="http://www.w3.org/
* TR/2000/WD-DOM-Level-1-20000929/level-one-html.html#ID-
* 48250443">HTMLAnchorElement</a>} p_oElement Object reference to the HTML
* element (either <code><input></code> or <code><span>
* </code>) used to create the button.
* @param {Object} p_oAttributes Object reference for the collection of
* configuration attributes used to create the button.
*/
function setAttributesFromSrcElement(p_oElement, p_oAttributes) {
var sSrcElementNodeName = p_oElement.nodeName.toUpperCase(),
sClass = (this.CLASS_NAME_PREFIX + this.CSS_CLASS_NAME),
me = this,
oAttribute,
oRootNode,
sText;
/**
* @method setAttributeFromDOMAttribute
* @description Gets the value of the specified DOM attribute and sets it
* into the collection of configuration attributes used to configure
* the button.
* @private
* @param {String} p_sAttribute String representing the name of the
* attribute to retrieve from the DOM element.
*/
function setAttributeFromDOMAttribute(p_sAttribute) {
if (!(p_sAttribute in p_oAttributes)) {
/*
Need to use "getAttributeNode" instead of "getAttribute"
because using "getAttribute," IE will return the innerText
of a <code><button></code> for the value attribute
rather than the value of the "value" attribute.
*/
oAttribute = p_oElement.getAttributeNode(p_sAttribute);
if (oAttribute && ("value" in oAttribute)) {
YAHOO.log("Setting attribute \"" + p_sAttribute +
"\" using source element's attribute value of \"" +
oAttribute.value + "\"", "info", me.toString());
p_oAttributes[p_sAttribute] = oAttribute.value;
}
}
}
/**
* @method setFormElementProperties
* @description Gets the value of the attributes from the form element
* and sets them into the collection of configuration attributes used to
* configure the button.
* @private
*/
function setFormElementProperties() {
setAttributeFromDOMAttribute("type");
if (p_oAttributes.type == "button") {
p_oAttributes.type = "push";
}
if (!("disabled" in p_oAttributes)) {
p_oAttributes.disabled = p_oElement.disabled;
}
setAttributeFromDOMAttribute("name");
setAttributeFromDOMAttribute("value");
setAttributeFromDOMAttribute("title");
}
// Tailor attribute extraction to the kind of source element.
switch (sSrcElementNodeName) {
case "A":
// Anchors always become "link" buttons.
p_oAttributes.type = "link";
setAttributeFromDOMAttribute("href");
setAttributeFromDOMAttribute("target");
break;
case "INPUT":
setFormElementProperties();
if (!("checked" in p_oAttributes)) {
p_oAttributes.checked = p_oElement.checked;
}
break;
case "BUTTON":
setFormElementProperties();
// Checked/disabled state may be encoded as CSS classes on the
// grandparent node (the widget's root element).
oRootNode = p_oElement.parentNode.parentNode;
if (Dom.hasClass(oRootNode, sClass + "-checked")) {
p_oAttributes.checked = true;
}
if (Dom.hasClass(oRootNode, sClass + "-disabled")) {
p_oAttributes.disabled = true;
}
// Neutralize the native <button> so it cannot submit its own value.
p_oElement.removeAttribute("value");
p_oElement.setAttribute("type", "button");
break;
}
// The widget takes over identity and form participation, so strip
// "id" and "name" from the source element.
p_oElement.removeAttribute("id");
p_oElement.removeAttribute("name");
if (!("tabindex" in p_oAttributes)) {
p_oAttributes.tabindex = p_oElement.tabIndex;
}
if (!("label" in p_oAttributes)) {
// Set the "label" property
sText = sSrcElementNodeName == "INPUT" ?
p_oElement.value : p_oElement.innerHTML;
if (sText && sText.length > 0) {
p_oAttributes.label = sText;
}
}
}
/**
* @method initConfig
* @description Initializes the set of configuration attributes that are
* used to instantiate the button.
* @private
* @param {Object} p_oConfig Object representing the button's set of
* configuration attributes.
*/
function initConfig(p_oConfig) {
var oAttributes = p_oConfig.attributes,
oSrcElement = oAttributes.srcelement,
sSrcElementNodeName = oSrcElement.nodeName.toUpperCase(),
me = this;
// If the source element is already the widget's root node type
// (NODE_NAME, "SPAN"), reuse it directly and pull the attributes
// from the first <button>, <a> or <input> found inside it.
if (sSrcElementNodeName == this.NODE_NAME) {
p_oConfig.element = oSrcElement;
p_oConfig.id = oSrcElement.id;
Dom.getElementsBy(function (p_oElement) {
switch (p_oElement.nodeName.toUpperCase()) {
case "BUTTON":
case "A":
case "INPUT":
setAttributesFromSrcElement.call(me, p_oElement,
oAttributes);
break;
}
}, "*", oSrcElement);
}
else {
// Otherwise read the attributes straight off the source element.
switch (sSrcElementNodeName) {
case "BUTTON":
case "A":
case "INPUT":
setAttributesFromSrcElement.call(this, oSrcElement,
oAttributes);
break;
}
}
}
// Constructor
/**
* Button constructor. Accepts either (1) a single configuration object,
* from which a brand-new element is created, or (2) an element id,
* element reference, or configuration object plus an optional attribute
* map, in which case existing HTML is used as the source element.
*/
YAHOO.widget.Button = function (p_oElement, p_oAttributes) {
// Resolve the optional Overlay/Menu modules lazily — they may have
// been loaded after this module.
if (!Overlay && YAHOO.widget.Overlay) {
Overlay = YAHOO.widget.Overlay;
}
if (!Menu && YAHOO.widget.Menu) {
Menu = YAHOO.widget.Menu;
}
var fnSuperClass = YAHOO.widget.Button.superclass.constructor,
oConfig,
oElement;
// Case 1: a lone configuration object (not a string id, not a node).
if (arguments.length == 1 && !Lang.isString(p_oElement) && !p_oElement.nodeName) {
if (!p_oElement.id) {
p_oElement.id = Dom.generateId();
YAHOO.log("No value specified for the button's \"id\" " +
"attribute. Setting button id to \"" + p_oElement.id +
"\".", "info", this.toString());
}
YAHOO.log("No source HTML element. Building the button " +
"using the set of configuration attributes.", "info", this.toString());
fnSuperClass.call(this, (this.createButtonElement(p_oElement.type)), p_oElement);
}
else {
oConfig = { element: null, attributes: (p_oAttributes || {}) };
// Case 2a: an element id string.
if (Lang.isString(p_oElement)) {
oElement = Dom.get(p_oElement);
if (oElement) {
if (!oConfig.attributes.id) {
oConfig.attributes.id = p_oElement;
}
YAHOO.log("Building the button using an existing " +
"HTML element as a source element.", "info", this.toString());
oConfig.attributes.srcelement = oElement;
initConfig.call(this, oConfig);
// initConfig only sets oConfig.element when the source was
// already the widget's root node type; otherwise build one.
if (!oConfig.element) {
YAHOO.log("Source element could not be used " +
"as is. Creating a new HTML element for " +
"the button.", "info", this.toString());
oConfig.element = this.createButtonElement(oConfig.attributes.type);
}
fnSuperClass.call(this, oConfig.element, oConfig.attributes);
}
}
// Case 2b: a DOM node reference.
else if (p_oElement.nodeName) {
if (!oConfig.attributes.id) {
if (p_oElement.id) {
oConfig.attributes.id = p_oElement.id;
}
else {
oConfig.attributes.id = Dom.generateId();
YAHOO.log("No value specified for the button's " +
"\"id\" attribute. Setting button id to \"" +
oConfig.attributes.id + "\".", "info", this.toString());
}
}
YAHOO.log("Building the button using an existing HTML " +
"element as a source element.", "info", this.toString());
oConfig.attributes.srcelement = p_oElement;
initConfig.call(this, oConfig);
if (!oConfig.element) {
YAHOO.log("Source element could not be used as is." +
" Creating a new HTML element for the button.",
"info", this.toString());
oConfig.element = this.createButtonElement(oConfig.attributes.type);
}
fnSuperClass.call(this, oConfig.element, oConfig.attributes);
}
}
};
YAHOO.extend(YAHOO.widget.Button, YAHOO.util.Element, {
// Protected properties
/**
* @property _button
* @description Object reference to the button's internal
* <code><a></code> or <code><button></code> element.
* @default null
* @protected
* @type <a href="http://www.w3.org/TR/2000/WD-DOM-Level-1-20000929/
* level-one-html.html#ID-48250443">HTMLAnchorElement</a>|<a href="
* http://www.w3.org/TR/2000/WD-DOM-Level-1-20000929/level-one-html.html
* #ID-34812697">HTMLButtonElement</a>
*/
_button: null,
/**
* @property _menu
* @description Object reference to the button's menu.
* @default null
* @protected
* @type {<a href="YAHOO.widget.Overlay.html">YAHOO.widget.Overlay</a>|
* <a href="YAHOO.widget.Menu.html">YAHOO.widget.Menu</a>}
*/
_menu: null,
/**
* @property _hiddenFields
* @description Object reference to the <code><input></code>
* element, or array of HTML form elements used to represent the button
* when its parent form is submitted.
* @default null
* @protected
* @type <a href="http://www.w3.org/TR/2000/WD-DOM-Level-1-20000929/
* level-one-html.html#ID-6043025">HTMLInputElement</a>|Array
*/
_hiddenFields: null,
/**
* @property _onclickAttributeValue
* @description Object reference to the button's current value for the
* "onclick" configuration attribute.
* @default null
* @protected
* @type Object
*/
_onclickAttributeValue: null,
/**
* @property _activationKeyPressed
* @description Boolean indicating if the key(s) that toggle the button's
* "active" state have been pressed.
* @default false
* @protected
* @type Boolean
*/
_activationKeyPressed: false,
/**
* @property _activationButtonPressed
* @description Boolean indicating if the mouse button that toggles
* the button's "active" state has been pressed.
* @default false
* @protected
* @type Boolean
*/
_activationButtonPressed: false,
/**
* @property _hasKeyEventHandlers
* @description Boolean indicating if the button's "blur", "keydown" and
* "keyup" event handlers are assigned
* @default false
* @protected
* @type Boolean
*/
_hasKeyEventHandlers: false,
/**
* @property _hasMouseEventHandlers
* @description Boolean indicating if the button's "mouseout,"
* "mousedown," and "mouseup" event handlers are assigned
* @default false
* @protected
* @type Boolean
*/
_hasMouseEventHandlers: false,
/**
* @property _nOptionRegionX
* @description Number representing the X coordinate of the leftmost edge of the Button's
* option region. Applies only to Buttons of type "split".
* @default 0
* @protected
* @type Number
*/
_nOptionRegionX: 0,
// NOTE(review): the event handlers below also read/write a
// "_bOptionPressed" flag that is not declared in this property list —
// confirm it is declared elsewhere or add it here for documentation.
// Constants
/**
* @property CLASS_NAME_PREFIX
* @description Prefix used for all class names applied to a Button.
* @default "yui-"
* @final
* @type String
*/
CLASS_NAME_PREFIX: "yui-",
/**
* @property NODE_NAME
* @description The name of the node to be used for the button's
* root element.
* @default "SPAN"
* @final
* @type String
*/
NODE_NAME: "SPAN",
/**
* @property CHECK_ACTIVATION_KEYS
* @description Array of numbers representing keys that (when pressed)
* toggle the button's "checked" attribute.
* @default [32]
* @final
* @type Array
*/
CHECK_ACTIVATION_KEYS: [32],
/**
* @property ACTIVATION_KEYS
* @description Array of numbers representing keys that (when pressed)
* toggle the button's "active" state.
* @default [13, 32]
* @final
* @type Array
*/
ACTIVATION_KEYS: [13, 32],
/**
* @property OPTION_AREA_WIDTH
* @description Width (in pixels) of the area of a split button that
* when pressed will display a menu.
* @default 20
* @final
* @type Number
*/
OPTION_AREA_WIDTH: 20,
/**
* @property CSS_CLASS_NAME
* @description String representing the CSS class(es) to be applied to
* the button's root element.
* @default "button"
* @final
* @type String
*/
CSS_CLASS_NAME: "button",
// Protected attribute setter methods
/**
* @method _setType
* @description Sets the value of the button's "type" attribute.
* @protected
* @param {String} p_sType String indicating the value for the button's
* "type" attribute.
*/
_setType: function (p_sType) {<|fim▁hole|> if (p_sType == "split") {
this.on("option", this._onOption);
}
},
/**
* @method _setLabel
* @description Sets the value of the button's "label" attribute.
* @protected
* @param {String} p_sLabel String indicating the value for the button's
* "label" attribute.
*/
_setLabel: function (p_sLabel) {
this._button.innerHTML = p_sLabel;
/*
Remove and add the default class name from the root element
for Gecko to ensure that the button shrinkwraps to the label.
Without this the button will not be rendered at the correct
width when the label changes. The most likely cause for this
bug is button's use of the Gecko-specific CSS display type of
"-moz-inline-box" to simulate "inline-block" supported by IE,
Safari and Opera.
*/
var sClass,
nGeckoVersion = UA.gecko;
if (nGeckoVersion && nGeckoVersion < 1.9 && Dom.inDocument(this.get("element"))) {
sClass = (this.CLASS_NAME_PREFIX + this.CSS_CLASS_NAME);
this.removeClass(sClass);
// Re-add on the next tick so Gecko performs a fresh layout pass.
Lang.later(0, this, this.addClass, sClass);
}
},
/**
* @method _setTabIndex
* @description Sets the value of the button's "tabindex" attribute.
* @protected
* @param {Number} p_nTabIndex Number indicating the value for the
* button's "tabindex" attribute.
*/
_setTabIndex: function (p_nTabIndex) {
this._button.tabIndex = p_nTabIndex;
},
/**
* @method _setTitle
* @description Sets the value of the button's "title" attribute.
* @protected
* @param {String} p_nTabIndex Number indicating the value for
* the button's "title" attribute.
*/
_setTitle: function (p_sTitle) {
if (this.get("type") != "link") {
this._button.title = p_sTitle;
}
},
/**
* @method _setDisabled
* @description Sets the value of the button's "disabled" attribute.
* @protected
* @param {Boolean} p_bDisabled Boolean indicating the value for
* the button's "disabled" attribute.
*/
_setDisabled: function (p_bDisabled) {
if (this.get("type") != "link") {
if (p_bDisabled) {
if (this._menu) {
this._menu.hide();
}
if (this.hasFocus()) {
this.blur();
}
this._button.setAttribute("disabled", "disabled");
this.addStateCSSClasses("disabled");
this.removeStateCSSClasses("hover");
this.removeStateCSSClasses("active");
this.removeStateCSSClasses("focus");
}
else {
this._button.removeAttribute("disabled");
this.removeStateCSSClasses("disabled");
}
}
},
/**
* @method _setHref
* @description Sets the value of the button's "href" attribute.
* @protected
* @param {String} p_sHref String indicating the value for the button's
* "href" attribute.
*/
_setHref: function (p_sHref) {
if (this.get("type") == "link") {
this._button.href = p_sHref;
}
},
/**
* @method _setTarget
* @description Sets the value of the button's "target" attribute.
* @protected
* @param {String} p_sTarget String indicating the value for the button's
* "target" attribute.
*/
_setTarget: function (p_sTarget) {
if (this.get("type") == "link") {
this._button.setAttribute("target", p_sTarget);
}
},
/**
* @method _setChecked
* @description Sets the value of the button's "target" attribute.
* @protected
* @param {Boolean} p_bChecked Boolean indicating the value for
* the button's "checked" attribute.
*/
_setChecked: function (p_bChecked) {
var sType = this.get("type");
if (sType == "checkbox" || sType == "radio") {
if (p_bChecked) {
this.addStateCSSClasses("checked");
}
else {
this.removeStateCSSClasses("checked");
}
}
},
/**
* @method _setMenu
* @description Sets the value of the button's "menu" attribute. Accepts
* an existing Menu/Overlay instance, an array of item data, an element
* id string, or an element/select node, and normalizes all of them into
* a rendered (or lazily rendered) menu stored on this._menu.
* @protected
* @param {Object} p_oMenu Object indicating the value for the button's
* "menu" attribute.
*/
_setMenu: function (p_oMenu) {
var bLazyLoad = this.get("lazyloadmenu"),
oButtonElement = this.get("element"),
sMenuCSSClassName,
/*
Boolean indicating if the value of p_oMenu is an instance
of YAHOO.widget.Menu or YAHOO.widget.Overlay.
*/
bInstance = false,
oMenu,
oMenuElement,
oSrcElement;
// Deferred render helper used when the button is not yet in the DOM.
function onAppendTo() {
oMenu.render(oButtonElement.parentNode);
this.removeListener("appendTo", onAppendTo);
}
// Deferred container assignment for lazily loaded menus.
function setMenuContainer() {
oMenu.cfg.queueProperty("container", oButtonElement.parentNode);
this.removeListener("appendTo", setMenuContainer);
}
// Wires the (now determined) oMenu up to this button: CSS classes,
// show/hide/render subscriptions, and Menu- or Overlay-specific setup.
function initMenu() {
var oContainer;
if (oMenu) {
Dom.addClass(oMenu.element, this.get("menuclassname"));
Dom.addClass(oMenu.element, this.CLASS_NAME_PREFIX + this.get("type") + "-button-menu");
oMenu.showEvent.subscribe(this._onMenuShow, null, this);
oMenu.hideEvent.subscribe(this._onMenuHide, null, this);
oMenu.renderEvent.subscribe(this._onMenuRender, null, this);
if (Menu && oMenu instanceof Menu) {
if (bLazyLoad) {
oContainer = this.get("container");
if (oContainer) {
oMenu.cfg.queueProperty("container", oContainer);
}
else {
this.on("appendTo", setMenuContainer);
}
}
oMenu.cfg.queueProperty("clicktohide", false);
oMenu.keyDownEvent.subscribe(this._onMenuKeyDown, this, true);
oMenu.subscribe("click", this._onMenuClick, this, true);
this.on("selectedMenuItemChange", this._onSelectedMenuItemChange);
// A <select> source element is superseded by the menu; drop it.
oSrcElement = oMenu.srcElement;
if (oSrcElement && oSrcElement.nodeName.toUpperCase() == "SELECT") {
oSrcElement.style.display = "none";
oSrcElement.parentNode.removeChild(oSrcElement);
}
}
else if (Overlay && oMenu instanceof Overlay) {
if (!m_oOverlayManager) {
m_oOverlayManager = new YAHOO.widget.OverlayManager();
}
m_oOverlayManager.register(oMenu);
}
this._menu = oMenu;
if (!bInstance && !bLazyLoad) {
if (Dom.inDocument(oButtonElement)) {
oMenu.render(oButtonElement.parentNode);
}
else {
this.on("appendTo", onAppendTo);
}
}
}
}
if (Overlay) {
if (Menu) {
sMenuCSSClassName = Menu.prototype.CSS_CLASS_NAME;
}
if (p_oMenu && Menu && (p_oMenu instanceof Menu)) {
oMenu = p_oMenu;
bInstance = true;
initMenu.call(this);
}
else if (Overlay && p_oMenu && (p_oMenu instanceof Overlay)) {
oMenu = p_oMenu;
bInstance = true;
oMenu.cfg.queueProperty("visible", false);
initMenu.call(this);
}
else if (Menu && Lang.isArray(p_oMenu)) {
oMenu = new Menu(Dom.generateId(), { lazyload: bLazyLoad, itemdata: p_oMenu });
this._menu = oMenu;
this.on("appendTo", initMenu);
}
else if (Lang.isString(p_oMenu)) {
oMenuElement = Dom.get(p_oMenu);
if (oMenuElement) {
// NOTE(review): this evaluates as (Menu && hasClass) || isSelect;
// if Menu were unavailable but the element is a <select>, the
// "new Menu" below would throw. Confirm Menu is always loaded
// alongside Overlay in supported configurations.
if (Menu && Dom.hasClass(oMenuElement, sMenuCSSClassName) ||
oMenuElement.nodeName.toUpperCase() == "SELECT") {
oMenu = new Menu(p_oMenu, { lazyload: bLazyLoad });
initMenu.call(this);
}
else if (Overlay) {
oMenu = new Overlay(p_oMenu, { visible: false });
initMenu.call(this);
}
}
}
else if (p_oMenu && p_oMenu.nodeName) {
// Same precedence caveat as the string branch above.
if (Menu && Dom.hasClass(p_oMenu, sMenuCSSClassName) ||
p_oMenu.nodeName.toUpperCase() == "SELECT") {
oMenu = new Menu(p_oMenu, { lazyload: bLazyLoad });
initMenu.call(this);
}
else if (Overlay) {
if (!p_oMenu.id) {
Dom.generateId(p_oMenu);
}
oMenu = new Overlay(p_oMenu, { visible: false });
initMenu.call(this);
}
}
}
},
/**
* @method _setOnClick
* @description Sets the value of the button's "onclick" attribute.
* @protected
* @param {Object} p_oObject Object indicating the value for the button's
* "onclick" attribute.
*/
_setOnClick: function (p_oObject) {
/*
Remove any existing listeners if a "click" event handler
has already been specified.
*/
if (this._onclickAttributeValue &&
(this._onclickAttributeValue != p_oObject)) {
this.removeListener("click", this._onclickAttributeValue.fn);
this._onclickAttributeValue = null;
}
if (!this._onclickAttributeValue &&
Lang.isObject(p_oObject) &&
Lang.isFunction(p_oObject.fn)) {
this.on("click", p_oObject.fn, p_oObject.obj, p_oObject.scope);
this._onclickAttributeValue = p_oObject;
}
},
// Protected methods
/**
* @method _isActivationKey
* @description Determines if the specified keycode is one that toggles
* the button's "active" state.
* @protected
* @param {Number} p_nKeyCode Number representing the keycode to
* be evaluated.
* @return {Boolean}
*/
_isActivationKey: function (p_nKeyCode) {
var sType = this.get("type"),
aKeyCodes = (sType == "checkbox" || sType == "radio") ?
this.CHECK_ACTIVATION_KEYS : this.ACTIVATION_KEYS,
nKeyCodes = aKeyCodes.length,
bReturnVal = false,
i;
if (nKeyCodes > 0) {
i = nKeyCodes - 1;
do {
if (p_nKeyCode == aKeyCodes[i]) {
bReturnVal = true;
break;
}
}
while (i--);
}
return bReturnVal;
},
/**
* @method _isSplitButtonOptionKey
* @description Determines if the specified keycode is one that toggles
* the display of the split button's menu.
* @protected
* @param {Event} p_oEvent Object representing the DOM event object
* passed back by the event utility (YAHOO.util.Event).
* @return {Boolean}
*/
_isSplitButtonOptionKey: function (p_oEvent) {
var bShowMenu = (Event.getCharCode(p_oEvent) == 40);
var onKeyPress = function (p_oEvent) {
Event.preventDefault(p_oEvent);
this.removeListener("keypress", onKeyPress);
};
// Prevent the browser from scrolling the window
if (bShowMenu) {
if (UA.opera) {
this.on("keypress", onKeyPress);
}
Event.preventDefault(p_oEvent);
}
return bShowMenu;
},
/**
* @method _addListenersToForm
* @description Adds event handlers to the button's form: "reset" and
* "submit" handlers for this button, plus (for submit-type buttons) a
* single shared "keypress" handler on the form itself.
* @protected
*/
_addListenersToForm: function () {
var oForm = this.getForm(),
onFormKeyPress = YAHOO.widget.Button.onFormKeyPress,
bHasKeyPressListener,
oSrcElement,
aListeners,
nListeners,
i;
if (oForm) {
Event.on(oForm, "reset", this._onFormReset, null, this);
Event.on(oForm, "submit", this._onFormSubmit, null, this);
oSrcElement = this.get("srcelement");
if (this.get("type") == "submit" ||
(oSrcElement && oSrcElement.type == "submit"))
{
// Scan the form's existing "keypress" listeners so the shared
// onFormKeyPress handler is attached at most once per form,
// no matter how many submit buttons the form contains.
aListeners = Event.getListeners(oForm, "keypress");
bHasKeyPressListener = false;
if (aListeners) {
nListeners = aListeners.length;
if (nListeners > 0) {
i = nListeners - 1;
do {
if (aListeners[i].fn == onFormKeyPress) {
bHasKeyPressListener = true;
break;
}
}
while (i--);
}
}
if (!bHasKeyPressListener) {
Event.on(oForm, "keypress", onFormKeyPress);
}
}
}
},
/**
* @method _showMenu
* @description Shows the button's menu: hides all other visible menus
* and overlays, positions the menu relative to the button, then shows
* and aligns it (optionally focusing it per the "focusmenu" attribute).
* @protected
* @param {Event} p_oEvent Object representing the DOM event object
* passed back by the event utility (YAHOO.util.Event) that triggered
* the display of the menu.
*/
_showMenu: function (p_oEvent) {
if (YAHOO.widget.MenuManager) {
YAHOO.widget.MenuManager.hideVisible();
}
if (m_oOverlayManager) {
m_oOverlayManager.hideAll();
}
var oMenu = this._menu,
aMenuAlignment = this.get("menualignment"),
bFocusMenu = this.get("focusmenu"),
fnFocusMethod;
// Before the menu is rendered its cfg can only queue properties;
// afterwards they must be set directly.
if (this._renderedMenu) {
oMenu.cfg.setProperty("context",
[this.get("element"), aMenuAlignment[0], aMenuAlignment[1]]);
oMenu.cfg.setProperty("preventcontextoverlap", true);
oMenu.cfg.setProperty("constraintoviewport", true);
}
else {
oMenu.cfg.queueProperty("context",
[this.get("element"), aMenuAlignment[0], aMenuAlignment[1]]);
oMenu.cfg.queueProperty("preventcontextoverlap", true);
oMenu.cfg.queueProperty("constraintoviewport", true);
}
/*
Refocus the Button before showing its Menu in case the call to
YAHOO.widget.MenuManager.hideVisible() resulted in another element in the
DOM being focused after another Menu was hidden.
*/
this.focus();
if (Menu && oMenu && (oMenu instanceof Menu)) {
// Since Menus automatically focus themselves when made visible, temporarily
// replace the Menu focus method so that the value of the Button's "focusmenu"
// attribute determines if the Menu should be focus when made visible.
fnFocusMethod = oMenu.focus;
oMenu.focus = function () {};
if (this._renderedMenu) {
oMenu.cfg.setProperty("minscrollheight", this.get("menuminscrollheight"));
oMenu.cfg.setProperty("maxheight", this.get("menumaxheight"));
}
else {
oMenu.cfg.queueProperty("minscrollheight", this.get("menuminscrollheight"));
oMenu.cfg.queueProperty("maxheight", this.get("menumaxheight"));
}
oMenu.show();
// Restore the real focus method once the menu is visible.
oMenu.focus = fnFocusMethod;
oMenu.align();
/*
Stop the propagation of the event so that the MenuManager
doesn't blur the menu after it gets focus.
*/
if (p_oEvent.type == "mousedown") {
Event.stopPropagation(p_oEvent);
}
if (bFocusMenu) {
oMenu.focus();
}
}
else if (Overlay && oMenu && (oMenu instanceof Overlay)) {
if (!this._renderedMenu) {
oMenu.render(this.get("element").parentNode);
}
oMenu.show();
oMenu.align();
}
},
/**
* @method _hideMenu
* @description Hides the button's menu.
* @protected
*/
_hideMenu: function () {
var oMenu = this._menu;
if (oMenu) {
oMenu.hide();
}
},
// Protected event handlers
/**
* @method _onMouseOver
* @description "mouseover" event handler for the button.
* @protected
* @param {Event} p_oEvent Object representing the DOM event object
* passed back by the event utility (YAHOO.util.Event).
*/
_onMouseOver: function (p_oEvent) {
var sType = this.get("type"),
oElement,
nOptionRegionX;
if (sType === "split") {
oElement = this.get("element");
nOptionRegionX =
(Dom.getX(oElement) + (oElement.offsetWidth - this.OPTION_AREA_WIDTH));
this._nOptionRegionX = nOptionRegionX;
}
if (!this._hasMouseEventHandlers) {
if (sType === "split") {
this.on("mousemove", this._onMouseMove);
}
this.on("mouseout", this._onMouseOut);
this._hasMouseEventHandlers = true;
}
this.addStateCSSClasses("hover");
if (sType === "split" && (Event.getPageX(p_oEvent) > nOptionRegionX)) {
this.addStateCSSClasses("hoveroption");
}
if (this._activationButtonPressed) {
this.addStateCSSClasses("active");
}
if (this._bOptionPressed) {
this.addStateCSSClasses("activeoption");
}
if (this._activationButtonPressed || this._bOptionPressed) {
Event.removeListener(document, "mouseup", this._onDocumentMouseUp);
}
},
/**
* @method _onMouseMove
* @description "mousemove" event handler for the button.
* @protected
* @param {Event} p_oEvent Object representing the DOM event object
* passed back by the event utility (YAHOO.util.Event).
*/
_onMouseMove: function (p_oEvent) {
var nOptionRegionX = this._nOptionRegionX;
if (nOptionRegionX) {
if (Event.getPageX(p_oEvent) > nOptionRegionX) {
this.addStateCSSClasses("hoveroption");
}
else {
this.removeStateCSSClasses("hoveroption");
}
}
},
/**
* @method _onMouseOut
* @description "mouseout" event handler for the button.
* @protected
* @param {Event} p_oEvent Object representing the DOM event object
* passed back by the event utility (YAHOO.util.Event).
*/
_onMouseOut: function (p_oEvent) {
var sType = this.get("type");
this.removeStateCSSClasses("hover");
if (sType != "menu") {
this.removeStateCSSClasses("active");
}
if (this._activationButtonPressed || this._bOptionPressed) {
Event.on(document, "mouseup", this._onDocumentMouseUp, null, this);
}
if (sType === "split" && (Event.getPageX(p_oEvent) > this._nOptionRegionX)) {
this.removeStateCSSClasses("hoveroption");
}
},
/**
* @method _onDocumentMouseUp
* @description "mouseup" event handler for the button.
* @protected
* @param {Event} p_oEvent Object representing the DOM event object
* passed back by the event utility (YAHOO.util.Event).
*/
_onDocumentMouseUp: function (p_oEvent) {
this._activationButtonPressed = false;
this._bOptionPressed = false;
var sType = this.get("type"),
oTarget,
oMenuElement;
if (sType == "menu" || sType == "split") {
oTarget = Event.getTarget(p_oEvent);
oMenuElement = this._menu.element;
if (oTarget != oMenuElement &&
!Dom.isAncestor(oMenuElement, oTarget)) {
this.removeStateCSSClasses((sType == "menu" ?
"active" : "activeoption"));
this._hideMenu();
}
}
Event.removeListener(document, "mouseup", this._onDocumentMouseUp);
},
/**
* @method _onMouseDown
* @description "mousedown" event handler for the button. Handles only
* the primary mouse button: focuses the button, enters the pressed
* state (or toggles the menu for menu/split types), and returns false
* when the press landed on a split button's option region.
* @protected
* @param {Event} p_oEvent Object representing the DOM event object
* passed back by the event utility (YAHOO.util.Event).
*/
_onMouseDown: function (p_oEvent) {
var sType,
bReturnVal = true;
// One-shot "mouseup" helper armed below after a short delay; hides
// the menu when a press on a menu/split button ends on the button.
function onMouseUp() {
this._hideMenu();
this.removeListener("mouseup", onMouseUp);
}
// NOTE(review): "which || button" == 1 matches the primary button in
// both event models — confirm for the supported browser matrix.
if ((p_oEvent.which || p_oEvent.button) == 1) {
if (!this.hasFocus()) {
this.focus();
}
sType = this.get("type");
if (sType == "split") {
if (Event.getPageX(p_oEvent) > this._nOptionRegionX) {
this.fireEvent("option", p_oEvent);
bReturnVal = false;
}
else {
this.addStateCSSClasses("active");
this._activationButtonPressed = true;
}
}
else if (sType == "menu") {
if (this.isActive()) {
this._hideMenu();
this._activationButtonPressed = false;
}
else {
this._showMenu(p_oEvent);
this._activationButtonPressed = true;
}
}
else {
this.addStateCSSClasses("active");
this._activationButtonPressed = true;
}
if (sType == "split" || sType == "menu") {
// Delay arming onMouseUp so the mouseup that completes this
// same click does not immediately hide the menu.
this._hideMenuTimer = Lang.later(250, this, this.on, ["mouseup", onMouseUp]);
}
}
return bReturnVal;
},
/**
* @method _onMouseUp
* @description "mouseup" event handler for the button.
* @protected
* @param {Event} p_oEvent Object representing the DOM event object
* passed back by the event utility (YAHOO.util.Event).
*/
_onMouseUp: function (p_oEvent) {
var sType = this.get("type"),
oHideMenuTimer = this._hideMenuTimer,
bReturnVal = true;
if (oHideMenuTimer) {
oHideMenuTimer.cancel();
}
if (sType == "checkbox" || sType == "radio") {
this.set("checked", !(this.get("checked")));
}
this._activationButtonPressed = false;
if (sType != "menu") {
this.removeStateCSSClasses("active");
}
if (sType == "split" && Event.getPageX(p_oEvent) > this._nOptionRegionX) {
bReturnVal = false;
}
return bReturnVal;
},
/**
* @method _onFocus
* @description "focus" event handler for the button.
* @protected
* @param {Event} p_oEvent Object representing the DOM event object
* passed back by the event utility (YAHOO.util.Event).
*/
_onFocus: function (p_oEvent) {
var oElement;
this.addStateCSSClasses("focus");
if (this._activationKeyPressed) {
this.addStateCSSClasses("active");
}
m_oFocusedButton = this;
if (!this._hasKeyEventHandlers) {
oElement = this._button;
Event.on(oElement, "blur", this._onBlur, null, this);
Event.on(oElement, "keydown", this._onKeyDown, null, this);
Event.on(oElement, "keyup", this._onKeyUp, null, this);
this._hasKeyEventHandlers = true;
}
this.fireEvent("focus", p_oEvent);
},
/**
* @method _onBlur
* @description "blur" event handler for the button.
* @protected
* @param {Event} p_oEvent Object representing the DOM event object
* passed back by the event utility (YAHOO.util.Event).
*/
_onBlur: function (p_oEvent) {
this.removeStateCSSClasses("focus");
if (this.get("type") != "menu") {
this.removeStateCSSClasses("active");
}
if (this._activationKeyPressed) {
Event.on(document, "keyup", this._onDocumentKeyUp, null, this);
}
m_oFocusedButton = null;
this.fireEvent("blur", p_oEvent);
},
/**
* @method _onDocumentKeyUp
* @description "keyup" event handler for the document.
* @protected
* @param {Event} p_oEvent Object representing the DOM event object
* passed back by the event utility (YAHOO.util.Event).
*/
_onDocumentKeyUp: function (p_oEvent) {
if (this._isActivationKey(Event.getCharCode(p_oEvent))) {
this._activationKeyPressed = false;
Event.removeListener(document, "keyup", this._onDocumentKeyUp);
}
},
/**
* @method _onKeyDown
* @description "keydown" event handler for the button. Fires "option"
* for a split button's menu key, enters the pressed state (or shows
* the menu) for activation keys, and lets Escape (27) hide a visible
* menu and return focus to the button.
* @protected
* @param {Event} p_oEvent Object representing the DOM event object
* passed back by the event utility (YAHOO.util.Event).
*/
_onKeyDown: function (p_oEvent) {
var oMenu = this._menu;
if (this.get("type") == "split" &&
this._isSplitButtonOptionKey(p_oEvent)) {
this.fireEvent("option", p_oEvent);
}
else if (this._isActivationKey(Event.getCharCode(p_oEvent))) {
if (this.get("type") == "menu") {
this._showMenu(p_oEvent);
}
else {
this._activationKeyPressed = true;
this.addStateCSSClasses("active");
}
}
// 27 == Escape: dismiss a visible menu and refocus the button.
if (oMenu && oMenu.cfg.getProperty("visible") &&
Event.getCharCode(p_oEvent) == 27) {
oMenu.hide();
this.focus();
}
},
/**
* @method _onKeyUp
* @description "keyup" event handler for the button.
* @protected
* @param {Event} p_oEvent Object representing the DOM event object
* passed back by the event utility (YAHOO.util.Event).
*/
_onKeyUp: function (p_oEvent) {
var sType;
if (this._isActivationKey(Event.getCharCode(p_oEvent))) {
sType = this.get("type");
if (sType == "checkbox" || sType == "radio") {
this.set("checked", !(this.get("checked")));
}
this._activationKeyPressed = false;
if (this.get("type") != "menu") {
this.removeStateCSSClasses("active");
}
}
},
/**
* @method _onClick
* @description "click" event handler for the button. Submit-type
* buttons submit their form; reset-type buttons reset it; split
* buttons either cancel the click (option region) or hide the menu
* and, when backed by a submit source element, submit the form.
* @protected
* @param {Event} p_oEvent Object representing the DOM event object
* passed back by the event utility (YAHOO.util.Event).
*/
_onClick: function (p_oEvent) {
var sType = this.get("type"),
oForm,
oSrcElement,
// Left undefined except for the split-button option-region case;
// callers treat undefined as "do not cancel".
bReturnVal;
switch (sType) {
case "submit":
// A prior handler may have cancelled the event via returnValue.
if (p_oEvent.returnValue !== false) {
this.submitForm();
}
break;
case "reset":
oForm = this.getForm();
if (oForm) {
oForm.reset();
}
break;
case "split":
if (this._nOptionRegionX > 0 &&
(Event.getPageX(p_oEvent) > this._nOptionRegionX)) {
bReturnVal = false;
}
else {
this._hideMenu();
oSrcElement = this.get("srcelement");
if (oSrcElement && oSrcElement.type == "submit" &&
p_oEvent.returnValue !== false) {
this.submitForm();
}
}
break;
}
return bReturnVal;
},
/**
* @method _onDblClick
* @description "dblclick" event handler for the button.
* @protected
* @param {Event} p_oEvent Object representing the DOM event object
* passed back by the event utility (YAHOO.util.Event).
*/
_onDblClick: function (p_oEvent) {
var bReturnVal = true;
if (this.get("type") == "split" && Event.getPageX(p_oEvent) > this._nOptionRegionX) {
bReturnVal = false;
}
return bReturnVal;
},
/**
* @method _onAppendTo
* @description "appendTo" event handler for the button.
* @protected
* @param {Event} p_oEvent Object representing the DOM event object
* passed back by the event utility (YAHOO.util.Event).
*/
_onAppendTo: function (p_oEvent) {
/*
It is necessary to call "_addListenersToForm" using
"setTimeout" to make sure that the button's "form" property
returns a node reference. Sometimes, if you try to get the
reference immediately after appending the field, it is null.
*/
Lang.later(0, this, this._addListenersToForm);
},
/**
* @method _onFormReset
* @description "reset" event handler for the button's form.
* @protected
* @param {Event} p_oEvent Object representing the DOM event
* object passed back by the event utility (YAHOO.util.Event).
*/
_onFormReset: function (p_oEvent) {
var sType = this.get("type"),
oMenu = this._menu;
if (sType == "checkbox" || sType == "radio") {
this.resetValue("checked");
}
if (Menu && oMenu && (oMenu instanceof Menu)) {
this.resetValue("selectedMenuItem");
}
},
/**
* @method _onFormSubmit
* @description "submit" event handler for the button's form.
* @protected
* @param {Event} p_oEvent Object representing the DOM event
* object passed back by the event utility (YAHOO.util.Event).
*/
_onFormSubmit: function (p_oEvent) {
this.createHiddenFields();
},
/**
* @method _onDocumentMouseDown
* @description "mousedown" event handler for the document; hides the
* menu when the press lands outside both the button and its menu, then
* detaches itself.
* @protected
* @param {Event} p_oEvent Object representing the DOM event object
* passed back by the event utility (YAHOO.util.Event).
*/
_onDocumentMouseDown: function (p_oEvent) {
var oTarget = Event.getTarget(p_oEvent),
oButtonElement = this.get("element"),
oMenuElement = this._menu.element;
if (oTarget != oButtonElement &&
!Dom.isAncestor(oButtonElement, oTarget) &&
oTarget != oMenuElement &&
!Dom.isAncestor(oMenuElement, oTarget)) {
this._hideMenu();
// In IE when the user mouses down on a focusable element
// that element will be focused and become the "activeElement".
// (http://msdn.microsoft.com/en-us/library/ms533065(VS.85).aspx)
// However, there is a bug in IE where if there is a
// positioned element with a focused descendant that is
// hidden in response to the mousedown event, the target of
// the mousedown event will appear to have focus, but will
// not be set as the activeElement. This will result
// in the element not firing key events, even though it
// appears to have focus. The following call to "setActive"
// fixes this bug.
if (UA.ie && oTarget.focus) {
oTarget.setActive();
}
Event.removeListener(document, "mousedown",
this._onDocumentMouseDown);
}
},
/**
* @method _onOption
* @description "option" event handler for the button.
* @protected
* @param {Event} p_oEvent Object representing the DOM event object
* passed back by the event utility (YAHOO.util.Event).
*/
_onOption: function (p_oEvent) {
if (this.hasClass(this.CLASS_NAME_PREFIX + "split-button-activeoption")) {
this._hideMenu();
this._bOptionPressed = false;
}
else {
this._showMenu(p_oEvent);
this._bOptionPressed = true;
}
},
/**
* @method _onMenuShow
* @description "show" event handler for the button's menu.
* @private
* @param {String} p_sType String representing the name of the event
* that was fired.
*/
_onMenuShow: function (p_sType) {
Event.on(document, "mousedown", this._onDocumentMouseDown,
null, this);
var sState = (this.get("type") == "split") ? "activeoption" : "active";
this.addStateCSSClasses(sState);
},
/**
* @method _onMenuHide
* @description "hide" event handler for the button's menu.
* @private
* @param {String} p_sType String representing the name of the event
* that was fired.
*/
_onMenuHide: function (p_sType) {
var sState = (this.get("type") == "split") ? "activeoption" : "active";
this.removeStateCSSClasses(sState);
if (this.get("type") == "split") {
this._bOptionPressed = false;
}
},
/**
* @method _onMenuKeyDown
* @description "keydown" event handler for the button's menu.
* @private
* @param {String} p_sType String representing the name of the event
* that was fired.
* @param {Array} p_aArgs Array of arguments sent when the event
* was fired.
*/
_onMenuKeyDown: function (p_sType, p_aArgs) {
var oEvent = p_aArgs[0];
if (Event.getCharCode(oEvent) == 27) {
this.focus();
if (this.get("type") == "split") {
this._bOptionPressed = false;
}
}
},
/**
* @method _onMenuRender
* @description "render" event handler for the button's menu.
* @private
* @param {String} p_sType String representing the name of the
* event that was fired.
*/
_onMenuRender: function (p_sType) {
// Reparent the menu element next to the button so both share the same
// offset parent for positioning.
var oButtonElement = this.get("element"),
oButtonParent = oButtonElement.parentNode,
oMenu = this._menu,
oMenuElement = oMenu.element,
oSrcElement = oMenu.srcElement,
oItem;
if (oButtonParent != oMenuElement.parentNode) {
oButtonParent.appendChild(oMenuElement);
}
// Flag consulted elsewhere to know the menu has been rendered once.
this._renderedMenu = true;
// If the user has designated an <option> of the Menu's source
// <select> element to be selected, sync the selectedIndex with
// the "selectedMenuItem" Attribute.
if (oSrcElement &&
oSrcElement.nodeName.toLowerCase() === "select" &&
oSrcElement.value) {
oItem = oMenu.getItem(oSrcElement.selectedIndex);
// Set the value of the "selectedMenuItem" attribute
// silently since this is the initial set--synchronizing
// the value of the source <SELECT> element in the DOM with
// its corresponding Menu instance.
this.set("selectedMenuItem", oItem, true);
// Call the "_onSelectedMenuItemChange" method since the
// attribute was set silently.
this._onSelectedMenuItemChange({ newValue: oItem });
}
},
/**
* @method _onMenuClick
* @description "click" event handler for the button's menu.
* @private
* @param {String} p_sType String representing the name of the event
* that was fired.
* @param {Array} p_aArgs Array of arguments sent when the event
* was fired.
*/
_onMenuClick: function (p_sType, p_aArgs) {
var oItem = p_aArgs[1],
oSrcElement;
if (oItem) {
this.set("selectedMenuItem", oItem);
oSrcElement = this.get("srcelement");
if (oSrcElement && oSrcElement.type == "submit") {
this.submitForm();
}
this._hideMenu();
}
},
/**
* @method _onSelectedMenuItemChange
* @description "selectedMenuItemChange" event handler for the Button's
* "selectedMenuItem" attribute.
* @param {Event} event Object representing the DOM event object
* passed back by the event utility (YAHOO.util.Event).
*/
_onSelectedMenuItemChange: function (event) {
var oSelected = event.prevValue,
oItem = event.newValue,
sPrefix = this.CLASS_NAME_PREFIX;
if (oSelected) {
Dom.removeClass(oSelected.element, (sPrefix + "button-selectedmenuitem"));
}
if (oItem) {
Dom.addClass(oItem.element, (sPrefix + "button-selectedmenuitem"));
}
},
/**
* @method _onLabelClick
* @description "click" event handler for the Button's
* <code><label></code> element.
* @param {Event} event Object representing the DOM event object
* passed back by the event utility (YAHOO.util.Event).
*/
_onLabelClick: function (event) {
this.focus();
var sType = this.get("type");
if (sType == "radio" || sType == "checkbox") {
this.set("checked", (!this.get("checked")));
}
},
// Public methods
/**
* @method createButtonElement
* @description Creates the button's HTML elements.
* @param {String} p_sType String indicating the type of element
* to create.
* @return {<a href="http://www.w3.org/TR/2000/WD-DOM-Level-1-20000929/
* level-one-html.html#ID-58190037">HTMLElement</a>}
*/
createButtonElement: function (p_sType) {
var sNodeName = this.NODE_NAME,
oElement = document.createElement(sNodeName);
oElement.innerHTML = "<" + sNodeName + " class=\"first-child\">" +
(p_sType == "link" ? "<a></a>" :
"<button type=\"button\"></button>") + "</" + sNodeName + ">";
return oElement;
},
/**
* @method addStateCSSClasses
* @description Appends state-specific CSS classes to the button's root
* DOM element.
*/
addStateCSSClasses: function (p_sState) {
var sType = this.get("type"),
sPrefix = this.CLASS_NAME_PREFIX;
if (Lang.isString(p_sState)) {
if (p_sState != "activeoption" && p_sState != "hoveroption") {
this.addClass(sPrefix + this.CSS_CLASS_NAME + ("-" + p_sState));
}
this.addClass(sPrefix + sType + ("-button-" + p_sState));
}
},
/**
* @method removeStateCSSClasses
* @description Removes state-specific CSS classes to the button's root
* DOM element.
*/
removeStateCSSClasses: function (p_sState) {
var sType = this.get("type"),
sPrefix = this.CLASS_NAME_PREFIX;
if (Lang.isString(p_sState)) {
this.removeClass(sPrefix + this.CSS_CLASS_NAME + ("-" + p_sState));
this.removeClass(sPrefix + sType + ("-button-" + p_sState));
}
},
/**
* @method createHiddenFields
* @description Creates the button's hidden form field and appends it
* to its parent form.
* @return {<a href="http://www.w3.org/TR/2000/WD-DOM-Level-1-20000929/
* level-one-html.html#ID-6043025">HTMLInputElement</a>|Array}
*/
createHiddenFields: function () {
// Always rebuild from scratch so fields are never duplicated.
this.removeHiddenFields();
var oForm = this.getForm(),
oButtonField,
sType,
bCheckable,
oMenu,
oMenuItem,
sButtonName,
oValue,
oMenuField,
oReturnVal,
sMenuFieldName,
oMenuSrcElement,
bMenuSrcElementIsSelect = false;
// Disabled buttons contribute nothing to the form's data set.
if (oForm && !this.get("disabled")) {
sType = this.get("type");
bCheckable = (sType == "checkbox" || sType == "radio");
// A field for the button itself is created when it is a checked
// checkable, or when this button is the one that triggered the
// form submission (m_oSubmitTrigger is set in submitForm).
if ((bCheckable && this.get("checked")) || (m_oSubmitTrigger == this)) {
YAHOO.log("Creating hidden field.", "info", this.toString());
oButtonField = createInputElement((bCheckable ? sType : "hidden"),
this.get("name"), this.get("value"), this.get("checked"));
if (oButtonField) {
if (bCheckable) {
oButtonField.style.display = "none";
}
oForm.appendChild(oButtonField);
}
}
oMenu = this._menu;
if (Menu && oMenu && (oMenu instanceof Menu)) {
YAHOO.log("Creating hidden field for menu.", "info", this.toString());
oMenuItem = this.get("selectedMenuItem");
oMenuSrcElement = oMenu.srcElement;
bMenuSrcElementIsSelect = (oMenuSrcElement &&
oMenuSrcElement.nodeName.toUpperCase() == "SELECT");
if (oMenuItem) {
// Fall back to the item's text when it has no explicit value.
oValue = (oMenuItem.value === null || oMenuItem.value === "") ?
oMenuItem.cfg.getProperty("text") : oMenuItem.value;
sButtonName = this.get("name");
// Field name: reuse the source <select>'s name when there is
// one, otherwise derive "<buttonname>_options".
if (bMenuSrcElementIsSelect) {
sMenuFieldName = oMenuSrcElement.name;
}
else if (sButtonName) {
sMenuFieldName = (sButtonName + "_options");
}
if (oValue && sMenuFieldName) {
oMenuField = createInputElement("hidden", sMenuFieldName, oValue);
oForm.appendChild(oMenuField);
}
}
else if (bMenuSrcElementIsSelect) {
// No selection made: submit the original <select> itself.
oMenuField = oForm.appendChild(oMenuSrcElement);
}
}
// Record what was created so removeHiddenFields can undo it: an
// array when both fields exist, otherwise the single field.
if (oButtonField && oMenuField) {
this._hiddenFields = [oButtonField, oMenuField];
}
else if (!oButtonField && oMenuField) {
this._hiddenFields = oMenuField;
}
else if (oButtonField && !oMenuField) {
this._hiddenFields = oButtonField;
}
oReturnVal = this._hiddenFields;
}
// Returns undefined when there is no form or the button is disabled.
return oReturnVal;
},
/**
* @method removeHiddenFields
* @description Removes the button's hidden form field(s) from its
* parent form.
*/
removeHiddenFields: function () {
var oField = this._hiddenFields,
nFields,
i;
function removeChild(p_oElement) {
if (Dom.inDocument(p_oElement)) {
p_oElement.parentNode.removeChild(p_oElement);
}
}
if (oField) {
if (Lang.isArray(oField)) {
nFields = oField.length;
if (nFields > 0) {
i = nFields - 1;
do {
removeChild(oField[i]);
}
while (i--);
}
}
else {
removeChild(oField);
}
this._hiddenFields = null;
}
},
/**
* @method submitForm
* @description Submits the form to which the button belongs. Returns
* true if the form was submitted successfully, false if the submission
* was cancelled.
* @protected
* @return {Boolean}
*/
submitForm: function () {
var oForm = this.getForm(),
oSrcElement = this.get("srcelement"),
/*
Boolean indicating if the event fired successfully
(was not cancelled by any handlers)
*/
bSubmitForm = false,
oEvent;
if (oForm) {
// Remember which button triggered the submission so that
// createHiddenFields includes its name/value in the form data.
if (this.get("type") == "submit" || (oSrcElement && oSrcElement.type == "submit")) {
m_oSubmitTrigger = this;
}
// Fire the form's "submit" event in a browser-appropriate way.
if (UA.ie) {
bSubmitForm = oForm.fireEvent("onsubmit");
}
else { // Gecko, Opera, and Safari
oEvent = document.createEvent("HTMLEvents");
oEvent.initEvent("submit", true, true);
bSubmitForm = oForm.dispatchEvent(oEvent);
}
/*
In IE and Safari, dispatching a "submit" event to a form
WILL cause the form's "submit" event to fire, but WILL NOT
submit the form. Therefore, we need to call the "submit"
method as well.
*/
if ((UA.ie || UA.webkit) && bSubmitForm) {
oForm.submit();
}
}
return bSubmitForm;
},
/**
* @method init
* @description The Button class's initialization method.
* @param {String} p_oElement String specifying the id attribute of the
* <code><input></code>, <code><button></code>,
* <code><a></code>, or <code><span></code> element to
* be used to create the button.
* @param {<a href="http://www.w3.org/TR/2000/WD-DOM-Level-1-20000929/
* level-one-html.html#ID-6043025">HTMLInputElement</a>|<a href="http://
* www.w3.org/TR/2000/WD-DOM-Level-1-20000929/level-one-html.html
* #ID-34812697">HTMLButtonElement</a>|<a href="http://www.w3.org/TR
* /2000/WD-DOM-Level-1-20000929/level-one-html.html#ID-33759296">
* HTMLElement</a>} p_oElement Object reference for the
* <code><input></code>, <code><button></code>,
* <code><a></code>, or <code><span></code> element to be
* used to create the button.
* @param {Object} p_oElement Object literal specifying a set of
* configuration attributes used to create the button.
* @param {Object} p_oAttributes Optional. Object literal specifying a
* set of configuration attributes used to create the button.
*/
init: function (p_oElement, p_oAttributes) {
// Locate the inner <button> (or <a> for link buttons) inside the
// button's root element.
var sNodeName = p_oAttributes.type == "link" ? "a" : "button",
oSrcElement = p_oAttributes.srcelement,
oButton = p_oElement.getElementsByTagName(sNodeName)[0],
oInput;
if (!oButton) {
// No inner <button>/<a>: promote an <input> child to a <button>.
oInput = p_oElement.getElementsByTagName("input")[0];
if (oInput) {
oButton = document.createElement("button");
oButton.setAttribute("type", "button");
oInput.parentNode.replaceChild(oButton, oInput);
}
}
this._button = oButton;
YAHOO.widget.Button.superclass.init.call(this, p_oElement, p_oAttributes);
// Give the inner element a derived id so a <label> can target it.
var sId = this.get("id"),
sButtonId = sId + "-button";
oButton.id = sButtonId;
var aLabels,
oLabel;
// Predicate: does this <label> point at the button's root id?
var hasLabel = function (element) {
return (element.htmlFor === sId);
};
// Re-point the label at the inner element once appended to the DOM.
var setLabel = function () {
oLabel.setAttribute((UA.ie ? "htmlFor" : "for"), sButtonId);
};
if (oSrcElement && this.get("type") != "link") {
aLabels = Dom.getElementsBy(hasLabel, "label");
if (Lang.isArray(aLabels) && aLabels.length > 0) {
oLabel = aLabels[0];
}
}
// Register this instance in the module-private button registry.
m_oButtons[sId] = this;
var sPrefix = this.CLASS_NAME_PREFIX;
this.addClass(sPrefix + this.CSS_CLASS_NAME);
this.addClass(sPrefix + this.get("type") + "-button");
Event.on(this._button, "focus", this._onFocus, null, this);
this.on("mouseover", this._onMouseOver);
this.on("mousedown", this._onMouseDown);
this.on("mouseup", this._onMouseUp);
this.on("click", this._onClick);
// Need to reset the value of the "onclick" Attribute so that any
// handlers registered via the "onclick" Attribute are fired after
// Button's default "_onClick" listener.
var fnOnClick = this.get("onclick");
this.set("onclick", null);
this.set("onclick", fnOnClick);
this.on("dblclick", this._onDblClick);
var oParentNode;
if (oLabel) {
if (this.get("replaceLabel")) {
// Absorb the label's text as the button's "label" attribute and
// drop the <label> element from the document.
this.set("label", oLabel.innerHTML);
oParentNode = oLabel.parentNode;
oParentNode.removeChild(oLabel);
}
else {
this.on("appendTo", setLabel);
Event.on(oLabel, "click", this._onLabelClick, null, this);
this._label = oLabel;
}
}
this.on("appendTo", this._onAppendTo);
var oContainer = this.get("container"),
oElement = this.get("element"),
bElInDoc = Dom.inDocument(oElement);
if (oContainer) {
// A target container was specified: detach the source element and
// append the button into the container (waiting for it if it is
// referenced by id and not yet in the document).
if (oSrcElement && oSrcElement != oElement) {
oParentNode = oSrcElement.parentNode;
if (oParentNode) {
oParentNode.removeChild(oSrcElement);
}
}
if (Lang.isString(oContainer)) {
Event.onContentReady(oContainer, this.appendTo, oContainer, this);
}
else {
this.on("init", function () {
Lang.later(0, this, this.appendTo, oContainer);
});
}
}
else if (!bElInDoc && oSrcElement && oSrcElement != oElement) {
// No container: swap the button's element in where the source
// element currently lives, firing the appendTo lifecycle events.
oParentNode = oSrcElement.parentNode;
if (oParentNode) {
this.fireEvent("beforeAppendTo", {
type: "beforeAppendTo",
target: oParentNode
});
oParentNode.replaceChild(oElement, oSrcElement);
this.fireEvent("appendTo", {
type: "appendTo",
target: oParentNode
});
}
}
else if (this.get("type") != "link" && bElInDoc && oSrcElement &&
oSrcElement == oElement) {
// Button built from in-document markup: hook up form listeners now.
this._addListenersToForm();
}
YAHOO.log("Initialization completed.", "info", this.toString());
this.fireEvent("init", {
type: "init",
target: this
});
},
/**
* @method initAttributes
* @description Initializes all of the configuration attributes used to
* create the button.
* @param {Object} p_oAttributes Object literal specifying a set of
* configuration attributes used to create the button.
*/
initAttributes: function (p_oAttributes) {
var oAttributes = p_oAttributes || {};
YAHOO.widget.Button.superclass.initAttributes.call(this,
oAttributes);
/**
* @attribute type
* @description String specifying the button's type. Possible
* values are: "push," "link," "submit," "reset," "checkbox,"
* "radio," "menu," and "split."
* @default "push"
* @type String
* @writeonce
*/
this.setAttributeConfig("type", {
value: (oAttributes.type || "push"),
validator: Lang.isString,
writeOnce: true,
method: this._setType
});
/**
* @attribute label
* @description String specifying the button's text label
* or innerHTML.
* @default null
* @type String
*/
this.setAttributeConfig("label", {
value: oAttributes.label,
validator: Lang.isString,
method: this._setLabel
});
/**
* @attribute value
* @description Object specifying the value for the button.
* @default null
* @type Object
*/
this.setAttributeConfig("value", {
value: oAttributes.value
});
/**
* @attribute name
* @description String specifying the name for the button.
* @default null
* @type String
*/
this.setAttributeConfig("name", {
value: oAttributes.name,
validator: Lang.isString
});
/**
* @attribute tabindex
* @description Number specifying the tabindex for the button.
* @default null
* @type Number
*/
this.setAttributeConfig("tabindex", {
value: oAttributes.tabindex,
validator: Lang.isNumber,
method: this._setTabIndex
});
/**
* @attribute title
* @description String specifying the title for the button.
* @default null
* @type String
*/
this.configureAttribute("title", {
value: oAttributes.title,
validator: Lang.isString,
method: this._setTitle
});
/**
* @attribute disabled
* @description Boolean indicating if the button should be disabled.
* (Disabled buttons are dimmed and will not respond to user input
* or fire events. Does not apply to button's of type "link.")
* @default false
* @type Boolean
*/
this.setAttributeConfig("disabled", {
value: (oAttributes.disabled || false),
validator: Lang.isBoolean,
method: this._setDisabled
});
/**
* @attribute href
* @description String specifying the href for the button. Applies
* only to buttons of type "link."
* @type String
*/
this.setAttributeConfig("href", {
value: oAttributes.href,
validator: Lang.isString,
method: this._setHref
});
/**
* @attribute target
* @description String specifying the target for the button.
* Applies only to buttons of type "link."
* @type String
*/
this.setAttributeConfig("target", {
value: oAttributes.target,
validator: Lang.isString,
method: this._setTarget
});
/**
* @attribute checked
* @description Boolean indicating if the button is checked.
* Applies only to buttons of type "radio" and "checkbox."
* @default false
* @type Boolean
*/
this.setAttributeConfig("checked", {
value: (oAttributes.checked || false),
validator: Lang.isBoolean,
method: this._setChecked
});
/**
* @attribute container
* @description HTML element reference or string specifying the id
* attribute of the HTML element that the button's markup should be
* rendered into.
* @type <a href="http://www.w3.org/TR/2000/WD-DOM-Level-1-20000929/
* level-one-html.html#ID-58190037">HTMLElement</a>|String
* @default null
* @writeonce
*/
this.setAttributeConfig("container", {
value: oAttributes.container,
writeOnce: true
});
/**
* @attribute srcelement
* @description Object reference to the HTML element (either
* <code><input></code> or <code><span></code>)
* used to create the button.
* @type <a href="http://www.w3.org/TR/2000/WD-DOM-Level-1-20000929/
* level-one-html.html#ID-58190037">HTMLElement</a>|String
* @default null
* @writeonce
*/
this.setAttributeConfig("srcelement", {
value: oAttributes.srcelement,
writeOnce: true
});
/**
* @attribute menu
* @description Object specifying the menu for the button.
* The value can be one of the following:
* <ul>
* <li>Object specifying a rendered <a href="YAHOO.widget.Menu.html">
* YAHOO.widget.Menu</a> instance.</li>
* <li>Object specifying a rendered <a href="YAHOO.widget.Overlay.html">
* YAHOO.widget.Overlay</a> instance.</li>
* <li>String specifying the id attribute of the <code><div>
* </code> element used to create the menu. By default the menu
* will be created as an instance of
* <a href="YAHOO.widget.Overlay.html">YAHOO.widget.Overlay</a>.
* If the <a href="YAHOO.widget.Menu.html#CSS_CLASS_NAME">
* default CSS class name for YAHOO.widget.Menu</a> is applied to
* the <code><div></code> element, it will be created as an
* instance of <a href="YAHOO.widget.Menu.html">YAHOO.widget.Menu
* </a>.</li><li>String specifying the id attribute of the
* <code><select></code> element used to create the menu.
* </li><li>Object specifying the <code><div></code> element
* used to create the menu.</li>
* <li>Object specifying the <code><select></code> element
* used to create the menu.</li>
* <li>Array of object literals, each representing a set of
* <a href="YAHOO.widget.MenuItem.html">YAHOO.widget.MenuItem</a>
* configuration attributes.</li>
* <li>Array of strings representing the text labels for each menu
* item in the menu.</li>
* </ul>
* @type <a href="YAHOO.widget.Menu.html">YAHOO.widget.Menu</a>|<a
* href="YAHOO.widget.Overlay.html">YAHOO.widget.Overlay</a>|<a
* href="http://www.w3.org/TR/2000/WD-DOM-Level-1-20000929/level-
* one-html.html#ID-58190037">HTMLElement</a>|String|Array
* @default null
* @writeonce
*/
this.setAttributeConfig("menu", {
value: null,
method: this._setMenu,
writeOnce: true
});
/**
* @attribute lazyloadmenu
* @description Boolean indicating the value to set for the
* <a href="YAHOO.widget.Menu.html#lazyLoad">"lazyload"</a>
* configuration property of the button's menu. Setting
* "lazyloadmenu" to <code>true </code> will defer rendering of
* the button's menu until the first time it is made visible.
* If "lazyloadmenu" is set to <code>false</code>, the button's
* menu will be rendered immediately if the button is in the
* document, or in response to the button's "appendTo" event if
* the button is not yet in the document. In either case, the
* menu is rendered into the button's parent HTML element.
* <em>This attribute does not apply if a
* <a href="YAHOO.widget.Menu.html">YAHOO.widget.Menu</a> or
* <a href="YAHOO.widget.Overlay.html">YAHOO.widget.Overlay</a>
* instance is passed as the value of the button's "menu"
* configuration attribute. <a href="YAHOO.widget.Menu.html">
* YAHOO.widget.Menu</a> or <a href="YAHOO.widget.Overlay.html">
* YAHOO.widget.Overlay</a> instances should be rendered before
* being set as the value for the "menu" configuration
* attribute.</em>
* @default true
* @type Boolean
* @writeonce
*/
this.setAttributeConfig("lazyloadmenu", {
value: (oAttributes.lazyloadmenu === false ? false : true),
validator: Lang.isBoolean,
writeOnce: true
});
/**
* @attribute menuclassname
* @description String representing the CSS class name to be
* applied to the root element of the button's menu.
* @type String
* @default "yui-button-menu"
* @writeonce
*/
this.setAttributeConfig("menuclassname", {
value: (oAttributes.menuclassname || (this.CLASS_NAME_PREFIX + "button-menu")),
validator: Lang.isString,
method: this._setMenuClassName,
writeOnce: true
});
/**
* @attribute menuminscrollheight
* @description Number defining the minimum threshold for the "menumaxheight"
* configuration attribute. When set this attribute is automatically applied
* to all submenus.
* @default 90
* @type Number
*/
this.setAttributeConfig("menuminscrollheight", {
value: (oAttributes.menuminscrollheight || 90),
validator: Lang.isNumber
});
/**
* @attribute menumaxheight
* @description Number defining the maximum height (in pixels) for a menu's
* body element (<code><div class="bd"<</code>). Once a menu's body
* exceeds this height, the contents of the body are scrolled to maintain
* this value. This value cannot be set lower than the value of the
* "minscrollheight" configuration property.
* @type Number
* @default 0
*/
this.setAttributeConfig("menumaxheight", {
value: (oAttributes.menumaxheight || 0),
validator: Lang.isNumber
});
/**
* @attribute menualignment
* @description Array defining how the Button's Menu is aligned to the Button.
* The default value of ["tl", "bl"] aligns the Menu's top left corner to the Button's
* bottom left corner.
* @type Array
* @default ["tl", "bl"]
*/
this.setAttributeConfig("menualignment", {
value: (oAttributes.menualignment || ["tl", "bl"]),
validator: Lang.isArray
});
/**
* @attribute selectedMenuItem
* @description Object representing the item in the button's menu
* that is currently selected.
* @type YAHOO.widget.MenuItem
* @default null
*/
this.setAttributeConfig("selectedMenuItem", {
value: null
});
/**
* @attribute onclick
* @description Object literal representing the code to be executed
* when the button is clicked. Format:<br> <code> {<br>
* <strong>fn:</strong> Function, // The handler to call
* when the event fires.<br> <strong>obj:</strong> Object,
* // An object to pass back to the handler.<br>
* <strong>scope:</strong> Object // The object to use
* for the scope of the handler.<br> } </code>
* @type Object
* @default null
*/
this.setAttributeConfig("onclick", {
value: oAttributes.onclick,
method: this._setOnClick
});
/**
* @attribute focusmenu
* @description Boolean indicating whether or not the button's menu
* should be focused when it is made visible.
* @type Boolean
* @default true
*/
this.setAttributeConfig("focusmenu", {
value: (oAttributes.focusmenu === false ? false : true),
validator: Lang.isBoolean
});
/**
* @attribute replaceLabel
* @description Boolean indicating whether or not the text of the
* button's <code><label></code> element should be used as
* the source for the button's label configuration attribute and
* removed from the DOM.
* @type Boolean
* @default false
*/
this.setAttributeConfig("replaceLabel", {
value: false,
validator: Lang.isBoolean,
writeOnce: true
});
},
/**
* @method focus
* @description Causes the button to receive the focus and fires the
* button's "focus" event.
*/
focus: function () {
if (!this.get("disabled")) {
this._button.focus();
}
},
/**
* @method blur
* @description Causes the button to lose focus and fires the button's
* "blur" event.
*/
blur: function () {
if (!this.get("disabled")) {
this._button.blur();
}
},
/**
* @method hasFocus
* @description Returns a boolean indicating whether or not the button
* has focus.
* @return {Boolean}
*/
hasFocus: function () {
return (m_oFocusedButton == this);
},
/**
* @method isActive
* @description Returns a boolean indicating whether or not the button
* is active.
* @return {Boolean}
*/
isActive: function () {
return this.hasClass(this.CLASS_NAME_PREFIX + this.CSS_CLASS_NAME + "-active");
},
/**
* @method getMenu
* @description Returns a reference to the button's menu.
* @return {<a href="YAHOO.widget.Overlay.html">
* YAHOO.widget.Overlay</a>|<a
* href="YAHOO.widget.Menu.html">YAHOO.widget.Menu</a>}
*/
getMenu: function () {
return this._menu;
},
/**
* @method getForm
* @description Returns a reference to the button's parent form.
* @return {<a href="http://www.w3.org/TR/2000/WD-DOM-Level-1-
* 20000929/level-one-html.html#ID-40002357">HTMLFormElement</a>}
*/
getForm: function () {
var oButton = this._button,
oForm;
if (oButton) {
oForm = oButton.form;
}
return oForm;
},
/**
* @method getHiddenFields
* @description Returns an <code><input></code> element or
* array of form elements used to represent the button when its parent
* form is submitted.
* @return {<a href="http://www.w3.org/TR/2000/WD-DOM-Level-1-20000929/
* level-one-html.html#ID-6043025">HTMLInputElement</a>|Array}
*/
getHiddenFields: function () {
return this._hiddenFields;
},
/**
* @method destroy
* @description Removes the button's element from its parent element and
* removes all event handlers.
*/
destroy: function () {
YAHOO.log("Destroying ...", "info", this.toString());
var oElement = this.get("element"),
oMenu = this._menu,
oLabel = this._label,
oParentNode,
aButtons;
// 1. Tear down the menu first (and deregister it from the overlay
// manager, if it was registered there).
if (oMenu) {
YAHOO.log("Destroying menu.", "info", this.toString());
if (m_oOverlayManager && m_oOverlayManager.find(oMenu)) {
m_oOverlayManager.remove(oMenu);
}
oMenu.destroy();
}
// 2. Detach every DOM listener attached during init.
YAHOO.log("Removing DOM event listeners.", "info", this.toString());
Event.purgeElement(oElement);
Event.purgeElement(this._button);
Event.removeListener(document, "mouseup", this._onDocumentMouseUp);
Event.removeListener(document, "keyup", this._onDocumentKeyUp);
Event.removeListener(document, "mousedown", this._onDocumentMouseDown);
// 3. Remove the associated <label>, if one was captured in init.
if (oLabel) {
Event.removeListener(oLabel, "click", this._onLabelClick);
oParentNode = oLabel.parentNode;
oParentNode.removeChild(oLabel);
}
var oForm = this.getForm();
if (oForm) {
Event.removeListener(oForm, "reset", this._onFormReset);
Event.removeListener(oForm, "submit", this._onFormSubmit);
}
// 4. Drop CustomEvent subscribers and remove the markup.
YAHOO.log("Removing CustomEvent listeners.", "info", this.toString());
this.unsubscribeAll();
oParentNode = oElement.parentNode;
if (oParentNode) {
oParentNode.removeChild(oElement);
}
YAHOO.log("Removing from document.", "info", this.toString());
// 5. Deregister from the module-private registry; if this was the
// form's last YUI button, drop the shared keypress listener too.
delete m_oButtons[this.get("id")];
var sClass = (this.CLASS_NAME_PREFIX + this.CSS_CLASS_NAME);
aButtons = Dom.getElementsByClassName(sClass,
this.NODE_NAME, oForm);
if (Lang.isArray(aButtons) && aButtons.length === 0) {
Event.removeListener(oForm, "keypress",
YAHOO.widget.Button.onFormKeyPress);
}
YAHOO.log("Destroyed.", "info", this.toString());
},
fireEvent: function (p_sType , p_aArgs) {
var sType = arguments[0];
// Disabled buttons should not respond to DOM events
if (this.DOM_EVENTS[sType] && this.get("disabled")) {
return false;
}
return YAHOO.widget.Button.superclass.fireEvent.apply(this, arguments);
},
/**
* @method toString
* @description Returns a string representing the button.
* @return {String}
*/
toString: function () {
return ("Button " + this.get("id"));
}
});
/**
* @method YAHOO.widget.Button.onFormKeyPress
* @description "keypress" event handler for the button's form.
* @param {Event} p_oEvent Object representing the DOM event object passed
* back by the event utility (YAHOO.util.Event).
*/
YAHOO.widget.Button.onFormKeyPress = function (p_oEvent) {
var oTarget = Event.getTarget(p_oEvent),
nCharCode = Event.getCharCode(p_oEvent),
sNodeName = oTarget.nodeName && oTarget.nodeName.toUpperCase(),
sType = oTarget.type,
/*
Boolean indicating if the form contains any enabled or
disabled YUI submit buttons
*/
bFormContainsYUIButtons = false,
oButton,
oYUISubmitButton,   // The form's first, enabled YUI submit button
/*
The form's first, enabled HTML submit button that precedes any
YUI submit button
*/
oPrecedingSubmitButton,
oEvent;
// Visitor passed to Dom.getElementsBy below; records the first native
// submit control that appears before any YUI button, and the first
// enabled YUI submit button in the form.
function isSubmitButton(p_oElement) {
var sId,
oSrcElement;
switch (p_oElement.nodeName.toUpperCase()) {
case "INPUT":
case "BUTTON":
if (p_oElement.type == "submit" && !p_oElement.disabled) {
if (!bFormContainsYUIButtons && !oPrecedingSubmitButton) {
oPrecedingSubmitButton = p_oElement;
}
}
break;
default:
// Non-input elements may be the root of a YUI Button; check the
// module-private registry by id.
sId = p_oElement.id;
if (sId) {
oButton = m_oButtons[sId];
if (oButton) {
bFormContainsYUIButtons = true;
if (!oButton.get("disabled")) {
oSrcElement = oButton.get("srcelement");
if (!oYUISubmitButton && (oButton.get("type") == "submit" ||
(oSrcElement && oSrcElement.type == "submit"))) {
oYUISubmitButton = oButton;
}
}
}
}
break;
}
}
// Only emulate implicit submission for Enter (13) pressed inside a
// text-like <input> or a <select>.
if (nCharCode == 13 && ((sNodeName == "INPUT" && (sType == "text" ||
sType == "password" || sType == "checkbox" || sType == "radio" ||
sType == "file")) || sNodeName == "SELECT")) {
Dom.getElementsBy(isSubmitButton, "*", this);
if (oPrecedingSubmitButton) {
/*
Need to set focus to the first enabled submit button
to make sure that IE includes its name and value
in the form's data set.
*/
oPrecedingSubmitButton.focus();
}
else if (!oPrecedingSubmitButton && oYUISubmitButton) {
/*
Need to call "preventDefault" to ensure that the form doesn't end up getting
submitted twice.
*/
Event.preventDefault(p_oEvent);
if (UA.ie) {
oYUISubmitButton.get("element").fireEvent("onclick");
}
else {
oEvent = document.createEvent("HTMLEvents");
oEvent.initEvent("click", true, true);
// Older Gecko (< 1.9) cannot dispatch a synthetic click to the
// element; fire the Button's custom "click" event instead.
if (UA.gecko < 1.9) {
oYUISubmitButton.fireEvent("click", oEvent);
}
else {
oYUISubmitButton.get("element").dispatchEvent(oEvent);
}
}
}
}
};
/**
* @method YAHOO.widget.Button.addHiddenFieldsToForm
* @description Searches the specified form and adds hidden fields for
* instances of YAHOO.widget.Button that are of type "radio," "checkbox,"
* "menu," and "split."
* @param {<a href="http://www.w3.org/TR/2000/WD-DOM-Level-1-20000929/level-
* one-html.html#ID-40002357">HTMLFormElement</a>} p_oForm Object reference
* for the form to search.
*/
YAHOO.widget.Button.addHiddenFieldsToForm = function (p_oForm) {
// Find every element in the form carrying the Button root class, then
// ask each registered Button instance to (re)create its hidden fields.
var proto = YAHOO.widget.Button.prototype,
aButtons = Dom.getElementsByClassName(
(proto.CLASS_NAME_PREFIX + proto.CSS_CLASS_NAME),
"*",
p_oForm),
nButtons = aButtons.length,
oButton,
sId,
i;
if (nButtons > 0) {
// NOTE(review): this is a static function, so "this" here is not a
// Button instance — this.toString() may not produce the intended
// log source; confirm.
YAHOO.log("Form contains " + nButtons + " YUI buttons.", "info", this.toString());
for (i = 0; i < nButtons; i++) {
sId = aButtons[i].id;
if (sId) {
oButton = m_oButtons[sId];
if (oButton) {
oButton.createHiddenFields();
}
}
}
}
};
/**
* @method YAHOO.widget.Button.getButton
* @description Returns a button with the specified id.
* @param {String} p_sId String specifying the id of the root node of the
* HTML element representing the button to be retrieved.
* @return {YAHOO.widget.Button}
*/
YAHOO.widget.Button.getButton = function (p_sId) {
    // Look up the Button instance registered under the given id in the
    // module-private registry (populated by Button's init).
    var oButton = m_oButtons[p_sId];
    return oButton;
};
// Events
/**
* @event focus
* @description Fires when the button receives focus.  Passes back a
* single object representing the original DOM event object passed back by
* the event utility (YAHOO.util.Event) when the event was fired. See
* <a href="YAHOO.util.Element.html#addListener">Element.addListener</a>
* for more information on listening for this event.
* @type YAHOO.util.CustomEvent
*/
/**
* @event blur
* @description Fires when the button loses the input focus.  Passes back
* a single object representing the original DOM event object passed back by
* the event utility (YAHOO.util.Event) when the event was fired. See
* <a href="YAHOO.util.Element.html#addListener">Element.addListener</a> for
* more information on listening for this event.
* @type YAHOO.util.CustomEvent
*/
/**
* @event option
* @description Fires when the user invokes the button's option. Passes
* back a single object representing the original DOM event (either
* "mousedown" or "keydown") that caused the "option" event to fire. See
* <a href="YAHOO.util.Element.html#addListener">Element.addListener</a>
* for more information on listening for this event.
* @type YAHOO.util.CustomEvent
*/
})();
(function () {
// Shorthand for utilities
var Dom = YAHOO.util.Dom,
Event = YAHOO.util.Event,
Lang = YAHOO.lang,
Button = YAHOO.widget.Button,
// Private collection of radio buttons
m_oButtons = {};
/**
* The ButtonGroup class creates a set of buttons that are mutually
* exclusive; checking one button in the set will uncheck all others in the
* button group.
* @param {String} p_oElement String specifying the id attribute of the
* <code><div></code> element of the button group.
* @param {<a href="http://www.w3.org/TR/2000/WD-DOM-Level-1-20000929/
* level-one-html.html#ID-22445964">HTMLDivElement</a>} p_oElement Object
* specifying the <code><div></code> element of the button group.
* @param {Object} p_oElement Object literal specifying a set of
* configuration attributes used to create the button group.
* @param {Object} p_oAttributes Optional. Object literal specifying a set
* of configuration attributes used to create the button group.
* @namespace YAHOO.widget
* @class ButtonGroup
* @constructor
* @extends YAHOO.util.Element
*/
YAHOO.widget.ButtonGroup = function (p_oElement, p_oAttributes) {
    // Constructor.  Accepts, like YAHOO.util.Element, either a bare
    // attribute object (no element), an element id string, or an HTML
    // element reference for the group's DIV.
    var fnSuperClass = YAHOO.widget.ButtonGroup.superclass.constructor,
        sNodeName,
        oElement,
        sId;
    // Case 1: single object literal of configuration attributes --
    // generate an id if needed and build the group's DIV from scratch.
    if (arguments.length == 1 && !Lang.isString(p_oElement) &&
        !p_oElement.nodeName) {
        if (!p_oElement.id) {
            sId = Dom.generateId();
            p_oElement.id = sId;
            YAHOO.log("No value specified for the button group's \"id\"" +
                " attribute. Setting button group id to \"" + sId + "\".",
                "info");
        }
        this.logger = new YAHOO.widget.LogWriter("ButtonGroup " + sId);
        this.logger.log("No source HTML element. Building the button " +
            "group using the set of configuration attributes.");
        fnSuperClass.call(this, (this._createGroupElement()), p_oElement);
    }
    // Case 2: element id string -- resolve it and require a DIV root.
    else if (Lang.isString(p_oElement)) {
        oElement = Dom.get(p_oElement);
        if (oElement) {
            if (oElement.nodeName.toUpperCase() == this.NODE_NAME) {
                this.logger =
                    new YAHOO.widget.LogWriter("ButtonGroup " + p_oElement);
                fnSuperClass.call(this, oElement, p_oAttributes);
            }
        }
    }
    // Case 3: element reference -- must already be a DIV; assign an id
    // when it has none.
    else {
        sNodeName = p_oElement.nodeName.toUpperCase();
        if (sNodeName && sNodeName == this.NODE_NAME) {
            if (!p_oElement.id) {
                p_oElement.id = Dom.generateId();
                YAHOO.log("No value specified for the button group's" +
                    " \"id\" attribute. Setting button group id " +
                    "to \"" + p_oElement.id + "\".", "warn");
            }
            this.logger =
                new YAHOO.widget.LogWriter("ButtonGroup " + p_oElement.id);
            fnSuperClass.call(this, p_oElement, p_oAttributes);
        }
    }
};
YAHOO.extend(YAHOO.widget.ButtonGroup, YAHOO.util.Element, {
// Protected properties
/**
* @property _buttons
* @description Array of buttons in the button group.
* @default null
* @protected
* @type Array
*/
_buttons: null,
// Constants
/**
* @property NODE_NAME
* @description The name of the tag to be used for the button
* group's element.
* @default "DIV"
* @final
* @type String
*/
NODE_NAME: "DIV",
/**
* @property CLASS_NAME_PREFIX
* @description Prefix used for all class names applied to a ButtonGroup.
* @default "yui-"
* @final
* @type String
*/
CLASS_NAME_PREFIX: "yui-",
/**
* @property CSS_CLASS_NAME
* @description String representing the CSS class(es) to be applied
* to the button group's element.
* @default "buttongroup"
* @final
* @type String
*/
CSS_CLASS_NAME: "buttongroup",
// Protected methods
/**
* @method _createGroupElement
* @description Creates the button group's element.
* @protected
* @return {<a href="http://www.w3.org/TR/2000/WD-DOM-Level-1-20000929/
* level-one-html.html#ID-22445964">HTMLDivElement</a>}
*/
_createGroupElement: function () {
var oElement = document.createElement(this.NODE_NAME);
return oElement;
},
// Protected attribute setter methods
/**
* @method _setDisabled
* @description Sets the value of the button groups's
* "disabled" attribute.
* @protected
* @param {Boolean} p_bDisabled Boolean indicating the value for
* the button group's "disabled" attribute.
*/
_setDisabled: function (p_bDisabled) {
var nButtons = this.getCount(),
i;
if (nButtons > 0) {
i = nButtons - 1;
do {
this._buttons[i].set("disabled", p_bDisabled);
}
while (i--);
}
},
// Protected event handlers
/**
* @method _onKeyDown
* @description "keydown" event handler for the button group.
* @protected
* @param {Event} p_oEvent Object representing the DOM event object
* passed back by the event utility (YAHOO.util.Event).
*/
_onKeyDown: function (p_oEvent) {
    // "keydown" handler implementing arrow-key navigation between the
    // group's buttons.
    var oTarget = Event.getTarget(p_oEvent),
        nCharCode = Event.getCharCode(p_oEvent),
        // The focused node is nested inside the button's markup; walk up
        // two levels to reach the root element whose id keys m_oButtons.
        sId = oTarget.parentNode.parentNode.id,
        oButton = m_oButtons[sId],
        nIndex = -1;
    // Left (37) / Up (38): previous button, wrapping from 0 to the end.
    if (nCharCode == 37 || nCharCode == 38) {
        nIndex = (oButton.index === 0) ?
            (this._buttons.length - 1) : (oButton.index - 1);
    }
    // Right (39) / Down (40): next button, wrapping from the end to 0.
    else if (nCharCode == 39 || nCharCode == 40) {
        nIndex = (oButton.index === (this._buttons.length - 1)) ?
            0 : (oButton.index + 1);
    }
    // Check and focus the destination button, if any arrow key matched.
    if (nIndex > -1) {
        this.check(nIndex);
        this.getButton(nIndex).focus();
    }
},
/**
* @method _onAppendTo
* @description "appendTo" event handler for the button group.
* @protected
* @param {Event} p_oEvent Object representing the event that was fired.
*/
_onAppendTo: function (p_oEvent) {
var aButtons = this._buttons,
nButtons = aButtons.length,
i;
for (i = 0; i < nButtons; i++) {
aButtons[i].appendTo(this.get("element"));
}
},
/**
* @method _onButtonCheckedChange
* @description "checkedChange" event handler for each button in the
* button group.
* @protected
* @param {Event} p_oEvent Object representing the event that was fired.
* @param {<a href="YAHOO.widget.Button.html">YAHOO.widget.Button</a>}
* p_oButton Object representing the button that fired the event.
*/
_onButtonCheckedChange: function (p_oEvent, p_oButton) {
var bChecked = p_oEvent.newValue,
oCheckedButton = this.get("checkedButton");
if (bChecked && oCheckedButton != p_oButton) {
if (oCheckedButton) {
oCheckedButton.set("checked", false, true);
}
this.set("checkedButton", p_oButton);
this.set("value", p_oButton.get("value"));
}
else if (oCheckedButton && !oCheckedButton.set("checked")) {
oCheckedButton.set("checked", true, true);
}
},
// Public methods
/**
* @method init
* @description The ButtonGroup class's initialization method.
* @param {String} p_oElement String specifying the id attribute of the
* <code><div></code> element of the button group.
* @param {<a href="http://www.w3.org/TR/2000/WD-DOM-Level-1-20000929/
* level-one-html.html#ID-22445964">HTMLDivElement</a>} p_oElement Object
* specifying the <code><div></code> element of the button group.
* @param {Object} p_oElement Object literal specifying a set of
* configuration attributes used to create the button group.
* @param {Object} p_oAttributes Optional. Object literal specifying a
* set of configuration attributes used to create the button group.
*/
init: function (p_oElement, p_oAttributes) {
    // Initialization: adopts existing radio-style children and wires
    // up the group-level event handlers.
    this._buttons = [];
    YAHOO.widget.ButtonGroup.superclass.init.call(this, p_oElement,
        p_oAttributes);
    this.addClass(this.CLASS_NAME_PREFIX + this.CSS_CLASS_NAME);
    // Adopt child nodes already rendered as YUI radio buttons
    // (class "yui-radio-button").
    var sClass = (YAHOO.widget.Button.prototype.CLASS_NAME_PREFIX + "radio-button"),
        aButtons = this.getElementsByClassName(sClass);
    this.logger.log("Searching for child nodes with the class name " +
        sClass + " to add to the button group.");
    if (aButtons.length > 0) {
        this.logger.log("Found " + aButtons.length +
            " child nodes with the class name " + sClass +
            " Attempting to add to button group.");
        this.addButtons(aButtons);
    }
    this.logger.log("Searching for child nodes with the type of " +
        " \"radio\" to add to the button group.");
    // Also adopt plain <input type="radio"> descendants.
    function isRadioButton(p_oElement) {
        return (p_oElement.type == "radio");
    }
    aButtons =
        Dom.getElementsBy(isRadioButton, "input", this.get("element"));
    if (aButtons.length > 0) {
        this.logger.log("Found " + aButtons.length + " child nodes" +
            " with the type of \"radio.\" Attempting to add to" +
            " button group.");
        this.addButtons(aButtons);
    }
    // Group-level handlers: keyboard navigation and deferred rendering.
    this.on("keydown", this._onKeyDown);
    this.on("appendTo", this._onAppendTo);
    // Render into the configured container; when given as an id string,
    // wait for that node to be available in the DOM first.
    var oContainer = this.get("container");
    if (oContainer) {
        if (Lang.isString(oContainer)) {
            Event.onContentReady(oContainer, function () {
                this.appendTo(oContainer);
            }, null, this);
        }
        else {
            this.appendTo(oContainer);
        }
    }
    this.logger.log("Initialization completed.");
},
/**
* @method initAttributes
* @description Initializes all of the configuration attributes used to
* create the button group.
* @param {Object} p_oAttributes Object literal specifying a set of
* configuration attributes used to create the button group.
*/
initAttributes: function (p_oAttributes) {
    // Registers the group's configuration attributes with the Element
    // attribute provider, seeding each one from p_oAttributes when a
    // value was supplied.
    var oAttributes = p_oAttributes || {};
    YAHOO.widget.ButtonGroup.superclass.initAttributes.call(
        this, oAttributes);
    /**
    * @attribute name
    * @description String specifying the name for the button group.
    * This name will be applied to each button in the button group.
    * @default null
    * @type String
    */
    this.setAttributeConfig("name", {
        value: oAttributes.name,
        validator: Lang.isString
    });
    /**
    * @attribute disabled
    * @description Boolean indicating if the button group should be
    * disabled. Disabling the button group will disable each button
    * in the button group. Disabled buttons are dimmed and will not
    * respond to user input or fire events.
    * @default false
    * @type Boolean
    */
    this.setAttributeConfig("disabled", {
        value: (oAttributes.disabled || false),
        validator: Lang.isBoolean,
        method: this._setDisabled
    });
    /**
    * @attribute value
    * @description Object specifying the value for the button group.
    * @default null
    * @type Object
    */
    this.setAttributeConfig("value", {
        value: oAttributes.value
    });
    /**
    * @attribute container
    * @description HTML element reference or string specifying the id
    * attribute of the HTML element that the button group's markup
    * should be rendered into.
    * @type <a href="http://www.w3.org/TR/2000/WD-DOM-Level-1-20000929/
    * level-one-html.html#ID-58190037">HTMLElement</a>|String
    * @default null
    * @writeonce
    */
    this.setAttributeConfig("container", {
        value: oAttributes.container,
        writeOnce: true
    });
    /**
    * @attribute checkedButton
    * @description Reference for the button in the button group that
    * is checked.
    * @type {<a href="YAHOO.widget.Button.html">YAHOO.widget.Button</a>}
    * @default null
    */
    this.setAttributeConfig("checkedButton", {
        value: null
    });
},
/**
* @method addButton
* @description Adds the button to the button group.
* @param {<a href="YAHOO.widget.Button.html">YAHOO.widget.Button</a>}
* p_oButton Object reference for the <a href="YAHOO.widget.Button.html">
* YAHOO.widget.Button</a> instance to be added to the button group.
* @param {String} p_oButton String specifying the id attribute of the
* <code><input></code> or <code><span></code> element
* to be used to create the button to be added to the button group.
* @param {<a href="http://www.w3.org/TR/2000/WD-DOM-Level-1-20000929/
* level-one-html.html#ID-6043025">HTMLInputElement</a>|<a href="
* http://www.w3.org/TR/2000/WD-DOM-Level-1-20000929/level-one-html.html#
* ID-33759296">HTMLElement</a>} p_oButton Object reference for the
* <code><input></code> or <code><span></code> element
* to be used to create the button to be added to the button group.
* @param {Object} p_oButton Object literal specifying a set of
* <a href="YAHOO.widget.Button.html">YAHOO.widget.Button</a>
* configuration attributes used to configure the button to be added to
* the button group.
* @return {<a href="YAHOO.widget.Button.html">YAHOO.widget.Button</a>}
*/
addButton: function (p_oButton) {
    // Adds one button to the group.  Accepts an existing radio Button
    // instance, an attribute object, an element id string, or an
    // element reference; the latter three create a new radio Button.
    var oButton,
        oButtonElement,
        oGroupElement,
        nIndex,
        sButtonName,
        sGroupName;
    if (p_oButton instanceof Button &&
        p_oButton.get("type") == "radio") {
        oButton = p_oButton;
    }
    else if (!Lang.isString(p_oButton) && !p_oButton.nodeName) {
        // Attribute literal: force the radio type and build the button.
        p_oButton.type = "radio";
        oButton = new Button(p_oButton);
    }
    else {
        // Element id string or element reference.
        oButton = new Button(p_oButton, { type: "radio" });
    }
    if (oButton) {
        // Register by index and by id, then align the button's state
        // (name, disabled, checked) with the group's.
        nIndex = this._buttons.length;
        sButtonName = oButton.get("name");
        sGroupName = this.get("name");
        oButton.index = nIndex;
        this._buttons[nIndex] = oButton;
        m_oButtons[oButton.get("id")] = oButton;
        if (sButtonName != sGroupName) {
            oButton.set("name", sGroupName);
        }
        if (this.get("disabled")) {
            oButton.set("disabled", true);
        }
        if (oButton.get("checked")) {
            this.set("checkedButton", oButton);
        }
        // Make sure the button's element is parented by the group's.
        oButtonElement = oButton.get("element");
        oGroupElement = this.get("element");
        if (oButtonElement.parentNode != oGroupElement) {
            oGroupElement.appendChild(oButtonElement);
        }
        // Mutual-exclusion bookkeeping lives in _onButtonCheckedChange.
        oButton.on("checkedChange",
            this._onButtonCheckedChange, oButton, this);
        this.logger.log("Button " + oButton.get("id") + " added.");
    }
    return oButton;
},
/**
* @method addButtons
* @description Adds the array of buttons to the button group.
* @param {Array} p_aButtons Array of <a href="YAHOO.widget.Button.html">
* YAHOO.widget.Button</a> instances to be added
* to the button group.
* @param {Array} p_aButtons Array of strings specifying the id
* attribute of the <code><input></code> or <code><span>
* </code> elements to be used to create the buttons to be added to the
* button group.
* @param {Array} p_aButtons Array of object references for the
* <code><input></code> or <code><span></code> elements
* to be used to create the buttons to be added to the button group.
* @param {Array} p_aButtons Array of object literals, each containing
* a set of <a href="YAHOO.widget.Button.html">YAHOO.widget.Button</a>
* configuration attributes used to configure each button to be added
* to the button group.
* @return {Array}
*/
addButtons: function (p_aButtons) {
var nButtons,
oButton,
aButtons,
i;
if (Lang.isArray(p_aButtons)) {
nButtons = p_aButtons.length;
aButtons = [];
if (nButtons > 0) {
for (i = 0; i < nButtons; i++) {
oButton = this.addButton(p_aButtons[i]);
if (oButton) {
aButtons[aButtons.length] = oButton;
}
}
}
}
return aButtons;
},
/**
* @method removeButton
* @description Removes the button at the specified index from the
* button group.
* @param {Number} p_nIndex Number specifying the index of the button
* to be removed from the button group.
*/
removeButton: function (p_nIndex) {
    // Removes and destroys the button at p_nIndex, then re-numbers the
    // "index" property of the remaining buttons.
    var oButton = this.getButton(p_nIndex),
        nButtons,
        i;
    if (oButton) {
        this.logger.log("Removing button " + oButton.get("id") + ".");
        this._buttons.splice(p_nIndex, 1);
        delete m_oButtons[oButton.get("id")];
        // Detach the group's listener before destroying the button.
        oButton.removeListener("checkedChange",
            this._onButtonCheckedChange);
        oButton.destroy();
        nButtons = this._buttons.length;
        if (nButtons > 0) {
            // Reassign indices, walking from the end down to 0.
            i = this._buttons.length - 1;
            do {
                this._buttons[i].index = i;
            }
            while (i--);
        }
        this.logger.log("Button " + oButton.get("id") + " removed.");
    }
},
/**
* @method getButton
* @description Returns the button at the specified index.
* @param {Number} p_nIndex The index of the button to retrieve from the
* button group.
* @return {<a href="YAHOO.widget.Button.html">YAHOO.widget.Button</a>}
*/
getButton: function (p_nIndex) {
    // Positional lookup; undefined when the index is out of range.
    return this._buttons[p_nIndex];
},
/**
* @method getButtons
* @description Returns an array of the buttons in the button group.
* @return {Array}
*/
getButtons: function () {
    // NOTE: returns the live internal array, not a copy.
    return this._buttons;
},
/**
* @method getCount
* @description Returns the number of buttons in the button group.
* @return {Number}
*/
getCount: function () {
    return this._buttons.length;
},
/**
* @method focus
* @description Sets focus to the button at the specified index.
* @param {Number} p_nIndex Number indicating the index of the button
* to focus.
*/
focus: function (p_nIndex) {
var oButton,
nButtons,
i;
if (Lang.isNumber(p_nIndex)) {
oButton = this._buttons[p_nIndex];
if (oButton) {
oButton.focus();
}
}
else {
nButtons = this.getCount();
for (i = 0; i < nButtons; i++) {
oButton = this._buttons[i];
if (!oButton.get("disabled")) {
oButton.focus();
break;
}
}
}
},
/**
* @method check
* @description Checks the button at the specified index.
* @param {Number} p_nIndex Number indicating the index of the button
* to check.
*/
check: function (p_nIndex) {
var oButton = this.getButton(p_nIndex);
if (oButton) {
oButton.set("checked", true);
}
},
/**
* @method destroy
* @description Removes the button group's element from its parent
* element and removes all event handlers.
*/
destroy: function () {
    // Tears the group down: destroys every member button, strips DOM
    // event handlers from the group's element, and detaches the
    // element from its parent node.
    this.logger.log("Destroying...");
    var nButtons = this._buttons.length,
        oElement = this.get("element"),
        oParentNode = oElement.parentNode,
        i;
    if (nButtons > 0) {
        // Destroy members from the last index down to 0.
        i = this._buttons.length - 1;
        do {
            this._buttons[i].destroy();
        }
        while (i--);
    }
    this.logger.log("Removing DOM event handlers.");
    Event.purgeElement(oElement);
    this.logger.log("Removing from document.");
    oParentNode.removeChild(oElement);
},
/**
* @method toString
* @description Returns a string representing the button group.
* @return {String}
*/
toString: function () {
    // Human-readable identifier used in log messages.
    return ("ButtonGroup " + this.get("id"));
}
});
})();
YAHOO.register("button", YAHOO.widget.Button, {version: "2.8.2r1", build: "8"});
}, '2.8.2' ,{"requires": ["yui2-yahoo", "yui2-dom", "yui2-event", "yui2-skin-sam-button", "yui2-element"], "optional": ["yui2-containercore", "yui2-skin-sam-menu", "yui2-menu"]});<|fim▁end|> | |
<|file_name|>header-default-template.js<|end_file_name|><|fim▁begin|>//Needed components
import React from 'react';<|fim▁hole|>import HeaderContent from './header-content';
import HeaderActions from './header-actions';
/**
* Application header
*/
const AppHeader = () => {
    // Stateless component composing the application header from its
    // scrolling wrapper plus the top row, content and actions sections.
    return (
        <HeaderScrolling>
            <HeaderTopRow />
            <HeaderContent />
            <HeaderActions />
        </HeaderScrolling>
    );
}
AppHeader.displayName = 'AppHeader';
export default AppHeader;<|fim▁end|> | import HeaderScrolling from './header-scrolling';
import HeaderTopRow from './header-top-row'; |
<|file_name|>Webpage.java<|end_file_name|><|fim▁begin|>package com.wongsir.newsgathering.model.commons;
import com.google.common.base.MoreObjects;
import com.google.gson.annotations.SerializedName;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * @Description: Model of a web page captured by the news-gathering crawler.
 * @author Wongsir
 * @date 2017-01-04
 *
 */
public class Webpage {
/**
 * Attachment id list.
 */
public List<String> attachmentList;
/**
 * Image id list.
 */
public List<String> imageList;
/**
 * Body text of the page.
 */
private String content;
/**
 * Title of the page.
 */
private String title;
/**
 * URL of the page.
 */
private String url;
/**
 * Domain name.
 */
private String domain;
/**
 * Spider id; can be regarded as the task id.
 */
private String spiderUUID;
/**
 * Template (spider info) id.
 */
@SerializedName("spiderInfoId")
private String spiderInfoId;
/**
 * Category.
 */
private String category;
/**
 * Raw HTML snapshot of the page.
 */
private String rawHTML;
/**
 * Keywords.
 */
private List<String> keywords;
/**
 * Summary sentences.
 */
private List<String> summary;
/**
 * Gather (crawl) time.
 */
@SerializedName("gatherTime")
private Date gathertime;
/**
 * Page id, automatically assigned (presumably by Elasticsearch -- verify).
 */
private String id;
/**
 * Publish time of the article.
 */
private Date publishTime;
/**
 * Named entities extracted from the page.
 */
private Map<String, Set<String>> namedEntity;
/**
 * Dynamic fields.
 */
private Map<String, Object> dynamicFields;
/**
 * Static fields.
 */
private Map<String, Object> staticFields;
/**
 * Time spent processing this page (unit not stated here -- TODO confirm).
 */
private long processTime;
// Accessors.  All setters now follow the fluent style (return this) so
// they can be chained; previously the style was mixed (some returned
// void, others returned this).  Returning this instead of void is
// source-compatible for existing callers that use setters as statements.
public Map<String, Object> getStaticFields() {
    return staticFields;
}
public Webpage setStaticFields(Map<String, Object> staticFields) {
    this.staticFields = staticFields;
    return this;
}
public Map<String, Set<String>> getNamedEntity() {
    return namedEntity;
}
public Webpage setNamedEntity(Map<String, Set<String>> namedEntity) {
    this.namedEntity = namedEntity;
    return this;
}
public String getContent() {
    return content;
}
public Webpage setContent(String content) {
    this.content = content;
    return this;
}
public String getTitle() {
    return title;
}
public Webpage setTitle(String title) {
    this.title = title;
    return this;
}
public String getUrl() {
    return url;
}
public Webpage setUrl(String url) {
    this.url = url;
    return this;
}
public String getDomain() {
    return domain;
}
public Webpage setDomain(String domain) {
    this.domain = domain;
    return this;
}
public String getSpiderInfoId() {
    return spiderInfoId;
}
public Webpage setSpiderInfoId(String spiderInfoId) {
    this.spiderInfoId = spiderInfoId;
    return this;
}
public Date getGathertime() {
    return gathertime;
}
public Webpage setGathertime(Date gathertime) {
    this.gathertime = gathertime;
    return this;
}
public String getSpiderUUID() {
    return spiderUUID;
}
public Webpage setSpiderUUID(String spiderUUID) {
    this.spiderUUID = spiderUUID;
    return this;
}
public String getId() {
    return id;
}
public Webpage setId(String id) {
    this.id = id;
    return this;
}
public List<String> getKeywords() {
    return keywords;
}
public Webpage setKeywords(List<String> keywords) {
    this.keywords = keywords;
    return this;
}
public List<String> getSummary() {
    return summary;
}
public Webpage setSummary(List<String> summary) {
    this.summary = summary;
    return this;
}
public Date getPublishTime() {
    return publishTime;
}
public Webpage setPublishTime(Date publishTime) {
    this.publishTime = publishTime;
    return this;
}
public String getCategory() {
    return category;
}
public Webpage setCategory(String category) {
    this.category = category;
    return this;
}
public String getRawHTML() {
    return rawHTML;
}
public Webpage setRawHTML(String rawHTML) {
    this.rawHTML = rawHTML;
    return this;
}
public Map<String, Object> getDynamicFields() {
    return dynamicFields;
}
public Webpage setDynamicFields(Map<String, Object> dynamicFields) {
    this.dynamicFields = dynamicFields;
    return this;
}
public List<String> getAttachmentList() {
    return attachmentList;
}
public Webpage setAttachmentList(List<String> attachmentList) {
    this.attachmentList = attachmentList;
    return this;
}
public List<String> getImageList() {
    return imageList;
}
public Webpage setImageList(List<String> imageList) {
    this.imageList = imageList;
    return this;
}
public long getProcessTime() {
    return processTime;
}
public Webpage setProcessTime(long processTime) {
    this.processTime = processTime;
    return this;
}
@Override
public String toString() {
    // Debug representation of every persisted field.  processTime is now
    // included; it was previously the only field left out, unlike the
    // rest of the model.
    return MoreObjects.toStringHelper(this)
            .add("content", content)
            .add("title", title)
            .add("url", url)
            .add("domain", domain)
            .add("spiderUUID", spiderUUID)
            .add("spiderInfoId", spiderInfoId)
            .add("category", category)
            .add("rawHTML", rawHTML)
            .add("keywords", keywords)
            .add("summary", summary)
            .add("gathertime", gathertime)
            .add("id", id)
            .add("publishTime", publishTime)
            .add("namedEntity", namedEntity)
            .add("dynamicFields", dynamicFields)
            .add("staticFields", staticFields)
            .add("attachmentList", attachmentList)
            .add("imageList", imageList)
            .add("processTime", processTime)
            .toString();
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Webpage webpage = (Webpage) o;
return new EqualsBuilder()
.append(getContent(), webpage.getContent())
.append(getTitle(), webpage.getTitle())
.append(getUrl(), webpage.getUrl())
.append(getDomain(), webpage.getDomain())
.append(getSpiderUUID(), webpage.getSpiderUUID())
.append(getSpiderInfoId(), webpage.getSpiderInfoId())
.append(getCategory(), webpage.getCategory())
.append(getRawHTML(), webpage.getRawHTML())
.append(getKeywords(), webpage.getKeywords())
.append(getSummary(), webpage.getSummary())
.append(getGathertime(), webpage.getGathertime())
.append(getId(), webpage.getId())
.append(getPublishTime(), webpage.getPublishTime())
.append(getNamedEntity(), webpage.getNamedEntity())
.append(getDynamicFields(), webpage.getDynamicFields())
<|fim▁hole|> .append(getImageList(), webpage.getImageList())
.isEquals();
}
@Override
public int hashCode() {
    // Folds the same field set, in the same order, as equals() with the
    // standard (17, 37) seed/multiplier pair.  processTime is not part
    // of the hash -- presumably matching equals(); verify when changing
    // either method.
    return new HashCodeBuilder(17, 37)
            .append(getContent())
            .append(getTitle())
            .append(getUrl())
            .append(getDomain())
            .append(getSpiderUUID())
            .append(getSpiderInfoId())
            .append(getCategory())
            .append(getRawHTML())
            .append(getKeywords())
            .append(getSummary())
            .append(getGathertime())
            .append(getId())
            .append(getPublishTime())
            .append(getNamedEntity())
            .append(getDynamicFields())
            .append(getStaticFields())
            .append(getAttachmentList())
            .append(getImageList())
            .toHashCode();
}
}<|fim▁end|> | .append(getStaticFields(), webpage.getStaticFields())
.append(getAttachmentList(), webpage.getAttachmentList())
|
<|file_name|>issue-14959.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(unboxed_closures)]
use std::ops::Fn;
trait Response {}
trait Request {}
trait Ingot<R, S> {<|fim▁hole|> fn enter(&mut self, _: &mut R, _: &mut S, a: &mut Alloy) -> Status;
}
#[allow(dead_code)]
struct HelloWorld;
struct SendFile<'a>;
struct Alloy;
enum Status {
Continue
}
impl Alloy {
    // Stub lookup used by this compiler regression test: always reports
    // that no value of type T is stored.
    fn find<T>(&self) -> Option<T> {
        None
    }
}
// Old-style unboxed-closure impl (pre-1.0 `Fn<Args, Ret>` syntax):
// calling a SendFile with a mutable Response trait object is a no-op.
impl<'a, 'b> Fn<(&'b mut Response+'b,),()> for SendFile<'a> {
    extern "rust-call" fn call(&self, (_res,): (&'b mut Response+'b,)) {}
}
impl<Rq: Request, Rs: Response> Ingot<Rq, Rs> for HelloWorld {
    fn enter(&mut self, _req: &mut Rq, res: &mut Rs, alloy: &mut Alloy) -> Status {
        // find() always yields None here, so unwrap() would panic at
        // runtime; this regression test only needs to type-check the
        // unboxed-closure call below.
        let send_file = alloy.find::<SendFile>().unwrap();
        send_file(res);
        Status::Continue
    }
}
fn main() {}<|fim▁end|> | |
<|file_name|>view.js<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2015 by Rafael Angel Aznar Aparici (rafaaznar at gmail dot com)
*
* sisane: The stunning micro-library that helps you to develop easily
* AJAX web applications by using Angular.js 1.x & sisane-server
* sisane is distributed under the MIT License (MIT)
* Sources at https://github.com/rafaelaznar/
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
'use strict';
moduloEstado.controller('EstadoViewController', ['$scope', '$routeParams', 'serverService', '$location', 'estadoService', '$uibModal',
function ($scope, $routeParams, serverService, $location, estadoService, $uibModal) {
$scope.fields = estadoService.getFields();
$scope.obtitle = estadoService.getObTitle();
$scope.icon = estadoService.getIcon();
$scope.ob = estadoService.getTitle();
$scope.title = "Vista de " + $scope.obtitle;
$scope.id = $routeParams.id;
$scope.status = null;
$scope.debugging = serverService.debugging();
serverService.promise_getOne($scope.ob, $scope.id).then(function (response) {
if (response.status == 200) {
if (response.data.status == 200) {
$scope.status = null;
$scope.bean = response.data.message;
} else {
$scope.status = "Error en la recepción de datos del servidor";
}<|fim▁hole|> $scope.status = "Error en la recepción de datos del servidor";
}
}).catch(function (data) {
$scope.status = "Error en la recepción de datos del servidor";
});
$scope.close = function () {
$location.path('/home');
};
$scope.plist = function () {
$location.path('/' + $scope.ob + '/plist');
};
$scope.back = function () {
window.history.back();
};
}]);<|fim▁end|> | } else { |
<|file_name|>tag.rs<|end_file_name|><|fim▁begin|>// Play List (M3U8 Format)
use std::default::Default;
use std::str::FromStr;
use std::string::ToString;
use std::collections::BTreeMap;
#[allow(non_camel_case_types)]
#[derive(Debug)]
// HLS (M3U8) playlist tag.  Variant names mirror the textual "#EXT..."
// tag names, hence the non_camel_case_types allowance; the grouping
// below follows the spec's tag categories.
pub enum Tag {
    // Basic Tags
    M3U,
    VERSION(usize),
    // Media Segment Tags
    INF(f64, Option<String>, String), // duration, title(option), uri
    BYTERANGE(usize, Option<usize>), // length, start
    DISCONTINUITY,
    // KEY(Option<String>, Option<String>, Option<String>),
    KEY,
    MAP,
    PROGRAM_DATE_TIME,
    DATERANGE,
    // Media Playlist Tags
    TARGETDURATION(f64), // number s is a decimal-integer indicating the target duration in seconds.
    MEDIA_SEQUENCE(usize), // number is a decimal-integer
    DISCONTINUITY_SEQUENCE,
    ENDLIST,
    PLAYLIST_TYPE(String), // <EVENT|VOD>
    I_FRAMES_ONLY,
    // Master Playlist Tags
    MEDIA,
    STREAM_INF(BTreeMap<String, String>, String), // Attrs, URI
    I_FRAME_STREAM_INF,
    SESSION_DATA,
    SESSION_KEY,
    // Media or Master Playlist Tags
    INDEPENDENT_SEGMENTS,
    START,
}
impl Default for Tag {
fn default() -> Tag {
Tag::M3U
}
}
impl ToString for Tag {
    /// Serializes the tag back into its textual playlist form.
    fn to_string(&self) -> String {
        match *self {
            Tag::M3U => "#EXTM3U".to_string(),
            Tag::VERSION(ref version) => format!("#EXT-X-VERSION:{}", version),
            // "#EXTINF:<duration>,[<title>]\n<uri>"
            Tag::INF(ref duration, ref title, ref uri) => match *title {
                Some(ref title) => format!("#EXTINF:{},{}\n{}", duration, title, uri),
                None => format!("#EXTINF:{},\n{}", duration, uri),
            },
            // "#EXT-X-BYTERANGE:<length>[@<start>]"
            Tag::BYTERANGE(ref length, ref start) => match *start {
                Some(start) => format!("#EXT-X-BYTERANGE:{}@{}", length, start),
                None => format!("#EXT-X-BYTERANGE:{}", length),
            },
            Tag::DISCONTINUITY => "#EXT-X-DISCONTINUITY".to_string(),
            Tag::KEY => "#EXT-X-KEY".to_string(),
            Tag::MAP => "#EXT-X-MAP".to_string(),
            Tag::PROGRAM_DATE_TIME => "#EXT-X-PROGRAM-DATE-TIME".to_string(),
            Tag::DATERANGE => "#EXT-X-DATERANGE".to_string(),
            Tag::TARGETDURATION(ref duration) => format!("#EXT-X-TARGETDURATION:{}", duration),
            Tag::MEDIA_SEQUENCE(ref seq) => format!("#EXT-X-MEDIA-SEQUENCE:{}", seq),
            Tag::DISCONTINUITY_SEQUENCE => "#EXT-X-DISCONTINUITY-SEQUENCE".to_string(),
            Tag::ENDLIST => "#EXT-X-ENDLIST".to_string(),
            Tag::PLAYLIST_TYPE(ref t) => format!("#EXT-X-PLAYLIST-TYPE:{}", t),
            Tag::I_FRAMES_ONLY => "#EXT-X-I-FRAMES-ONLY".to_string(),
            Tag::MEDIA => "#EXT-X-MEDIA".to_string(),
            // Attributes render as comma-joined KEY=VALUE pairs; BTreeMap
            // iteration is key-ordered, matching the original output.
            Tag::STREAM_INF(ref attrs, ref uri) => {
                let rendered: Vec<String> = attrs
                    .iter()
                    .map(|(key, value)| format!("{}={}", key, value))
                    .collect();
                format!("#EXT-X-STREAM-INF:{}\n{}", rendered.join(","), uri)
            },
            Tag::I_FRAME_STREAM_INF => "#EXT-X-I-FRAME-STREAM-INF".to_string(),
            Tag::SESSION_DATA => "#EXT-X-SESSION-DATA".to_string(),
            Tag::SESSION_KEY => "#EXT-X-SESSION-KEY".to_string(),
            Tag::INDEPENDENT_SEGMENTS => "#EXT-X-INDEPENDENT-SEGMENTS".to_string(),
            Tag::START => "#EXT-X-START".to_string(),
        }
    }
}
impl FromStr for Tag {
    type Err = ();

    /// Parses one trimmed playlist line (with any leading '#' already
    /// stripped, so it begins with "EXT") into a `Tag`.
    ///
    /// Returns `Err(())` for unrecognised tags and for recognised tags whose
    /// mandatory attributes fail to parse.
    fn from_str(s: &str) -> Result<Tag, ()> {
        let s = s.trim();
        // Every recognised tag starts with "EXT"; reject anything else early.
        if !s.starts_with("EXT") {
            return Err(());
        }
        if s.starts_with("EXTM3U") {
            Ok(Tag::M3U)
        } else if s.starts_with("EXT-X-VERSION") {
            // #EXT-X-VERSION:<n>
            let kv: Vec<&str> = s.split(':').collect();
            if kv.len() != 2 {
                return Err(());
            }
            match usize::from_str(kv[1]) {
                Ok(version) => Ok(Tag::VERSION(version)),
                Err(_) => Err(()),
            }
        } else if s.starts_with("EXTINF") {
            // #EXTINF:<duration>[,<title>]\n<uri>
            let kv: Vec<&str> = s.split(|c| c == ':' || c == ',' || c == '\n').collect();
            if kv.len() < 3 {
                return Err(());
            }
            let duration: f64 = match f64::from_str(kv[1]) {
                Ok(duration) => duration,
                Err(_) => return Err(()),
            };
            if kv.len() == 3 {
                // No title present: the third field is the segment URI.
                Ok(Tag::INF(duration, None, kv[2].to_string()))
            } else if kv.len() == 4 {
                // Title and URI both present.
                Ok(Tag::INF(duration, Some(kv[2].to_string()), kv[3].to_string()))
            } else {
                Err(())
            }
        } else if s.starts_with("EXT-X-BYTERANGE") {
            // #EXT-X-BYTERANGE:<n>[@<offset>]
            // e.g. "69864", "82112@752321", "75232@0"
            let kv: Vec<&str> = s.split(|c| c == ':' || c == '@').collect();
            if kv.len() < 2 {
                return Err(());
            }
            let bytes_length: usize = match usize::from_str(kv[1]) {
                Ok(bytes_length) => bytes_length,
                Err(_) => return Err(()),
            };
            if kv.len() == 2 {
                Ok(Tag::BYTERANGE(bytes_length, None))
            } else if kv.len() == 3 {
                let start: usize = match usize::from_str(kv[2]) {
                    Ok(start) => start,
                    Err(_) => return Err(()),
                };
                Ok(Tag::BYTERANGE(bytes_length, Some(start)))
            } else {
                Err(())
            }
        } else if s.starts_with("EXT-X-DISCONTINUITY-SEQUENCE") {
            // BUGFIX: must be tested before the shorter "EXT-X-DISCONTINUITY"
            // prefix, otherwise this tag is misparsed as Tag::DISCONTINUITY.
            Ok(Tag::DISCONTINUITY_SEQUENCE)
        } else if s.starts_with("EXT-X-DISCONTINUITY") {
            Ok(Tag::DISCONTINUITY)
        } else if s.starts_with("EXT-X-KEY") {
            Ok(Tag::KEY)
        } else if s.starts_with("EXT-X-MAP") {
            Ok(Tag::MAP)
        } else if s.starts_with("EXT-X-PROGRAM-DATE-TIME") {
            Ok(Tag::PROGRAM_DATE_TIME)
        } else if s.starts_with("EXT-X-DATERANGE") {
            Ok(Tag::DATERANGE)
        } else if s.starts_with("EXT-X-TARGETDURATION") {
            // #EXT-X-TARGETDURATION:<seconds>  e.g. "EXT-X-TARGETDURATION:12"
            let kv: Vec<&str> = s.split(':').collect();
            if kv.len() != 2 {
                return Err(());
            }
            match f64::from_str(kv[1]) {
                Ok(duration) => Ok(Tag::TARGETDURATION(duration)),
                Err(_) => Err(()),
            }
        } else if s.starts_with("EXT-X-MEDIA-SEQUENCE") {
            // #EXT-X-MEDIA-SEQUENCE:<n>  e.g. "EXT-X-MEDIA-SEQUENCE:1"
            let kv: Vec<&str> = s.split(':').collect();
            if kv.len() != 2 {
                return Err(());
            }
            match usize::from_str(kv[1]) {
                Ok(seq) => Ok(Tag::MEDIA_SEQUENCE(seq)),
                Err(_) => Err(()),
            }
        } else if s.starts_with("EXT-X-ENDLIST") {
            Ok(Tag::ENDLIST)
        } else if s.starts_with("EXT-X-PLAYLIST-TYPE") {
            // #EXT-X-PLAYLIST-TYPE:<EVENT|VOD>
            let kv: Vec<&str> = s.split(':').collect();
            if kv.len() != 2 {
                return Err(());
            }
            match kv[1] {
                "EVENT" => Ok(Tag::PLAYLIST_TYPE("EVENT".to_string())),
                "VOD" => Ok(Tag::PLAYLIST_TYPE("VOD".to_string())),
                _ => Err(()),
            }
        } else if s.starts_with("EXT-X-I-FRAMES-ONLY") {
            Ok(Tag::I_FRAMES_ONLY)
        } else if s.starts_with("EXT-X-MEDIA") {
            // Safe only because "EXT-X-MEDIA-SEQUENCE" was matched above.
            Ok(Tag::MEDIA)
        } else if s.starts_with("EXT-X-I-FRAME-STREAM-INF") {
            Ok(Tag::I_FRAME_STREAM_INF)
        } else if s.starts_with("EXT-X-STREAM-INF") {
            // #EXT-X-STREAM-INF:KEY=VALUE,...\n<uri>
            // e.g. "EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=300000\nchunklist-b300000.m3u8"
            // Required attrs: BANDWIDTH, CODECS.
            // Optional: RESOLUTION, FRAME-RATE, AVERAGE-BANDWIDTH, AUDIO,
            //           VIDEO, SUBTITLES, CLOSED-CAPTIONS.
            let mut kv: Vec<&str> = s.split(|c| c == ':' || c == ',' || c == '\n').collect();
            if kv.len() < 2 {
                // BUGFIX: need at least the tag name and a URI line; the old
                // code panicked in kv.remove(0) on a bare "EXT-X-STREAM-INF".
                return Err(());
            }
            let uri = kv.pop().unwrap().to_string(); // trailing URI line
            kv.remove(0); // drop the tag name itself
            let mut attrs: BTreeMap<String, String> = BTreeMap::new();
            for _attr in kv.iter() {
                let attr: Vec<&str> = _attr.split('=').collect();
                if attr.len() != 2 {
                    return Err(());
                }
                attrs.insert(attr[0].to_string(), attr[1].to_string());
            }
            Ok(Tag::STREAM_INF(attrs, uri))
        } else if s.starts_with("EXT-X-SESSION-DATA") {
            Ok(Tag::SESSION_DATA)
        } else if s.starts_with("EXT-X-SESSION-KEY") {
            Ok(Tag::SESSION_KEY)
        } else if s.starts_with("EXT-X-INDEPENDENT-SEGMENTS") {
            Ok(Tag::INDEPENDENT_SEGMENTS)
        } else if s.starts_with("EXT-X-START") {
            Ok(Tag::START)
        } else {
            Err(())
        }
    }
}
// Inherent methods for `Tag`; intentionally empty for now.
impl Tag {
}
let uri: String = kv[3].to_string();
Ok(Tag::INF(duration, title, uri) ) |
<|file_name|>xf86vidmode.rs<|end_file_name|><|fim▁begin|>/*
* This file generated automatically from xf86vidmode.xml by r_client.py.
* Edit at your peril.
*/
//Make the compiler quiet
#![allow(unused_imports)]
#![allow(non_camel_case_types)]
use std;
use libc::*;
use ffi;
// Protocol version of the XFree86-VidModeExtension these bindings target (2.2).
pub static XF86VIDMODE_MAJOR_VERSION : c_uint = 2;
pub static XF86VIDMODE_MINOR_VERSION : c_uint = 2;
// A monitor sync-range value as carried on the wire (32-bit).
pub type syncrange = u32;
/**
 * @brief syncrange_iterator
 **/
pub struct syncrange_iterator {
    pub data : *mut syncrange,
    pub rem : c_int,
    pub index: c_int
}
// A pixel-clock (dotclock) value as carried on the wire (32-bit).
pub type dotclock = u32;
/**
 * @brief dotclock_iterator
 **/
pub struct dotclock_iterator {
    pub data : *mut dotclock,
    pub rem : c_int,
    pub index: c_int
}
// Wire layout of a single video-mode description (timings plus flags).
pub struct mode_info {
    pub dotclock : dotclock,
    pub hdisplay : u16,
    pub hsyncstart : u16,
    pub hsyncend : u16,
    pub htotal : u16,
    // NOTE(review): hskew is u32 here but u16 in the request/reply structs
    // below — this matches the generator output; confirm against xf86vidmode.xml.
    pub hskew : u32,
    pub vdisplay : u16,
    pub vsyncstart : u16,
    pub vsyncend : u16,
    pub vtotal : u16,
    pub pad0 : [u8,..4],
    pub flags : u32,
    pub pad1 : [u8,..12],
    pub privsize : u32
}
/**
 * @brief mode_info_iterator
 **/
pub struct mode_info_iterator {
    pub data : *mut mode_info,
    pub rem : c_int,
    pub index: c_int
}
// Cookie identifying a pending QueryVersion request (see
// xcb_xf86vidmode_query_version below).
pub struct query_version_cookie {
    sequence : c_uint
}
// Wire layout of the QueryVersion request.
pub struct query_version_request {
    pub major_opcode : u8,
    pub minor_opcode : u8,
    pub length : u16
}
// Wire layout of the QueryVersion reply.
pub struct query_version_reply {
    pub response_type : u8,
    pub pad0 : u8,
    pub sequence : u16,
    pub length : u32,
    pub major_version : u16,
    pub minor_version : u16
}
// Cookie identifying a pending GetModeLine request.
pub struct get_mode_line_cookie {
    sequence : c_uint
}
// Wire layout of the GetModeLine request.
pub struct get_mode_line_request {
    pub major_opcode : u8,
    pub minor_opcode : u8,
    pub length : u16,
    pub screen : u16,
    pub pad0 : [u8,..2]
}
// Wire layout of the GetModeLine reply; variable-length private data
// (privsize bytes) follows the fixed part.
pub struct get_mode_line_reply {
    pub response_type : u8,
    pub pad0 : u8,
    pub sequence : u16,
    pub length : u32,
    pub dotclock : dotclock,
    pub hdisplay : u16,
    pub hsyncstart : u16,
    pub hsyncend : u16,
    pub htotal : u16,
    pub hskew : u16,
    pub vdisplay : u16,
    pub vsyncstart : u16,
    pub vsyncend : u16,
    pub vtotal : u16,
    pub pad1 : [u8,..2],
    pub flags : u32,
    pub pad2 : [u8,..12],
    pub privsize : u32
}
// Wire layout of the ModModeLine request (modifies the current mode line).
pub struct mod_mode_line_request {
    pub major_opcode : u8,
    pub minor_opcode : u8,
    pub length : u16,
    pub screen : u32,
    pub hdisplay : u16,
    pub hsyncstart : u16,
    pub hsyncend : u16,
    pub htotal : u16,
    pub hskew : u16,
    pub vdisplay : u16,
    pub vsyncstart : u16,
    pub vsyncend : u16,
    pub vtotal : u16,
    pub pad0 : [u8,..2],
    pub flags : u32,
    pub pad1 : [u8,..12],
    pub privsize : u32
}
// Wire layout of the SwitchMode request.
pub struct switch_mode_request {
    pub major_opcode : u8,
    pub minor_opcode : u8,
    pub length : u16,
    pub screen : u16,
    pub zoom : u16
}
// Cookie identifying a pending GetMonitor request.
pub struct get_monitor_cookie {
    sequence : c_uint
}
// Wire layout of the GetMonitor request.
pub struct get_monitor_request {
    pub major_opcode : u8,
    pub minor_opcode : u8,
    pub length : u16,
    pub screen : u16,
    pub pad0 : [u8,..2]
}
// Wire layout of the GetMonitor reply; hsync/vsync ranges and the
// vendor/model strings follow the fixed part (see the accessor fns below).
pub struct get_monitor_reply {
    pub response_type : u8,
    pub pad0 : u8,
    pub sequence : u16,
    pub length : u32,
    pub vendor_length : u8,
    pub model_length : u8,
    pub num_hsync : u8,
    pub num_vsync : u8,
    pub pad1 : [u8,..20]
}
// Wire layout of the LockModeSwitch request.
pub struct lock_mode_switch_request {
    pub major_opcode : u8,
    pub minor_opcode : u8,
    pub length : u16,
    pub screen : u16,
    pub lock : u16
}
// Cookie identifying a pending GetAllModeLines request.
pub struct get_all_mode_lines_cookie {
    sequence : c_uint
}
// Wire layout of the GetAllModeLines request.
pub struct get_all_mode_lines_request {
    pub major_opcode : u8,
    pub minor_opcode : u8,
    pub length : u16,
    pub screen : u16,
    pub pad0 : [u8,..2]
}
// Wire layout of the GetAllModeLines reply; `modecount` mode_info
// records follow the fixed part.
pub struct get_all_mode_lines_reply {
    pub response_type : u8,
    pub pad0 : u8,
    pub sequence : u16,
    pub length : u32,
    pub modecount : u32,
    pub pad1 : [u8,..20]
}
// Wire layout of the AddModeLine request: the new mode line plus the
// "after" mode the server should switch from.
pub struct add_mode_line_request {
    pub major_opcode : u8,
    pub minor_opcode : u8,
    pub length : u16,
    pub screen : u32,
    pub dotclock : dotclock,
    pub hdisplay : u16,
    pub hsyncstart : u16,
    pub hsyncend : u16,
    pub htotal : u16,
    pub hskew : u16,
    pub vdisplay : u16,
    pub vsyncstart : u16,
    pub vsyncend : u16,
    pub vtotal : u16,
    pub pad0 : [u8,..2],
    pub flags : u32,
    pub pad1 : [u8,..12],
    pub privsize : u32,
    pub after_dotclock : dotclock,
    pub after_hdisplay : u16,
    pub after_hsyncstart : u16,
    pub after_hsyncend : u16,
    pub after_htotal : u16,
    pub after_hskew : u16,
    pub after_vdisplay : u16,
    pub after_vsyncstart : u16,
    pub after_vsyncend : u16,
    pub after_vtotal : u16,
    pub pad2 : [u8,..2],
    pub after_flags : u32,
    pub pad3 : [u8,..12]
}
// Wire layout of the DeleteModeLine request (identifies the mode by its
// full set of timings).
pub struct delete_mode_line_request {
    pub major_opcode : u8,
    pub minor_opcode : u8,
    pub length : u16,
    pub screen : u32,
    pub dotclock : dotclock,
    pub hdisplay : u16,
    pub hsyncstart : u16,
    pub hsyncend : u16,
    pub htotal : u16,
    pub hskew : u16,
    pub vdisplay : u16,
    pub vsyncstart : u16,
    pub vsyncend : u16,
    pub vtotal : u16,
    pub pad0 : [u8,..2],
    pub flags : u32,
    pub pad1 : [u8,..12],
    pub privsize : u32
}
// Cookie identifying a pending ValidateModeLine request.
pub struct validate_mode_line_cookie {
    sequence : c_uint
}
// Wire layout of the ValidateModeLine request.
pub struct validate_mode_line_request {
    pub major_opcode : u8,
    pub minor_opcode : u8,
    pub length : u16,
    pub screen : u32,
    pub dotclock : dotclock,
    pub hdisplay : u16,
    pub hsyncstart : u16,
    pub hsyncend : u16,
    pub htotal : u16,
    pub hskew : u16,
    pub vdisplay : u16,
    pub vsyncstart : u16,
    pub vsyncend : u16,
    pub vtotal : u16,
    pub pad0 : [u8,..2],
    pub flags : u32,
    pub pad1 : [u8,..12],
    pub privsize : u32
}
// Wire layout of the ValidateModeLine reply; `status` reports the
// validation result.
pub struct validate_mode_line_reply {
    pub response_type : u8,
    pub pad0 : u8,
    pub sequence : u16,
    pub length : u32,
    pub status : u32,
    pub pad1 : [u8,..20]
}
// Wire layout of the SwitchToMode request (selects a mode by its timings).
pub struct switch_to_mode_request {
    pub major_opcode : u8,
    pub minor_opcode : u8,
    pub length : u16,
    pub screen : u32,
    pub dotclock : dotclock,
    pub hdisplay : u16,
    pub hsyncstart : u16,
    pub hsyncend : u16,
    pub htotal : u16,
    pub hskew : u16,
    pub vdisplay : u16,
    pub vsyncstart : u16,
    pub vsyncend : u16,
    pub vtotal : u16,
    pub pad0 : [u8,..2],
    pub flags : u32,
    pub pad1 : [u8,..12],
    pub privsize : u32
}
// Cookie identifying a pending GetViewPort request.
pub struct get_view_port_cookie {
    sequence : c_uint
}
// Wire layout of the GetViewPort request.
pub struct get_view_port_request {
    pub major_opcode : u8,
    pub minor_opcode : u8,
    pub length : u16,
    pub screen : u16,
    pub pad0 : [u8,..2]
}
// Wire layout of the GetViewPort reply (current viewport origin).
pub struct get_view_port_reply {
    pub response_type : u8,
    pub pad0 : u8,
    pub sequence : u16,
    pub length : u32,
    pub x : u32,
    pub y : u32,
    pub pad1 : [u8,..16]
}
// Wire layout of the SetViewPort request.
pub struct set_view_port_request {
    pub major_opcode : u8,
    pub minor_opcode : u8,
    pub length : u16,
    pub screen : u16,
    pub pad0 : [u8,..2],
    pub x : u32,
    pub y : u32
}
// Cookie identifying a pending GetDotClocks request.
pub struct get_dot_clocks_cookie {
    sequence : c_uint
}
// Wire layout of the GetDotClocks request.
pub struct get_dot_clocks_request {
    pub major_opcode : u8,
    pub minor_opcode : u8,
    pub length : u16,
    pub screen : u16,
    pub pad0 : [u8,..2]
}
// Wire layout of the GetDotClocks reply; the clock list follows the
// fixed part.
pub struct get_dot_clocks_reply {
    pub response_type : u8,
    pub pad0 : u8,
    pub sequence : u16,
    pub length : u32,
    pub flags : u32,
    pub clocks : u32,
    pub maxclocks : u32,
    pub pad1 : [u8,..12]
}
// Wire layout of the SetClientVersion request.
pub struct set_client_version_request {
    pub major_opcode : u8,
    pub minor_opcode : u8,
    pub length : u16,
    pub major : u16,
    pub minor : u16
}
// Wire layout of the SetGamma request (per-channel gamma values).
pub struct set_gamma_request {
    pub major_opcode : u8,
    pub minor_opcode : u8,
    pub length : u16,
    pub screen : u16,
    pub pad0 : [u8,..2],
    pub red : u32,
    pub green : u32,
    pub blue : u32,
    pub pad1 : [u8,..12]
}
// Cookie identifying a pending GetGamma request.
pub struct get_gamma_cookie {
    sequence : c_uint
}
// Wire layout of the GetGamma request.
// NOTE(review): pad0 is 26 bytes here (vs 2 in the sibling requests) —
// matches the generator output; confirm against xf86vidmode.xml.
pub struct get_gamma_request {
    pub major_opcode : u8,
    pub minor_opcode : u8,
    pub length : u16,
    pub screen : u16,
    pub pad0 : [u8,..26]
}
// Wire layout of the GetGamma reply (per-channel gamma values).
pub struct get_gamma_reply {
    pub response_type : u8,
    pub pad0 : u8,
    pub sequence : u16,
    pub length : u32,
    pub red : u32,
    pub green : u32,
    pub blue : u32,
    pub pad1 : [u8,..12]
}
// Cookie identifying a pending GetGammaRamp request.
pub struct get_gamma_ramp_cookie {
    sequence : c_uint
}
// Wire layout of the GetGammaRamp request; `size` is the ramp length.
pub struct get_gamma_ramp_request {
    pub major_opcode : u8,
    pub minor_opcode : u8,
    pub length : u16,
    pub screen : u16,
    pub size : u16
}
// Wire layout of the GetGammaRamp reply; the ramp data follows the
// fixed part.
pub struct get_gamma_ramp_reply {
    pub response_type : u8,
    pub pad0 : u8,
    pub sequence : u16,
    pub length : u32,
    pub size : u16,
    pub pad1 : [u8,..22]
}
// Wire layout of the SetGammaRamp request; the ramp data follows the
// fixed part.
pub struct set_gamma_ramp_request {
    pub major_opcode : u8,
    pub minor_opcode : u8,
    pub length : u16,
    pub screen : u16,
    pub size : u16
}
// Cookie identifying a pending GetGammaRampSize request.
pub struct get_gamma_ramp_size_cookie {
    sequence : c_uint
}
// Wire layout of the GetGammaRampSize request.
pub struct get_gamma_ramp_size_request {
    pub major_opcode : u8,
    pub minor_opcode : u8,
    pub length : u16,
    pub screen : u16,
    pub pad0 : [u8,..2]
}
// Wire layout of the GetGammaRampSize reply.
pub struct get_gamma_ramp_size_reply {
    pub response_type : u8,
    pub pad0 : u8,
    pub sequence : u16,
    pub length : u32,
    pub size : u16,
    pub pad1 : [u8,..22]
}
// Cookie identifying a pending GetPermissions request.
pub struct get_permissions_cookie {
    sequence : c_uint
}
// Wire layout of the GetPermissions request.
pub struct get_permissions_request {
    pub major_opcode : u8,
    pub minor_opcode : u8,
    pub length : u16,
    pub screen : u16,
    pub pad0 : [u8,..2]
}
// Wire layout of the GetPermissions reply; `permissions` is a bitmask.
pub struct get_permissions_reply {
    pub response_type : u8,
    pub pad0 : u8,
    pub sequence : u16,
    pub length : u32,
    pub permissions : u32,
    pub pad1 : [u8,..20]
}
// Error structs below share the common X11 error header layout
// (response_type, error_code, sequence).
pub struct bad_clock_error {
    pub response_type : u8,
    pub error_code : u8,
    pub sequence : u16
}
pub struct bad_h_timings_error {
    pub response_type : u8,
    pub error_code : u8,
    pub sequence : u16
}
pub struct bad_v_timings_error {
    pub response_type : u8,
    pub error_code : u8,
    pub sequence : u16
}
pub struct mode_unsuitable_error {
    pub response_type : u8,
    pub error_code : u8,
    pub sequence : u16
}
pub struct extension_disabled_error {
    pub response_type : u8,
    pub error_code : u8,
    pub sequence : u16
}
pub struct client_not_local_error {
    pub response_type : u8,
    pub error_code : u8,
    pub sequence : u16
}
pub struct zoom_locked_error {
    pub response_type : u8,
    pub error_code : u8,
    pub sequence : u16
}
extern "C" {
/**
* Get the next element of the iterator
* @param i Pointer to a syncrange_iterator
*
* Get the next element in the iterator. The member rem is
* decreased by one. The member data points to the next
* element. The member index is increased by sizeof(syncrange)
*
*
*/
pub fn xcb_xf86vidmode_syncrange_next (i:*mut syncrange_iterator) -> c_void;
/**
* Return the iterator pointing to the last element
* @param i An syncrange_iterator
* @return The iterator pointing to the last element
*
* Set the current element in the iterator to the last element.
* The member rem is set to 0. The member data points to the
* last element.
*/
pub fn xcb_xf86vidmode_syncrange_end (i:syncrange_iterator) -> ffi::base::generic_iterator;
/**
* Get the next element of the iterator
* @param i Pointer to a dotclock_iterator
*
* Get the next element in the iterator. The member rem is
* decreased by one. The member data points to the next
* element. The member index is increased by sizeof(dotclock)
*
*
*/
pub fn xcb_xf86vidmode_dotclock_next (i:*mut dotclock_iterator) -> c_void;
/**
* Return the iterator pointing to the last element
* @param i An dotclock_iterator
* @return The iterator pointing to the last element
*
* Set the current element in the iterator to the last element.
* The member rem is set to 0. The member data points to the
* last element.
*/
pub fn xcb_xf86vidmode_dotclock_end (i:dotclock_iterator) -> ffi::base::generic_iterator;
/**
* Get the next element of the iterator
* @param i Pointer to a mode_info_iterator
*
* Get the next element in the iterator. The member rem is
* decreased by one. The member data points to the next
* element. The member index is increased by sizeof(mode_info)
*
*
*/
pub fn xcb_xf86vidmode_mode_info_next (i:*mut mode_info_iterator) -> c_void;
/**
* Return the iterator pointing to the last element
* @param i An mode_info_iterator
* @return The iterator pointing to the last element
*
* Set the current element in the iterator to the last element.
* The member rem is set to 0. The member data points to the
* last element.
*/
pub fn xcb_xf86vidmode_mode_info_end (i:mode_info_iterator) -> ffi::base::generic_iterator;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
*/
pub fn xcb_xf86vidmode_query_version (c : *mut ffi::base::connection) -> query_version_cookie;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
* This form can be used only if the request will cause
* a reply to be generated. Any returned error will be
* placed in the event queue.
*/
pub fn xcb_xf86vidmode_query_version_unchecked (c : *mut ffi::base::connection) -> query_version_cookie;
/**
* Return the reply
* @param c The connection
* @param cookie The cookie
* @param e The generic_error supplied
*
* Returns the reply of the request asked by
*
* The parameter @p e supplied to this function must be NULL if
* xcb_xf86vidmode_query_version_unchecked(). is used.
* Otherwise, it stores the error if any.
*
* The returned value must be freed by the caller using free().
*/
pub fn xcb_xf86vidmode_query_version_reply (c : *mut ffi::base::connection,
cookie : query_version_cookie,
e : *mut *mut ffi::base::generic_error) -> *mut query_version_reply;
pub fn xcb_xf86vidmode_get_mode_line_sizeof (_buffer : *mut c_void) -> c_int;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
*/
pub fn xcb_xf86vidmode_get_mode_line (c : *mut ffi::base::connection,
screen : u16) -> get_mode_line_cookie;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
* This form can be used only if the request will cause
* a reply to be generated. Any returned error will be
* placed in the event queue.
*/
pub fn xcb_xf86vidmode_get_mode_line_unchecked (c : *mut ffi::base::connection,
screen : u16) -> get_mode_line_cookie;
pub fn xcb_xf86vidmode_get_mode_line_private (R : *mut get_mode_line_reply) -> *mut u8;
pub fn xcb_xf86vidmode_get_mode_line_private_length (R : *mut get_mode_line_reply) -> c_int;
pub fn xcb_xf86vidmode_get_mode_line_private_end (R : *mut get_mode_line_reply) -> ffi::base::generic_iterator;
/**
* Return the reply
* @param c The connection
* @param cookie The cookie
* @param e The generic_error supplied
*
* Returns the reply of the request asked by
*
* The parameter @p e supplied to this function must be NULL if
* xcb_xf86vidmode_get_mode_line_unchecked(). is used.
* Otherwise, it stores the error if any.
*
* The returned value must be freed by the caller using free().
*/
pub fn xcb_xf86vidmode_get_mode_line_reply (c : *mut ffi::base::connection,
cookie : get_mode_line_cookie,
e : *mut *mut ffi::base::generic_error) -> *mut get_mode_line_reply;
pub fn xcb_xf86vidmode_mod_mode_line_sizeof (_buffer : *mut c_void) -> c_int;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
* This form can be used only if the request will not cause
* a reply to be generated. Any returned error will be
* saved for handling by xcb_request_check().
*/
pub fn xcb_xf86vidmode_mod_mode_line_checked (c : *mut ffi::base::connection,
screen : u32,
hdisplay : u16,
hsyncstart : u16,
hsyncend : u16,
htotal : u16,
hskew : u16,
vdisplay : u16,
vsyncstart : u16,
vsyncend : u16,
vtotal : u16,
flags : u32,
privsize : u32,
private : *mut u8) -> ffi::base::void_cookie;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
*/
pub fn xcb_xf86vidmode_mod_mode_line (c : *mut ffi::base::connection,
screen : u32,
hdisplay : u16,
hsyncstart : u16,
hsyncend : u16,
htotal : u16,
hskew : u16,
vdisplay : u16,
vsyncstart : u16,
vsyncend : u16,
vtotal : u16,
flags : u32,
privsize : u32,
private : *mut u8) -> ffi::base::void_cookie;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
* This form can be used only if the request will not cause
* a reply to be generated. Any returned error will be
* saved for handling by xcb_request_check().
*/
pub fn xcb_xf86vidmode_switch_mode_checked (c : *mut ffi::base::connection,
screen : u16,
zoom : u16) -> ffi::base::void_cookie;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
*/
pub fn xcb_xf86vidmode_switch_mode (c : *mut ffi::base::connection,
screen : u16,
zoom : u16) -> ffi::base::void_cookie;
pub fn xcb_xf86vidmode_get_monitor_sizeof (_buffer : *mut c_void) -> c_int;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
*/
pub fn xcb_xf86vidmode_get_monitor (c : *mut ffi::base::connection,
screen : u16) -> get_monitor_cookie;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
* This form can be used only if the request will cause
* a reply to be generated. Any returned error will be
* placed in the event queue.
*/
pub fn xcb_xf86vidmode_get_monitor_unchecked (c : *mut ffi::base::connection,
screen : u16) -> get_monitor_cookie;
pub fn xcb_xf86vidmode_get_monitor_hsync (R : *mut get_monitor_reply) -> *mut syncrange;
pub fn xcb_xf86vidmode_get_monitor_hsync_length (R : *mut get_monitor_reply) -> c_int;
<|fim▁hole|>pub fn xcb_xf86vidmode_get_monitor_vsync (R : *mut get_monitor_reply) -> *mut syncrange;
pub fn xcb_xf86vidmode_get_monitor_vsync_length (R : *mut get_monitor_reply) -> c_int;
pub fn xcb_xf86vidmode_get_monitor_vsync_end (R : *mut get_monitor_reply) -> ffi::base::generic_iterator;
pub fn xcb_xf86vidmode_get_monitor_vendor (R : *mut get_monitor_reply) -> *mut c_char;
pub fn xcb_xf86vidmode_get_monitor_vendor_length (R : *mut get_monitor_reply) -> c_int;
pub fn xcb_xf86vidmode_get_monitor_vendor_end (R : *mut get_monitor_reply) -> ffi::base::generic_iterator;
pub fn xcb_xf86vidmode_get_monitor_alignment_pad (R : *mut get_monitor_reply) -> *mut c_void;
pub fn xcb_xf86vidmode_get_monitor_alignment_pad_length (R : *mut get_monitor_reply) -> c_int;
pub fn xcb_xf86vidmode_get_monitor_alignment_pad_end (R : *mut get_monitor_reply) -> ffi::base::generic_iterator;
pub fn xcb_xf86vidmode_get_monitor_model (R : *mut get_monitor_reply) -> *mut c_char;
pub fn xcb_xf86vidmode_get_monitor_model_length (R : *mut get_monitor_reply) -> c_int;
pub fn xcb_xf86vidmode_get_monitor_model_end (R : *mut get_monitor_reply) -> ffi::base::generic_iterator;
/**
* Return the reply
* @param c The connection
* @param cookie The cookie
* @param e The generic_error supplied
*
* Returns the reply of the request asked by
*
* The parameter @p e supplied to this function must be NULL if
* xcb_xf86vidmode_get_monitor_unchecked(). is used.
* Otherwise, it stores the error if any.
*
* The returned value must be freed by the caller using free().
*/
pub fn xcb_xf86vidmode_get_monitor_reply (c : *mut ffi::base::connection,
cookie : get_monitor_cookie,
e : *mut *mut ffi::base::generic_error) -> *mut get_monitor_reply;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
* This form can be used only if the request will not cause
* a reply to be generated. Any returned error will be
* saved for handling by xcb_request_check().
*/
pub fn xcb_xf86vidmode_lock_mode_switch_checked (c : *mut ffi::base::connection,
screen : u16,
lock : u16) -> ffi::base::void_cookie;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
*/
pub fn xcb_xf86vidmode_lock_mode_switch (c : *mut ffi::base::connection,
screen : u16,
lock : u16) -> ffi::base::void_cookie;
pub fn xcb_xf86vidmode_get_all_mode_lines_sizeof (_buffer : *mut c_void) -> c_int;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
*/
pub fn xcb_xf86vidmode_get_all_mode_lines (c : *mut ffi::base::connection,
screen : u16) -> get_all_mode_lines_cookie;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
* This form can be used only if the request will cause
* a reply to be generated. Any returned error will be
* placed in the event queue.
*/
pub fn xcb_xf86vidmode_get_all_mode_lines_unchecked (c : *mut ffi::base::connection,
screen : u16) -> get_all_mode_lines_cookie;
pub fn xcb_xf86vidmode_get_all_mode_lines_modeinfo (R : *mut get_all_mode_lines_reply) -> *mut mode_info;
pub fn xcb_xf86vidmode_get_all_mode_lines_modeinfo_length (R : *mut get_all_mode_lines_reply) -> c_int;
pub fn xcb_xf86vidmode_get_all_mode_lines_modeinfo_iterator (R : *mut get_all_mode_lines_reply) -> mode_info_iterator;
/**
* Return the reply
* @param c The connection
* @param cookie The cookie
* @param e The generic_error supplied
*
* Returns the reply of the request asked by
*
* The parameter @p e supplied to this function must be NULL if
* xcb_xf86vidmode_get_all_mode_lines_unchecked(). is used.
* Otherwise, it stores the error if any.
*
* The returned value must be freed by the caller using free().
*/
pub fn xcb_xf86vidmode_get_all_mode_lines_reply (c : *mut ffi::base::connection,
cookie : get_all_mode_lines_cookie,
e : *mut *mut ffi::base::generic_error) -> *mut get_all_mode_lines_reply;
pub fn xcb_xf86vidmode_add_mode_line_sizeof (_buffer : *mut c_void) -> c_int;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
* This form can be used only if the request will not cause
* a reply to be generated. Any returned error will be
* saved for handling by xcb_request_check().
*/
pub fn xcb_xf86vidmode_add_mode_line_checked (c : *mut ffi::base::connection,
screen : u32,
dotclock : dotclock,
hdisplay : u16,
hsyncstart : u16,
hsyncend : u16,
htotal : u16,
hskew : u16,
vdisplay : u16,
vsyncstart : u16,
vsyncend : u16,
vtotal : u16,
flags : u32,
privsize : u32,
after_dotclock : dotclock,
after_hdisplay : u16,
after_hsyncstart : u16,
after_hsyncend : u16,
after_htotal : u16,
after_hskew : u16,
after_vdisplay : u16,
after_vsyncstart : u16,
after_vsyncend : u16,
after_vtotal : u16,
after_flags : u32,
private : *mut u8) -> ffi::base::void_cookie;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
*/
pub fn xcb_xf86vidmode_add_mode_line (c : *mut ffi::base::connection,
screen : u32,
dotclock : dotclock,
hdisplay : u16,
hsyncstart : u16,
hsyncend : u16,
htotal : u16,
hskew : u16,
vdisplay : u16,
vsyncstart : u16,
vsyncend : u16,
vtotal : u16,
flags : u32,
privsize : u32,
after_dotclock : dotclock,
after_hdisplay : u16,
after_hsyncstart : u16,
after_hsyncend : u16,
after_htotal : u16,
after_hskew : u16,
after_vdisplay : u16,
after_vsyncstart : u16,
after_vsyncend : u16,
after_vtotal : u16,
after_flags : u32,
private : *mut u8) -> ffi::base::void_cookie;
pub fn xcb_xf86vidmode_delete_mode_line_sizeof (_buffer : *mut c_void) -> c_int;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
* This form can be used only if the request will not cause
* a reply to be generated. Any returned error will be
* saved for handling by xcb_request_check().
*/
pub fn xcb_xf86vidmode_delete_mode_line_checked (c : *mut ffi::base::connection,
screen : u32,
dotclock : dotclock,
hdisplay : u16,
hsyncstart : u16,
hsyncend : u16,
htotal : u16,
hskew : u16,
vdisplay : u16,
vsyncstart : u16,
vsyncend : u16,
vtotal : u16,
flags : u32,
privsize : u32,
private : *mut u8) -> ffi::base::void_cookie;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
*/
pub fn xcb_xf86vidmode_delete_mode_line (c : *mut ffi::base::connection,
screen : u32,
dotclock : dotclock,
hdisplay : u16,
hsyncstart : u16,
hsyncend : u16,
htotal : u16,
hskew : u16,
vdisplay : u16,
vsyncstart : u16,
vsyncend : u16,
vtotal : u16,
flags : u32,
privsize : u32,
private : *mut u8) -> ffi::base::void_cookie;
pub fn xcb_xf86vidmode_validate_mode_line_sizeof (_buffer : *mut c_void) -> c_int;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
*/
pub fn xcb_xf86vidmode_validate_mode_line (c : *mut ffi::base::connection,
screen : u32,
dotclock : dotclock,
hdisplay : u16,
hsyncstart : u16,
hsyncend : u16,
htotal : u16,
hskew : u16,
vdisplay : u16,
vsyncstart : u16,
vsyncend : u16,
vtotal : u16,
flags : u32,
privsize : u32,
private : *mut u8) -> validate_mode_line_cookie;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
* This form can be used only if the request will cause
* a reply to be generated. Any returned error will be
* placed in the event queue.
*/
pub fn xcb_xf86vidmode_validate_mode_line_unchecked (c : *mut ffi::base::connection,
screen : u32,
dotclock : dotclock,
hdisplay : u16,
hsyncstart : u16,
hsyncend : u16,
htotal : u16,
hskew : u16,
vdisplay : u16,
vsyncstart : u16,
vsyncend : u16,
vtotal : u16,
flags : u32,
privsize : u32,
private : *mut u8) -> validate_mode_line_cookie;
/**
* Return the reply
* @param c The connection
* @param cookie The cookie
* @param e The generic_error supplied
*
* Returns the reply of the request asked by
*
* The parameter @p e supplied to this function must be NULL if
* xcb_xf86vidmode_validate_mode_line_unchecked(). is used.
* Otherwise, it stores the error if any.
*
* The returned value must be freed by the caller using free().
*/
pub fn xcb_xf86vidmode_validate_mode_line_reply (c : *mut ffi::base::connection,
cookie : validate_mode_line_cookie,
e : *mut *mut ffi::base::generic_error) -> *mut validate_mode_line_reply;
pub fn xcb_xf86vidmode_switch_to_mode_sizeof (_buffer : *mut c_void) -> c_int;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
* This form can be used only if the request will not cause
* a reply to be generated. Any returned error will be
* saved for handling by xcb_request_check().
*/
pub fn xcb_xf86vidmode_switch_to_mode_checked (c : *mut ffi::base::connection,
screen : u32,
dotclock : dotclock,
hdisplay : u16,
hsyncstart : u16,
hsyncend : u16,
htotal : u16,
hskew : u16,
vdisplay : u16,
vsyncstart : u16,
vsyncend : u16,
vtotal : u16,
flags : u32,
privsize : u32,
private : *mut u8) -> ffi::base::void_cookie;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
*/
pub fn xcb_xf86vidmode_switch_to_mode (c : *mut ffi::base::connection,
screen : u32,
dotclock : dotclock,
hdisplay : u16,
hsyncstart : u16,
hsyncend : u16,
htotal : u16,
hskew : u16,
vdisplay : u16,
vsyncstart : u16,
vsyncend : u16,
vtotal : u16,
flags : u32,
privsize : u32,
private : *mut u8) -> ffi::base::void_cookie;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
*/
pub fn xcb_xf86vidmode_get_view_port (c : *mut ffi::base::connection,
screen : u16) -> get_view_port_cookie;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
* This form can be used only if the request will cause
* a reply to be generated. Any returned error will be
* placed in the event queue.
*/
pub fn xcb_xf86vidmode_get_view_port_unchecked (c : *mut ffi::base::connection,
screen : u16) -> get_view_port_cookie;
/**
* Return the reply
* @param c The connection
* @param cookie The cookie
* @param e The generic_error supplied
*
* Returns the reply of the request asked by
*
* The parameter @p e supplied to this function must be NULL if
* xcb_xf86vidmode_get_view_port_unchecked(). is used.
* Otherwise, it stores the error if any.
*
* The returned value must be freed by the caller using free().
*/
pub fn xcb_xf86vidmode_get_view_port_reply (c : *mut ffi::base::connection,
cookie : get_view_port_cookie,
e : *mut *mut ffi::base::generic_error) -> *mut get_view_port_reply;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
* This form can be used only if the request will not cause
* a reply to be generated. Any returned error will be
* saved for handling by xcb_request_check().
*/
pub fn xcb_xf86vidmode_set_view_port_checked (c : *mut ffi::base::connection,
screen : u16,
x : u32,
y : u32) -> ffi::base::void_cookie;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
*/
pub fn xcb_xf86vidmode_set_view_port (c : *mut ffi::base::connection,
screen : u16,
x : u32,
y : u32) -> ffi::base::void_cookie;
pub fn xcb_xf86vidmode_get_dot_clocks_sizeof (_buffer : *mut c_void) -> c_int;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
*/
pub fn xcb_xf86vidmode_get_dot_clocks (c : *mut ffi::base::connection,
screen : u16) -> get_dot_clocks_cookie;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
* This form can be used only if the request will cause
* a reply to be generated. Any returned error will be
* placed in the event queue.
*/
pub fn xcb_xf86vidmode_get_dot_clocks_unchecked (c : *mut ffi::base::connection,
screen : u16) -> get_dot_clocks_cookie;
pub fn xcb_xf86vidmode_get_dot_clocks_clock (R : *mut get_dot_clocks_reply) -> *mut u32;
pub fn xcb_xf86vidmode_get_dot_clocks_clock_length (R : *mut get_dot_clocks_reply) -> c_int;
pub fn xcb_xf86vidmode_get_dot_clocks_clock_end (R : *mut get_dot_clocks_reply) -> ffi::base::generic_iterator;
/**
* Return the reply
* @param c The connection
* @param cookie The cookie
* @param e The generic_error supplied
*
* Returns the reply of the request asked by
*
* The parameter @p e supplied to this function must be NULL if
* xcb_xf86vidmode_get_dot_clocks_unchecked(). is used.
* Otherwise, it stores the error if any.
*
* The returned value must be freed by the caller using free().
*/
pub fn xcb_xf86vidmode_get_dot_clocks_reply (c : *mut ffi::base::connection,
cookie : get_dot_clocks_cookie,
e : *mut *mut ffi::base::generic_error) -> *mut get_dot_clocks_reply;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
* This form can be used only if the request will not cause
* a reply to be generated. Any returned error will be
* saved for handling by xcb_request_check().
*/
pub fn xcb_xf86vidmode_set_client_version_checked (c : *mut ffi::base::connection,
major : u16,
minor : u16) -> ffi::base::void_cookie;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
*/
pub fn xcb_xf86vidmode_set_client_version (c : *mut ffi::base::connection,
major : u16,
minor : u16) -> ffi::base::void_cookie;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
* This form can be used only if the request will not cause
* a reply to be generated. Any returned error will be
* saved for handling by xcb_request_check().
*/
pub fn xcb_xf86vidmode_set_gamma_checked (c : *mut ffi::base::connection,
screen : u16,
red : u32,
green : u32,
blue : u32) -> ffi::base::void_cookie;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
*/
pub fn xcb_xf86vidmode_set_gamma (c : *mut ffi::base::connection,
screen : u16,
red : u32,
green : u32,
blue : u32) -> ffi::base::void_cookie;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
*/
pub fn xcb_xf86vidmode_get_gamma (c : *mut ffi::base::connection,
screen : u16) -> get_gamma_cookie;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
* This form can be used only if the request will cause
* a reply to be generated. Any returned error will be
* placed in the event queue.
*/
pub fn xcb_xf86vidmode_get_gamma_unchecked (c : *mut ffi::base::connection,
screen : u16) -> get_gamma_cookie;
/**
* Return the reply
* @param c The connection
* @param cookie The cookie
* @param e The generic_error supplied
*
* Returns the reply of the request asked by
*
* The parameter @p e supplied to this function must be NULL if
* xcb_xf86vidmode_get_gamma_unchecked(). is used.
* Otherwise, it stores the error if any.
*
* The returned value must be freed by the caller using free().
*/
pub fn xcb_xf86vidmode_get_gamma_reply (c : *mut ffi::base::connection,
cookie : get_gamma_cookie,
e : *mut *mut ffi::base::generic_error) -> *mut get_gamma_reply;
pub fn xcb_xf86vidmode_get_gamma_ramp_sizeof (_buffer : *mut c_void) -> c_int;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
*/
pub fn xcb_xf86vidmode_get_gamma_ramp (c : *mut ffi::base::connection,
screen : u16,
size : u16) -> get_gamma_ramp_cookie;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
* This form can be used only if the request will cause
* a reply to be generated. Any returned error will be
* placed in the event queue.
*/
pub fn xcb_xf86vidmode_get_gamma_ramp_unchecked (c : *mut ffi::base::connection,
screen : u16,
size : u16) -> get_gamma_ramp_cookie;
pub fn xcb_xf86vidmode_get_gamma_ramp_red (R : *mut get_gamma_ramp_reply) -> *mut u16;
pub fn xcb_xf86vidmode_get_gamma_ramp_red_length (R : *mut get_gamma_ramp_reply) -> c_int;
pub fn xcb_xf86vidmode_get_gamma_ramp_red_end (R : *mut get_gamma_ramp_reply) -> ffi::base::generic_iterator;
pub fn xcb_xf86vidmode_get_gamma_ramp_green (R : *mut get_gamma_ramp_reply) -> *mut u16;
pub fn xcb_xf86vidmode_get_gamma_ramp_green_length (R : *mut get_gamma_ramp_reply) -> c_int;
pub fn xcb_xf86vidmode_get_gamma_ramp_green_end (R : *mut get_gamma_ramp_reply) -> ffi::base::generic_iterator;
pub fn xcb_xf86vidmode_get_gamma_ramp_blue (R : *mut get_gamma_ramp_reply) -> *mut u16;
pub fn xcb_xf86vidmode_get_gamma_ramp_blue_length (R : *mut get_gamma_ramp_reply) -> c_int;
pub fn xcb_xf86vidmode_get_gamma_ramp_blue_end (R : *mut get_gamma_ramp_reply) -> ffi::base::generic_iterator;
/**
* Return the reply
* @param c The connection
* @param cookie The cookie
* @param e The generic_error supplied
*
* Returns the reply of the request asked by
*
* The parameter @p e supplied to this function must be NULL if
* xcb_xf86vidmode_get_gamma_ramp_unchecked(). is used.
* Otherwise, it stores the error if any.
*
* The returned value must be freed by the caller using free().
*/
pub fn xcb_xf86vidmode_get_gamma_ramp_reply (c : *mut ffi::base::connection,
cookie : get_gamma_ramp_cookie,
e : *mut *mut ffi::base::generic_error) -> *mut get_gamma_ramp_reply;
pub fn xcb_xf86vidmode_set_gamma_ramp_sizeof (_buffer : *mut c_void) -> c_int;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
* This form can be used only if the request will not cause
* a reply to be generated. Any returned error will be
* saved for handling by xcb_request_check().
*/
pub fn xcb_xf86vidmode_set_gamma_ramp_checked (c : *mut ffi::base::connection,
screen : u16,
size : u16,
red : *mut u16,
green : *mut u16,
blue : *mut u16) -> ffi::base::void_cookie;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
*/
pub fn xcb_xf86vidmode_set_gamma_ramp (c : *mut ffi::base::connection,
screen : u16,
size : u16,
red : *mut u16,
green : *mut u16,
blue : *mut u16) -> ffi::base::void_cookie;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
*/
pub fn xcb_xf86vidmode_get_gamma_ramp_size (c : *mut ffi::base::connection,
screen : u16) -> get_gamma_ramp_size_cookie;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
* This form can be used only if the request will cause
* a reply to be generated. Any returned error will be
* placed in the event queue.
*/
pub fn xcb_xf86vidmode_get_gamma_ramp_size_unchecked (c : *mut ffi::base::connection,
screen : u16) -> get_gamma_ramp_size_cookie;
/**
* Return the reply
* @param c The connection
* @param cookie The cookie
* @param e The generic_error supplied
*
* Returns the reply of the request asked by
*
* The parameter @p e supplied to this function must be NULL if
* xcb_xf86vidmode_get_gamma_ramp_size_unchecked(). is used.
* Otherwise, it stores the error if any.
*
* The returned value must be freed by the caller using free().
*/
pub fn xcb_xf86vidmode_get_gamma_ramp_size_reply (c : *mut ffi::base::connection,
cookie : get_gamma_ramp_size_cookie,
e : *mut *mut ffi::base::generic_error) -> *mut get_gamma_ramp_size_reply;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
*/
pub fn xcb_xf86vidmode_get_permissions (c : *mut ffi::base::connection,
screen : u16) -> get_permissions_cookie;
/**
*
* @param c The connection
* @return A cookie
*
* Delivers a request to the X server.
*
* This form can be used only if the request will cause
* a reply to be generated. Any returned error will be
* placed in the event queue.
*/
pub fn xcb_xf86vidmode_get_permissions_unchecked (c : *mut ffi::base::connection,
screen : u16) -> get_permissions_cookie;
/**
* Return the reply
* @param c The connection
* @param cookie The cookie
* @param e The generic_error supplied
*
* Returns the reply of the request asked by
*
* The parameter @p e supplied to this function must be NULL if
* xcb_xf86vidmode_get_permissions_unchecked(). is used.
* Otherwise, it stores the error if any.
*
* The returned value must be freed by the caller using free().
*/
pub fn xcb_xf86vidmode_get_permissions_reply (c : *mut ffi::base::connection,
cookie : get_permissions_cookie,
e : *mut *mut ffi::base::generic_error) -> *mut get_permissions_reply;
}<|fim▁end|> | pub fn xcb_xf86vidmode_get_monitor_hsync_end (R : *mut get_monitor_reply) -> ffi::base::generic_iterator;
|
<|file_name|>yawn_settings.py<|end_file_name|><|fim▁begin|>import dj_database_url
import pkg_resources
from yawn.settings.base import *
# this uses DATABASE_URL env variable:
DATABASES['default'] = dj_database_url.config(conn_max_age=600)
SECRET_KEY = os.environ.get('SECRET_KEY')
ALLOWED_HOSTS = [os.environ.get('ALLOWED_HOSTS')]
# Allow anonymous read
REST_FRAMEWORK['DEFAULT_PERMISSION_CLASSES'] = [
'rest_framework.permissions.IsAuthenticatedOrReadOnly',
]
INSTALLED_APPS += ['raven.contrib.django']
try:
yawn_version = pkg_resources.require("yawns")[0].version
except:
yawn_version = None
RAVEN_CONFIG = {
'dsn': os.environ.get('SENTRY_DSN'),
'release': yawn_version,
'name': os.environ.get('KUBERNETES_POD_NAME'),<|fim▁hole|><|fim▁end|> | 'include_paths': ['yawn'],
} |
<|file_name|>test_otp.py<|end_file_name|><|fim▁begin|>import unittest
from itertools import izip
import numpy as np
from numpy import cos, sin, pi
from pele.angleaxis import RBTopology, RigidFragment, RBPotentialWrapper
from pele.potentials import LJ
from pele.angleaxis._otp_cluster import OTPCluster<|fim▁hole|>
_x03 = np.array([2.550757898788, 2.591553038507, 3.696836364193,
2.623281513163, 3.415794212648, 3.310786279789,
1.791383852327, 2.264321752809, 4.306217333671,
0.761945654023, -0.805817782109, 1.166981882601,
0.442065301864, -2.747066418223, -1.784325262714,
-1.520905562598, 0.403670860200, -0.729768985400])
_x03_atomistic = np.array([3.064051819556, 2.474533745459, 3.646107658946,
2.412011983074, 2.941152759499, 4.243695098053,
2.176209893734, 2.358972610563, 3.200706335581,
2.786627589565, 3.211876105193, 2.850924310983,
1.962626909252, 3.436918873216, 3.370903763850,
3.120590040673, 3.598587659535, 3.710530764535,
1.697360211099, 2.317229950712, 4.823998989452,
2.283487958310, 1.840698306602, 4.168734267290,
1.393303387573, 2.635037001113, 3.925918744272
])
class TestOTPExplicit(unittest.TestCase):
def make_otp(self):
"""this constructs a single OTP molecule"""
otp = RigidFragment()
otp.add_atom("O", np.array([0.0, -2./3 * np.sin( 7.*pi/24.), 0.0]), 1.)
otp.add_atom("O", np.array([cos( 7.*pi/24.), 1./3. * sin( 7.* pi/24.), 0.0]), 1.)
otp.add_atom("O", np.array([-cos( 7.* pi/24.), 1./3. * sin( 7.*pi/24), 0.0]), 1.)
otp.finalize_setup()
return otp
def setUp(self):
nrigid = 3
self.topology = RBTopology()
self.topology.add_sites([self.make_otp() for i in xrange(nrigid)])
self.topology.finalize_setup()
cartesian_potential = LJ()
self.pot = RBPotentialWrapper(self.topology, cartesian_potential)
self.x0 = _x03
self.x0 = np.array(self.x0)
self.e0 = -17.3387670023
assert nrigid * 6 == self.x0.size
self.x0atomistic = _x03_atomistic
self.nrigid = nrigid
def test_energy(self):
e = self.pot.getEnergy(self.x0)
self.assertAlmostEqual(e, self.e0, delta=1e-4)
def test_energy_gradient(self):
e = self.pot.getEnergy(self.x0)
gnum = self.pot.NumericalDerivative(self.x0)
e2, g = self.pot.getEnergyGradient(self.x0)
self.assertAlmostEqual(e, e2, delta=1e-4)
for i in xrange(g.size):
self.assertAlmostEqual(g[i], gnum[i], 2)
def test_to_atomistic(self):
xatom = self.topology.to_atomistic(self.x0).flatten()
for i in xrange(xatom.size):
self.assertAlmostEqual(xatom[i], self.x0atomistic[i], 2)
def test_site_to_atomistic(self):
rf = self.make_otp()
p = np.array([1., 2, 3])
p /= np.linalg.norm(p)
com = np.array([4., 5, 6])
print "otp to atomistic"
print rf.to_atomistic(com, p)
print "otp transform grad"
g = np.array(range(9), dtype=float).reshape([-1,3])
print g.reshape(-1)
print rf.transform_grad(p, g)
def test_to_atomistic2(self):
x0 = np.array(range(self.nrigid * 6), dtype=float)
x2 = x0.reshape([-1,3])
for p in x2[self.nrigid:,:]:
p /= np.linalg.norm(p)
atomistic = self.topology.to_atomistic(x0).flatten()
from pele.potentials import LJ
lj = LJ()
e, g = lj.getEnergyGradient(atomistic.reshape(-1))
grb = self.topology.transform_gradient(x0, g)
rbpot = RBPotentialWrapper(self.topology, lj)
print rbpot.getEnergy(x0)
class TestCppRBPotentialWrapper(TestOTPExplicit):
def test_pot_wrapper(self):
from pele.angleaxis import _cpp_aa
from pele.potentials import LJ
rbpot_cpp = _cpp_aa.RBPotentialWrapper(self.topology, LJ())
rbpot = RBPotentialWrapper(self.topology, LJ())
self.assertAlmostEqual(rbpot_cpp.getEnergy(self.x0),
rbpot.getEnergy(self.x0), 4)
e1, grad1 = rbpot_cpp.getEnergyGradient(self.x0)
e2, grad2 = rbpot.getEnergyGradient(self.x0)
self.assertAlmostEqual(e1, e2, 4)
for g1, g2 in zip(grad1, grad2):
self.assertAlmostEqual(g1, g2, 3)
# print "energy cpp"
# print e1, e2
# print grad1
# print grad2
_x1 = np.array([ 1.9025655 , 0.39575842, 2.70994994, 1.12711741, 0.63413933,
1.99433564, 1.86553644, 1.71434811, 2.22927686, 0.80189315,
1.19513512, 3.02357997, 1.25845172, -0.06244027, 1.27217385,
-2.26564485, 0.25537024, 0.66231258, -1.49510664, 0.94428774,
-0.04120075, -0.87664883, -0.21441754, 2.05796547])
_x2 = np.array([ 2.01932983, 0.32928065, 2.34949584, 1.12261277, 0.84195098,
2.08827517, 1.42644916, 1.83608794, 2.23147536, 1.12872074,
0.93206141, 3.28789605, 1.73243138, -0.1199651 , 1.02925229,
-1.64603729, 0.30701482, 0.90204992, -1.96259809, 0.06557119,
0.11010908, -0.37462588, -0.42374544, 1.97728056])
class TestOTPCluster(unittest.TestCase):
def setUp(self):
np.random.seed(0)
self.nmol = 4
self.system = OTPCluster(self.nmol)
pot = self.system.get_potential()
self.db = self.system.create_database()
self.m1 = self.db.addMinimum(pot.getEnergy(_x1), _x1)
self.m2 = self.db.addMinimum(pot.getEnergy(_x2), _x2)
def test1(self):
pot = self.system.get_potential()
self.assertLess(np.linalg.norm(pot.getGradient(self.m1.coords)), .1)
self.assertLess(np.linalg.norm(pot.getGradient(self.m2.coords)), .1)
def test_basinhopping(self):
db = self.system.create_database()
bh = self.system.get_basinhopping(db)
bh.setPrinting(ostream=None)
bh.run(5)
self.assertGreaterEqual(db.number_of_minima(), 1)
def test_double_ended_connect(self):
connect = self.system.get_double_ended_connect(self.m1, self.m2, self.db)
connect.connect()
self.assertTrue(connect.success())
path = connect.returnPath()
def test_thermodynamics(self):
get_thermodynamic_information(self.system, self.db, nproc=None, recalculate=True)
self.assertIsNotNone(self.m1.fvib)
mt = self.system.get_metric_tensor(self.m1.coords)
print "metric tensor"
print mt
class TestRBTopologyOTP(unittest.TestCase):
def setUp(self):
np.random.seed(0)
self.nmol = 3
self.system = OTPCluster(self.nmol)
# pot = self.system.get_potential()
# self.db = self.system.create_database()
# self.m1 = self.db.addMinimum(pot.getEnergy(_x1), _x1)
# self.m2 = self.db.addMinimum(pot.getEnergy(_x2), _x2)
self.x0 = np.array([ 0, 1, 2, 3, 4, 5, 6, 7, 8,
0.517892, 0.575435, 0.632979,
0.531891, 0.576215, 0.620539,
0.540562, 0.5766, 0.612637 ])
from pele.angleaxis.aamindist import TransformAngleAxisCluster
self.topology = self.system.aatopology
self.transform = TransformAngleAxisCluster(self.topology)
self.p0 = np.array(range(1,4), dtype=float)
self.p0 /= np.linalg.norm(self.p0)
def test_transform_rotate(self):
print "\ntest rotate"
x = self.x0.copy()
p = np.array(range(1,4), dtype=float)
p /= np.linalg.norm(p)
self.transform.rotate(x, rotations.aa2mx(p))
xnewtrue = np.array([ 0.48757698, 0.61588594, 2.09355038, 2.02484605, 4.76822812,
4.81289924, 3.56211511, 8.92057031, 7.53224809, 0.71469473,
1.23875927, 1.36136748, 0.72426504, 1.24674367, 1.34426835,
0.73015833, 1.25159032, 1.33345003])
for v1, v2 in izip(x, xnewtrue):
self.assertAlmostEqual(v1, v2, 5)
def test_align_path(self):
print "\ntest align_path"
x1 = self.x0.copy()
x2 = self.x0 + 5
self.topology.align_path([x1, x2])
x2true = np.array([ 5. , 6. , 7. , 8. ,
9. , 10. , 11. , 12. ,
13. , 1.92786071, 1.94796529, 1.96807021,
1.93320298, 1.94869267, 1.96418236, 1.93645608,
1.94905155, 1.96164668])
for v1, v2 in izip(x1, self.x0):
self.assertAlmostEqual(v1, v2, 5)
for v1, v2 in izip(x2, x2true):
self.assertAlmostEqual(v1, v2, 5)
def test_cpp_zero_ev(self):
print "\ntest zeroEV cpp"
x = self.x0.copy()
zev = self.topology._zeroEV_python(x)
czev = self.topology.cpp_topology.get_zero_modes(x)
self.assertEqual(len(czev), 6)
for ev, cev in izip(zev, czev):
for v1, v2 in izip(ev, cev):
self.assertAlmostEqual(v1, v2, 5)
def test_site_distance_squared(self):
print "\ntest site distance squared"
c0 = np.zeros(3)
c1 = np.ones(3)
p0 = self.p0.copy()
p1 = p0 + 1
site = self.system.make_otp()
d2 = site.distance_squared(c0, p0, c1, p1)
d2p = _sitedist(c1-c0, p0, p1, site.S, site.W, site.cog)
self.assertAlmostEqual(d2, 10.9548367929, 5)
def test_distance_squared(self):
print "\ntest distance squared"
x1 = self.x0.copy()
x2 = self.x0 + 1.1
d2 = self.topology.distance_squared(x1, x2)
d3 = self.topology._distance_squared_python(x1, x2)
self.assertAlmostEqual(d2, 38.9401810973, 5)
self.assertAlmostEqual(d2, d3, 5)
def test_distance_squared_grad(self):
print "\ntest distance squared grad"
x1 = self.x0.copy()
x2 = self.x0 + 1.1
grad = self.topology.distance_squared_grad(x1, x2)
g2 = self.topology._distance_squared_grad_python(x1, x2)
gtrue = np.array([-6.6 , -6.6 , -6.6 , -6.6 , -6.6 ,
-6.6 , -6.6 , -6.6 , -6.6 , -1.21579025,
-0.07013805, -1.2988823 , -1.21331786, -0.06984532, -1.28945301,
-1.2116105 , -0.06975828, -1.28362943])
for v1, v2 in izip(grad, gtrue):
self.assertAlmostEqual(v1, v2, 5)
for v1, v2 in izip(grad, g2):
self.assertAlmostEqual(v1, v2, 5)
def test_measure_align(self):
print "\ntest measure align"
x1 = self.x0.copy()
x2 = self.x0 + 5.1
x2[-1] = x1[-1] + .1
x20 = x2.copy()
measure = MeasureRigidBodyCluster(self.topology)
measure.align(x1, x2)
if __name__ == "__main__":
unittest.main()<|fim▁end|> | from pele.thermodynamics import get_thermodynamic_information
from pele.utils import rotations
from pele.angleaxis._aa_utils import _rot_mat_derivative, _sitedist_grad, _sitedist
from pele.angleaxis.aamindist import MeasureRigidBodyCluster |
<|file_name|>layers_pooling.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
__author__ = 'ar'
from layers_basic import LW_Layer, default_data_format
from layers_convolutional import conv_output_length
###############################################
class _LW_Pooling1D(LW_Layer):
input_dim = 3
def __init__(self, pool_size=2, strides=None, padding='valid'):
if strides is None:
strides = pool_size
assert padding in {'valid', 'same'}, 'border_mode must be in {valid, same}'
self.pool_length = pool_size
self.stride = strides
self.border_mode = padding
def get_output_shape_for(self, input_shape):
length = conv_output_length(input_shape[1], self.pool_length, self.border_mode, self.stride)
return (input_shape[0], length, input_shape[2])
class LW_MaxPooling1D(_LW_Pooling1D):
def __init__(self, pool_size=2, strides=None, padding='valid'):
super(LW_MaxPooling1D, self).__init__(pool_size, strides, padding)
class LW_AveragePooling1D(_LW_Pooling1D):
def __init__(self, pool_size=2, strides=None, padding='valid'):
super(LW_AveragePooling1D, self).__init__(pool_size, strides, padding)
###############################################
class _LW_Pooling2D(LW_Layer):
def __init__(self, pool_size=(2, 2), strides=None, padding='valid', data_format='default'):
if data_format == 'default':
data_format = default_data_format
assert data_format in {'channels_last', 'channels_first'}, 'data_format must be in {channels_last, channels_first}'
self.pool_size = tuple(pool_size)
if strides is None:
strides = self.pool_size
self.strides = tuple(strides)
assert padding in {'valid', 'same'}, 'border_mode must be in {valid, same}'
self.border_mode = padding
self.dim_ordering = data_format
def get_output_shape_for(self, input_shape):
if self.dim_ordering == 'channels_first':
rows = input_shape[2]
cols = input_shape[3]
elif self.dim_ordering == 'channels_last':
rows = input_shape[1]
cols = input_shape[2]
else:
raise Exception('Invalid dim_ordering: ' + self.dim_ordering)
rows = conv_output_length(rows, self.pool_size[0], self.border_mode, self.strides[0])
cols = conv_output_length(cols, self.pool_size[1], self.border_mode, self.strides[1])
if self.dim_ordering == 'channels_first':
return (input_shape[0], input_shape[1], rows, cols)
elif self.dim_ordering == 'channels_last':
return (input_shape[0], rows, cols, input_shape[3])
else:
raise Exception('Invalid dim_ordering: ' + self.dim_ordering)
class LW_MaxPooling2D(_LW_Pooling2D):
def __init__(self, pool_size=(2, 2), strides=None, padding='valid', data_format='default'):
super(LW_MaxPooling2D, self).__init__(pool_size, strides, padding, data_format)
class LW_AveragePooling2D(_LW_Pooling2D):
def __init__(self, pool_size=(2, 2), strides=None, padding='valid', data_format='default'):
super(LW_AveragePooling2D, self).__init__(pool_size, strides, padding, data_format)
###############################################
class _LW_Pooling3D(LW_Layer):
def __init__(self, pool_size=(2, 2, 2), strides=None, border_mode='valid', dim_ordering='default'):
if dim_ordering == 'default':
dim_ordering = default_data_format
assert dim_ordering in {'channels_last', 'channels_first'}, 'data_format must be in {channels_last, channels_first}'
self.pool_size = tuple(pool_size)
if strides is None:
strides = self.pool_size
self.strides = tuple(strides)
assert border_mode in {'valid', 'same'}, 'border_mode must be in {valid, same}'
self.border_mode = border_mode
self.dim_ordering = dim_ordering
def get_output_shape_for(self, input_shape):
if self.dim_ordering == 'channels_first':
len_dim1 = input_shape[2]
len_dim2 = input_shape[3]
len_dim3 = input_shape[4]
elif self.dim_ordering == 'channels_last':
len_dim1 = input_shape[1]
len_dim2 = input_shape[2]
len_dim3 = input_shape[3]
else:
raise Exception('Invalid dim_ordering: ' + self.dim_ordering)
len_dim1 = conv_output_length(len_dim1, self.pool_size[0], self.border_mode, self.strides[0])
len_dim2 = conv_output_length(len_dim2, self.pool_size[1], self.border_mode, self.strides[1])
len_dim3 = conv_output_length(len_dim3, self.pool_size[2], self.border_mode, self.strides[2])<|fim▁hole|> else:
raise Exception('Invalid dim_ordering: ' + self.dim_ordering)
class LW_MaxPooling3D(_LW_Pooling3D):
def __init__(self, pool_size=(2, 2, 2), strides=None, border_mode='valid', dim_ordering='default'):
super(LW_MaxPooling3D, self).__init__(pool_size, strides, border_mode, dim_ordering)
class LW_AveragePooling3D(_LW_Pooling3D):
def __init__(self, pool_size=(2, 2, 2), strides=None, border_mode='valid', dim_ordering='default'):
super(LW_AveragePooling3D, self).__init__(pool_size, strides, border_mode, dim_ordering)
###############################################
class _LW_GlobalPooling1D(LW_Layer):
def __init__(self):
pass
def get_output_shape_for(self, input_shape):
return (input_shape[0], input_shape[2])
class LW_GlobalAveragePooling1D(_LW_GlobalPooling1D):
pass
class LW_GlobalMaxPooling1D(_LW_GlobalPooling1D):
pass
###############################################
class _LW_GlobalPooling2D(LW_Layer):
def __init__(self, data_format='default'):
if data_format == 'default':
data_format = default_data_format
self.dim_ordering = data_format
def get_output_shape_for(self, input_shape):
if self.dim_ordering == 'channels_last':
return (input_shape[0], input_shape[3])
else:
return (input_shape[0], input_shape[1])
class LW_GlobalAveragePooling2D(_LW_GlobalPooling2D):
pass
class LW_GlobalMaxPooling2D(_LW_GlobalPooling2D):
pass
###############################################
class _LW_GlobalPooling3D(LW_Layer):
def __init__(self, data_format='default'):
if data_format == 'default':
data_format = default_data_format
self.dim_ordering = data_format
def get_output_shape_for(self, input_shape):
if self.dim_ordering == 'channels_last':
return (input_shape[0], input_shape[4])
else:
return (input_shape[0], input_shape[1])
class LW_GlobalAveragePooling3D(_LW_GlobalPooling3D):
pass
class LW_GlobalMaxPooling3D(_LW_GlobalPooling3D):
pass
###############################################
if __name__ == '__main__':
pass<|fim▁end|> | if self.dim_ordering == 'channels_first':
return (input_shape[0], input_shape[1], len_dim1, len_dim2, len_dim3)
elif self.dim_ordering == 'channels_last':
return (input_shape[0], len_dim1, len_dim2, len_dim3, input_shape[4]) |
<|file_name|>properties_render_layer.py<|end_file_name|><|fim▁begin|># ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
import bpy
from bpy.types import Menu, Panel, UIList
class RenderLayerButtonsPanel():
bl_space_type = 'PROPERTIES'<|fim▁hole|> @classmethod
def poll(cls, context):
scene = context.scene
return scene and (scene.render.engine in cls.COMPAT_ENGINES)
class RENDERLAYER_UL_renderlayers(UIList):
def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
# assert(isinstance(item, bpy.types.SceneRenderLayer)
layer = item
if self.layout_type in {'DEFAULT', 'COMPACT'}:
layout.label(layer.name, icon_value=icon, translate=False)
layout.prop(layer, "use", text="", index=index)
elif self.layout_type in {'GRID'}:
layout.alignment = 'CENTER'
layout.label("", icon_value=icon)
class RENDERLAYER_PT_layers(RenderLayerButtonsPanel, Panel):
bl_label = "Layer List"
bl_options = {'HIDE_HEADER'}
COMPAT_ENGINES = {'BLENDER_RENDER'}
def draw(self, context):
layout = self.layout
scene = context.scene
rd = scene.render
row = layout.row()
row.template_list("RENDERLAYER_UL_renderlayers", "", rd, "layers", rd.layers, "active_index", rows=2)
col = row.column(align=True)
col.operator("scene.render_layer_add", icon='ZOOMIN', text="")
col.operator("scene.render_layer_remove", icon='ZOOMOUT', text="")
row = layout.row()
rl = rd.layers.active
if rl:
row.prop(rl, "name")
row.prop(rd, "use_single_layer", text="", icon_only=True)
class RENDERLAYER_PT_layer_options(RenderLayerButtonsPanel, Panel):
bl_label = "Layer"
COMPAT_ENGINES = {'BLENDER_RENDER'}
def draw(self, context):
layout = self.layout
scene = context.scene
rd = scene.render
rl = rd.layers.active
split = layout.split()
col = split.column()
col.prop(scene, "layers", text="Scene")
col.label(text="")
col.prop(rl, "light_override", text="Light")
col.prop(rl, "material_override", text="Material")
col = split.column()
col.prop(rl, "layers", text="Layer")
col.prop(rl, "layers_zmask", text="Mask Layer")
layout.separator()
layout.label(text="Include:")
split = layout.split()
col = split.column()
col.prop(rl, "use_zmask")
row = col.row()
row.prop(rl, "invert_zmask", text="Negate")
row.active = rl.use_zmask
col.prop(rl, "use_all_z")
col = split.column()
col.prop(rl, "use_solid")
col.prop(rl, "use_halo")
col.prop(rl, "use_ztransp")
col = split.column()
col.prop(rl, "use_sky")
col.prop(rl, "use_edge_enhance")
col.prop(rl, "use_strand")
if bpy.app.build_options.freestyle:
row = col.row()
row.prop(rl, "use_freestyle")
row.active = rd.use_freestyle
class RENDERLAYER_PT_layer_passes(RenderLayerButtonsPanel, Panel):
bl_label = "Passes"
bl_options = {'DEFAULT_CLOSED'}
COMPAT_ENGINES = {'BLENDER_RENDER'}
def draw_pass_type_buttons(self, box, rl, pass_type):
# property names
use_pass_type = "use_pass_" + pass_type
exclude_pass_type = "exclude_" + pass_type
# draw pass type buttons
row = box.row()
row.prop(rl, use_pass_type)
row.prop(rl, exclude_pass_type, text="")
def draw(self, context):
layout = self.layout
scene = context.scene
rd = scene.render
rl = rd.layers.active
split = layout.split()
col = split.column()
col.prop(rl, "use_pass_combined")
col.prop(rl, "use_pass_z")
col.prop(rl, "use_pass_vector")
col.prop(rl, "use_pass_normal")
col.prop(rl, "use_pass_uv")
col.prop(rl, "use_pass_mist")
col.prop(rl, "use_pass_object_index")
col.prop(rl, "use_pass_material_index")
col.prop(rl, "use_pass_color")
col = split.column()
col.prop(rl, "use_pass_diffuse")
self.draw_pass_type_buttons(col, rl, "specular")
self.draw_pass_type_buttons(col, rl, "shadow")
self.draw_pass_type_buttons(col, rl, "emit")
self.draw_pass_type_buttons(col, rl, "ambient_occlusion")
self.draw_pass_type_buttons(col, rl, "environment")
self.draw_pass_type_buttons(col, rl, "indirect")
self.draw_pass_type_buttons(col, rl, "reflection")
self.draw_pass_type_buttons(col, rl, "refraction")
if __name__ == "__main__": # only for live edit.
bpy.utils.register_module(__name__)<|fim▁end|> | bl_region_type = 'WINDOW'
bl_context = "render_layer"
# COMPAT_ENGINES must be defined in each subclass, external engines can add themselves here
|
<|file_name|>json.rs<|end_file_name|><|fim▁begin|>#![feature(test)]
extern crate test;
use self::test::Bencher;
use std::fs::File;
use std::io::Read;
extern crate pom;
#[path = "../examples/json.rs"]
mod json;
#[bench]
fn json_byte(b: &mut Bencher) {
let mut file = File::open("assets/data.json").unwrap();<|fim▁hole|>
b.iter(|| {
json::json().parse(&input).ok();
});
}<|fim▁end|> | let mut input = Vec::new();
file.read_to_end(&mut input).unwrap(); |
<|file_name|>delegate-token.hpp<|end_file_name|><|fim▁begin|>/*
* The MIT License (MIT)
*
* Copyright (c) 2015 Freeman Zhang <[email protected]>
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use, copy,
* modify, merge, publish, distribute, sublicense, and/or sell copies
* of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
* BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
* ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
#pragma once
#include <cppevent/delegate.hpp>
#include <cppevent/invokable-token.hpp>
namespace CppEvent {
template<typename ... ParamTypes>
class DelegateToken : public InvokableToken < ParamTypes... >
{
public:
DelegateToken() = delete;
inline DelegateToken(const Delegate<void, ParamTypes...>& d);
virtual ~DelegateToken();
virtual void Invoke(ParamTypes... Args) override;
const Delegate<void, ParamTypes...>& delegate () const
{
return delegate_;
}
private:<|fim▁hole|>
template<typename ... ParamTypes>
inline DelegateToken<ParamTypes...>::DelegateToken(const Delegate<void, ParamTypes...>& d)
: InvokableToken<ParamTypes...>(), delegate_(d)
{
}
template<typename ... ParamTypes>
DelegateToken<ParamTypes...>::~DelegateToken()
{
}
template<typename ... ParamTypes>
void DelegateToken<ParamTypes...>::Invoke(ParamTypes... Args)
{
delegate_(Args...);
}
} // namespace CppEvent<|fim▁end|> |
Delegate<void, ParamTypes...> delegate_;
}; |
<|file_name|>12709(Falling_Ants).cpp<|end_file_name|><|fim▁begin|>#include <iostream>
#include <string>
#include <algorithm>
#include <fstream>
#define IN cin
using namespace std;
//bool sort_logic(int i, int j);
//void pr(int data[], int length);
// EOF Termination
int main(){
int __initial_input, max_index, rear;
int register i;
//ifstream input;
//input.open("input.txt");
for(IN >> __initial_input; __initial_input != 0; IN >> __initial_input){
int data[__initial_input][3];
for(i = 0; i < __initial_input; i++){
IN >> data[i][0] >> data[i][1] >> data[i][2];
}
for(i = 0, max_index = 0; i < __initial_input; i++){
if(data[i][2] > data[max_index][2]){
max_index = i;
}
}
<|fim▁hole|> for(i = 0; i < __initial_input; i++){
if(data[max_index][2] == data[i][2]){
max_volumes[rear] = i;
rear++;
}
}
// Piciking up the optimal result
for(i = 0, max_index = max_volumes[i]; i < rear; i++){
if( (data[max_volumes[i]][0] * data[max_volumes[i]][1] * data[max_volumes[i]][2]) > (data[max_index][0] * data[max_index][1] * data[max_index][2]) ){
max_index = max_volumes[i];
}
}
cout << (data[max_index][0] * data[max_index][1] * data[max_index][2]) << endl;
}
//input.close();
return 0;
}
// Input Termination
/*int main(){
int __initial_input;
for(cin >> __initial_input; __initial_input != 0; cin >> __initial_input){
cout << "Initail Input ::" << __initial_input << endl;
}
return 0;
}*/
// Case Number
/*int main(){
int __initial_input, __case_number;
//ifstream input;
//input.open("input.txt");
for(IN >> __initial_input, __case_number = 1; __case_number <= __initial_input; __case_number++){
cout << "Case #" << __case_number << endl;
}
//input.close();
return 0;
}
bool sort_logic(int i, int j){
return i > j;
}
void pr(int data[], int length){
int i;
for(i = 0; i< length; i++) cout << data[i] << " ";
cout << endl;
}
*/<|fim▁end|> | // Cheacking if has any equeal
int max_volumes[__initial_input];
rear = 0; |
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for node-hook 1.0
// Project: https://github.com/bahmutov/node-hook#readme
// Definitions by: Nathan Hardy <https://github.com/nhardy>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
interface Options {
verbose?: boolean | undefined;
}
<|fim▁hole|>interface NodeHook {
hook: {
(extension: string, transform: Transform, options?: Options): void;
(transform: Transform, _?: undefined, options?: Options): void;
};
unhook(extension?: string): void;
}
declare const hook: NodeHook;
export = hook;<|fim▁end|> | type Transform = (source: string, filename: string) => string;
|
<|file_name|>es_primo_v1.py<|end_file_name|><|fim▁begin|>n = int(raw_input('Ingrese n: '))
es_primo = True
d = 2
while d < n:
if n % d == 0:
es_primo = False
d = d + 1
if es_primo:
print(n, 'es primo')<|fim▁hole|> print(n, 'es compuesto')<|fim▁end|> | else: |
<|file_name|>test_ec2service.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2014 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import posixpath
import unittest
from oslo.config import cfg
from six.moves.urllib import error
from cloudbaseinit.metadata.services import base
from cloudbaseinit.metadata.services import ec2service
CONF = cfg.CONF
class EC2ServiceTest(unittest.TestCase):
def setUp(self):
CONF.set_override('retry_count_interval', 0)
self._service = ec2service.EC2Service()
<|fim▁hole|> side_effect):
mock_get_host_name.side_effect = [side_effect]
response = self._service.load()
mock_check_metadata_ip_route.assert_called_once_with(
CONF.ec2_metadata_base_url)
mock_get_host_name.assert_called_once()
if side_effect is Exception:
self.assertFalse(response)
else:
self.assertTrue(response)
def test_load(self):
self._test_load(side_effect=None)
def test_load_exception(self):
self._test_load(side_effect=Exception)
@mock.patch('six.moves.urllib.request.urlopen')
def _test_get_response(self, mock_urlopen, ret_value):
req = mock.MagicMock()
mock_urlopen.side_effect = [ret_value]
is_instance = isinstance(ret_value, error.HTTPError)
if is_instance and ret_value.code == 404:
self.assertRaises(base.NotExistingMetadataException,
self._service._get_response, req)
elif is_instance and ret_value.code != 404:
self.assertRaises(error.HTTPError,
self._service._get_response, req)
else:
response = self._service._get_response(req)
self.assertEqual(ret_value, response)
mock_urlopen.assert_called_once_with(req)
def test_get_response(self):
self._test_get_response(ret_value=None)
def test_get_response_error_404(self):
err = error.HTTPError("http://169.254.169.254/", 404,
'test error 404', {}, None)
self._test_get_response(ret_value=err)
def test_get_response_error_other(self):
err = error.HTTPError("http://169.254.169.254/", 409,
'test error 409', {}, None)
self._test_get_response(ret_value=err)
@mock.patch('six.moves.urllib.request.Request')
@mock.patch('cloudbaseinit.metadata.services.ec2service.EC2Service'
'._get_response')
def test_get_data(self, mock_get_response, mock_Request):
response = self._service._get_data('fake')
fake_path = posixpath.join(CONF.ec2_metadata_base_url, 'fake')
mock_Request.assert_called_once_with(fake_path)
mock_get_response.assert_called_once_with(mock_Request())
self.assertEqual(mock_get_response.return_value.read.return_value,
response)
@mock.patch('cloudbaseinit.metadata.services.ec2service.EC2Service'
'._get_cache_data')
def test_get_host_name(self, mock_get_cache_data):
response = self._service.get_host_name()
mock_get_cache_data.assert_called_once_with(
'%s/meta-data/local-hostname' % self._service._metadata_version)
self.assertEqual(mock_get_cache_data.return_value, response)
@mock.patch('cloudbaseinit.metadata.services.ec2service.EC2Service'
'._get_cache_data')
def test_get_instance_id(self, mock_get_cache_data):
response = self._service.get_instance_id()
mock_get_cache_data.assert_called_once_with(
'%s/meta-data/instance-id' % self._service._metadata_version)
self.assertEqual(mock_get_cache_data.return_value, response)
@mock.patch('cloudbaseinit.metadata.services.ec2service.EC2Service'
'._get_cache_data')
def test_get_public_keys(self, mock_get_cache_data):
mock_get_cache_data.side_effect = ['key=info', 'fake key']
response = self._service.get_public_keys()
expected = [
mock.call('%s/meta-data/public-keys' %
self._service._metadata_version),
mock.call('%(version)s/meta-data/public-keys/%('
'idx)s/openssh-key' %
{'version': self._service._metadata_version,
'idx': 'key'})]
self.assertEqual(expected, mock_get_cache_data.call_args_list)
self.assertEqual(['fake key'], response)<|fim▁end|> | @mock.patch('cloudbaseinit.utils.network.check_metadata_ip_route')
@mock.patch('cloudbaseinit.metadata.services.ec2service.EC2Service'
'.get_host_name')
def _test_load(self, mock_get_host_name, mock_check_metadata_ip_route, |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""<|fim▁hole|>Tools and data structures for working with genomic intervals (or sets of
regions on a line in general) efficiently.
"""
# For compatiblity with existing stuff
from bx.intervals.intersection import *<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from ._StabData import *<|fim▁hole|>from ._MotorData import *
from ._GyroData import *<|fim▁end|> | from ._AccelData import * |
<|file_name|>build.js<|end_file_name|><|fim▁begin|>'use strict'
<|fim▁hole|> require('./build/node')(gulp)
require('./build/browser')(gulp)
require('./clean')(gulp)
gulp.task('build', ['build:browser'])
}<|fim▁end|> | module.exports = (gulp) => { |
<|file_name|>main.cpp<|end_file_name|><|fim▁begin|>#include <rxcpp/rx-lite.hpp>
#include <rxcpp/operators/rx-reduce.hpp>
#include <rxcpp/operators/rx-filter.hpp>
#include <rxcpp/operators/rx-map.hpp>
#include <rxcpp/operators/rx-tap.hpp>
#include <rxcpp/operators/rx-concat_map.hpp>
#include <rxcpp/operators/rx-flat_map.hpp>
#include <rxcpp/operators/rx-concat.hpp>
#include <rxcpp/operators/rx-merge.hpp>
#include <rxcpp/operators/rx-repeat.hpp>
#include <rxcpp/operators/rx-publish.hpp>
#include <rxcpp/operators/rx-ref_count.hpp>
#include <rxcpp/operators/rx-window.hpp>
#include <rxcpp/operators/rx-window_toggle.hpp>
namespace Rx {
using namespace rxcpp;
using namespace rxcpp::sources;
using namespace rxcpp::operators;
using namespace rxcpp::util;
}
using namespace Rx;
#include <regex>
#include <random>
using namespace std;
using namespace std::chrono;
int main()
{
random_device rd; // non-deterministic generator
mt19937 gen(rd());
uniform_int_distribution<> dist(4, 18);
// for testing purposes, produce byte stream that from lines of text
auto bytes = range(0, 10) |
flat_map([&](int i){
auto body = from((uint8_t)('A' + i)) |
repeat(dist(gen)) |
as_dynamic();
auto delim = from((uint8_t)'\r');
return from(body, delim) | concat();
}) |
window(17) |
flat_map([](observable<uint8_t> w){
return w |
reduce(
vector<uint8_t>(),
[](vector<uint8_t> v, uint8_t b){
v.push_back(b);
return v;
}) |
as_dynamic();
}) |
tap([](vector<uint8_t>& v){
// print input packet of bytes
copy(v.begin(), v.end(), ostream_iterator<long>(cout, " "));<|fim▁hole|> cout << endl;
});
//
// recover lines of text from byte stream
//
auto removespaces = [](string s){
s.erase(remove_if(s.begin(), s.end(), ::isspace), s.end());
return s;
};
// create strings split on \r
auto strings = bytes |
concat_map([](vector<uint8_t> v){
string s(v.begin(), v.end());
regex delim(R"/(\r)/");
cregex_token_iterator cursor(&s[0], &s[0] + s.size(), delim, {-1, 0});
cregex_token_iterator end;
vector<string> splits(cursor, end);
return iterate(move(splits));
}) |
filter([](const string& s){
return !s.empty();
}) |
publish() |
ref_count();
// filter to last string in each line
auto closes = strings |
filter(
[](const string& s){
return s.back() == '\r';
}) |
Rx::map([](const string&){return 0;});
// group strings by line
auto linewindows = strings |
window_toggle(closes | start_with(0), [=](int){return closes;});
// reduce the strings for a line into one string
auto lines = linewindows |
flat_map([&](observable<string> w) {
return w | start_with<string>("") | sum() | Rx::map(removespaces);
});
// print result
lines |
subscribe<string>(println(cout));
return 0;
}<|fim▁end|> | |
<|file_name|>test_install.py<|end_file_name|><|fim▁begin|>import os
import textwrap
import glob
from os.path import join, curdir, pardir
import pytest
from pip.utils import rmtree
from tests.lib import pyversion
from tests.lib.local_repos import local_checkout
from tests.lib.path import Path
@pytest.mark.network
def test_without_setuptools(script):
script.run("pip", "uninstall", "setuptools", "-y")
result = script.run(
"python", "-c",
"import pip; pip.main(['install', 'INITools==0.2', '--no-use-wheel'])",
expect_error=True,
)
assert (
"setuptools must be installed to install from a source distribution"
in result.stdout
)
@pytest.mark.network
def test_pip_second_command_line_interface_works(script):
"""
Check if ``pip<PYVERSION>`` commands behaves equally
"""
args = ['pip%s' % pyversion]
args.extend(['install', 'INITools==0.2'])
result = script.run(*args)
egg_info_folder = (
script.site_packages / 'INITools-0.2-py%s.egg-info' % pyversion
)
initools_folder = script.site_packages / 'initools'
assert egg_info_folder in result.files_created, str(result)
assert initools_folder in result.files_created, str(result)
@pytest.mark.network
def test_install_from_pypi(script):
"""
Test installing a package from PyPI.
"""
result = script.pip('install', '-vvv', 'INITools==0.2')
egg_info_folder = (
script.site_packages / 'INITools-0.2-py%s.egg-info' % pyversion
)
initools_folder = script.site_packages / 'initools'
assert egg_info_folder in result.files_created, str(result)
assert initools_folder in result.files_created, str(result)
def test_editable_install(script):
"""
Test editable installation.
"""
result = script.pip('install', '-e', 'INITools==0.2', expect_error=True)
assert (
"INITools==0.2 should either be a path to a local project or a VCS url"
in result.stdout
)
assert not result.files_created
assert not result.files_updated
@pytest.mark.network
def test_install_editable_from_svn(script, tmpdir):
"""
Test checking out from svn.
"""
result = script.pip(
'install',
'-e',
'%s#egg=initools-dev' %
local_checkout(
'svn+http://svn.colorstudy.com/INITools/trunk',
tmpdir.join("cache")
)
)
result.assert_installed('INITools', with_files=['.svn'])
@pytest.mark.network
def test_download_editable_to_custom_path(script, tmpdir):
"""
Test downloading an editable using a relative custom src folder.
"""
script.scratch_path.join("customdl").mkdir()
result = script.pip(
'install',
'-e',
'%s#egg=initools-dev' %
local_checkout(
'svn+http://svn.colorstudy.com/INITools/trunk',
tmpdir.join("cache")
),
'--src',
'customsrc',
'--download',
'customdl',
)
customsrc = Path('scratch') / 'customsrc' / 'initools'
assert customsrc in result.files_created, (
sorted(result.files_created.keys())
)
assert customsrc / 'setup.py' in result.files_created, (
sorted(result.files_created.keys())
)
customdl = Path('scratch') / 'customdl' / 'initools'
customdl_files_created = [
filename for filename in result.files_created
if filename.startswith(customdl)
]
assert customdl_files_created
@pytest.mark.network
def test_editable_no_install_followed_by_no_download(script, tmpdir):
"""
Test installing an editable in two steps (first with --no-install, then
with --no-download).
"""
result = script.pip(
'install',
'-e',
'%s#egg=initools-dev' %
local_checkout(
'svn+http://svn.colorstudy.com/INITools/trunk',
tmpdir.join("cache"),
),
'--no-install',
expect_error=True,
)
result.assert_installed(
'INITools', without_egg_link=True, with_files=['.svn'],
)
result = script.pip(
'install',
'-e',
'%s#egg=initools-dev' %
local_checkout(
'svn+http://svn.colorstudy.com/INITools/trunk',
tmpdir.join("cache"),
),
'--no-download',
expect_error=True,
)
result.assert_installed('INITools', without_files=[curdir, '.svn'])
@pytest.mark.network
def test_no_install_followed_by_no_download(script):
"""
Test installing in two steps (first with --no-install, then with
--no-download).
"""
egg_info_folder = (
script.site_packages / 'INITools-0.2-py%s.egg-info' % pyversion
)
initools_folder = script.site_packages / 'initools'
build_dir = script.venv / 'build' / 'INITools'
result1 = script.pip(
'install', 'INITools==0.2', '--no-install', expect_error=True,
)
assert egg_info_folder not in result1.files_created, str(result1)
assert initools_folder not in result1.files_created, (
sorted(result1.files_created)
)
assert build_dir in result1.files_created, result1.files_created
assert build_dir / 'INITools.egg-info' in result1.files_created
result2 = script.pip(
'install', 'INITools==0.2', '--no-download', expect_error=True,
)
assert egg_info_folder in result2.files_created, str(result2)
assert initools_folder in result2.files_created, (
sorted(result2.files_created)
)
assert build_dir not in result2.files_created
assert build_dir / 'INITools.egg-info' not in result2.files_created
def test_bad_install_with_no_download(script):
"""
Test that --no-download behaves sensibly if the package source can't be
found.
"""
result = script.pip(
'install', 'INITools==0.2', '--no-download', expect_error=True,
)
assert (
"perhaps --no-download was used without first running "
"an equivalent install with --no-install?" in result.stdout
)
@pytest.mark.network
def test_install_dev_version_from_pypi(script):
"""
Test using package==dev.
"""
result = script.pip(
'install', 'INITools===dev',
'--allow-external', 'INITools',
'--allow-unverified', 'INITools',
expect_error=True,
)
assert (script.site_packages / 'initools') in result.files_created, (
str(result.stdout)
)
@pytest.mark.network
def test_install_editable_from_git(script, tmpdir):
"""
Test cloning from Git.
"""
args = ['install']
args.extend([
'-e',
'%s#egg=pip-test-package' %
local_checkout(
'git+http://github.com/pypa/pip-test-package.git',
tmpdir.join("cache"),
),
])
result = script.pip(*args, **{"expect_error": True})
result.assert_installed('pip-test-package', with_files=['.git'])
@pytest.mark.network
def test_install_editable_from_hg(script, tmpdir):
"""
Test cloning from Mercurial.
"""
result = script.pip(
'install', '-e',
'%s#egg=ScriptTest' %
local_checkout(
'hg+https://bitbucket.org/ianb/scripttest',
tmpdir.join("cache"),
),
expect_error=True,
)
result.assert_installed('ScriptTest', with_files=['.hg'])
@pytest.mark.network
def test_vcs_url_final_slash_normalization(script, tmpdir):
"""
Test that presence or absence of final slash in VCS URL is normalized.
"""
script.pip(
'install', '-e',
'%s/#egg=ScriptTest' %
local_checkout(
'hg+https://bitbucket.org/ianb/scripttest',
tmpdir.join("cache"),
),
)
@pytest.mark.network
def test_install_editable_from_bazaar(script, tmpdir):
"""
Test checking out from Bazaar.
"""
result = script.pip(
'install', '-e',
'%s/@174#egg=django-wikiapp' %
local_checkout(
'bzr+http://bazaar.launchpad.net/%7Edjango-wikiapp/django-wikiapp'
'/release-0.1',
tmpdir.join("cache"),
),
expect_error=True,
)
result.assert_installed('django-wikiapp', with_files=['.bzr'])
@pytest.mark.network
def test_vcs_url_urlquote_normalization(script, tmpdir):
"""
Test that urlquoted characters are normalized for repo URL comparison.
"""
script.pip(
'install', '-e',
'%s/#egg=django-wikiapp' %
local_checkout(
'bzr+http://bazaar.launchpad.net/%7Edjango-wikiapp/django-wikiapp'
'/release-0.1',
tmpdir.join("cache"),
),
)
def test_install_from_local_directory(script, data):
"""
Test installing from a local directory.<|fim▁hole|> result = script.pip('install', to_install, expect_error=False)
fspkg_folder = script.site_packages / 'fspkg'
egg_info_folder = (
script.site_packages / 'FSPkg-0.1.dev0-py%s.egg-info' % pyversion
)
assert fspkg_folder in result.files_created, str(result.stdout)
assert egg_info_folder in result.files_created, str(result)
def test_install_from_local_directory_with_symlinks_to_directories(
script, data):
"""
Test installing from a local directory containing symlinks to directories.
"""
to_install = data.packages.join("symlinks")
result = script.pip('install', to_install, expect_error=False)
pkg_folder = script.site_packages / 'symlinks'
egg_info_folder = (
script.site_packages / 'symlinks-0.1.dev0-py%s.egg-info' % pyversion
)
assert pkg_folder in result.files_created, str(result.stdout)
assert egg_info_folder in result.files_created, str(result)
def test_install_from_local_directory_with_no_setup_py(script, data):
"""
Test installing from a local directory with no 'setup.py'.
"""
result = script.pip('install', data.root, expect_error=True)
assert not result.files_created
assert "is not installable. File 'setup.py' not found." in result.stdout
def test_editable_install_from_local_directory_with_no_setup_py(script, data):
"""
Test installing from a local directory with no 'setup.py'.
"""
result = script.pip('install', '-e', data.root, expect_error=True)
assert not result.files_created
assert "is not installable. File 'setup.py' not found." in result.stdout
def test_install_as_egg(script, data):
"""
Test installing as egg, instead of flat install.
"""
to_install = data.packages.join("FSPkg")
result = script.pip('install', to_install, '--egg', expect_error=False)
fspkg_folder = script.site_packages / 'fspkg'
egg_folder = script.site_packages / 'FSPkg-0.1.dev0-py%s.egg' % pyversion
assert fspkg_folder not in result.files_created, str(result.stdout)
assert egg_folder in result.files_created, str(result)
assert join(egg_folder, 'fspkg') in result.files_created, str(result)
def test_install_curdir(script, data):
"""
Test installing current directory ('.').
"""
run_from = data.packages.join("FSPkg")
# Python 2.4 Windows balks if this exists already
egg_info = join(run_from, "FSPkg.egg-info")
if os.path.isdir(egg_info):
rmtree(egg_info)
result = script.pip('install', curdir, cwd=run_from, expect_error=False)
fspkg_folder = script.site_packages / 'fspkg'
egg_info_folder = (
script.site_packages / 'FSPkg-0.1.dev0-py%s.egg-info' % pyversion
)
assert fspkg_folder in result.files_created, str(result.stdout)
assert egg_info_folder in result.files_created, str(result)
def test_install_pardir(script, data):
"""
Test installing parent directory ('..').
"""
run_from = data.packages.join("FSPkg", "fspkg")
result = script.pip('install', pardir, cwd=run_from, expect_error=False)
fspkg_folder = script.site_packages / 'fspkg'
egg_info_folder = (
script.site_packages / 'FSPkg-0.1.dev0-py%s.egg-info' % pyversion
)
assert fspkg_folder in result.files_created, str(result.stdout)
assert egg_info_folder in result.files_created, str(result)
@pytest.mark.network
def test_install_global_option(script):
"""
Test using global distutils options.
(In particular those that disable the actual install action)
"""
result = script.pip(
'install', '--global-option=--version', "INITools==0.1",
)
assert '0.1\n' in result.stdout
def test_install_with_pax_header(script, data):
"""
test installing from a tarball with pax header for python<2.6
"""
script.pip('install', 'paxpkg.tar.bz2', cwd=data.packages)
def test_install_with_hacked_egg_info(script, data):
"""
test installing a package which defines its own egg_info class
"""
run_from = data.packages.join("HackedEggInfo")
result = script.pip('install', '.', cwd=run_from)
assert 'Successfully installed hackedegginfo-0.0.0\n' in result.stdout
@pytest.mark.network
def test_install_using_install_option_and_editable(script, tmpdir):
"""
Test installing a tool using -e and --install-option
"""
folder = 'script_folder'
script.scratch_path.join(folder).mkdir()
url = 'git+git://github.com/pypa/pip-test-package'
result = script.pip(
'install', '-e', '%s#egg=pip-test-package' %
local_checkout(url, tmpdir.join("cache")),
'--install-option=--script-dir=%s' % folder
)
script_file = (
script.venv / 'src' / 'pip-test-package' /
folder / 'pip-test-package' + script.exe
)
assert script_file in result.files_created
@pytest.mark.network
def test_install_global_option_using_editable(script, tmpdir):
"""
Test using global distutils options, but in an editable installation
"""
url = 'hg+http://bitbucket.org/runeh/anyjson'
result = script.pip(
'install', '--global-option=--version', '-e',
'%[email protected]#egg=anyjson' % local_checkout(url, tmpdir.join("cache"))
)
assert 'Successfully installed anyjson' in result.stdout
@pytest.mark.network
def test_install_package_with_same_name_in_curdir(script):
"""
Test installing a package with the same name of a local folder
"""
script.scratch_path.join("mock==0.6").mkdir()
result = script.pip('install', 'mock==0.6')
egg_folder = script.site_packages / 'mock-0.6.0-py%s.egg-info' % pyversion
assert egg_folder in result.files_created, str(result)
mock100_setup_py = textwrap.dedent('''\
from setuptools import setup
setup(name='mock',
version='100.1')''')
def test_install_folder_using_dot_slash(script):
"""
Test installing a folder using pip install ./foldername
"""
script.scratch_path.join("mock").mkdir()
pkg_path = script.scratch_path / 'mock'
pkg_path.join("setup.py").write(mock100_setup_py)
result = script.pip('install', './mock')
egg_folder = script.site_packages / 'mock-100.1-py%s.egg-info' % pyversion
assert egg_folder in result.files_created, str(result)
def test_install_folder_using_slash_in_the_end(script):
r"""
Test installing a folder using pip install foldername/ or foldername\
"""
script.scratch_path.join("mock").mkdir()
pkg_path = script.scratch_path / 'mock'
pkg_path.join("setup.py").write(mock100_setup_py)
result = script.pip('install', 'mock' + os.path.sep)
egg_folder = script.site_packages / 'mock-100.1-py%s.egg-info' % pyversion
assert egg_folder in result.files_created, str(result)
def test_install_folder_using_relative_path(script):
"""
Test installing a folder using pip install folder1/folder2
"""
script.scratch_path.join("initools").mkdir()
script.scratch_path.join("initools", "mock").mkdir()
pkg_path = script.scratch_path / 'initools' / 'mock'
pkg_path.join("setup.py").write(mock100_setup_py)
result = script.pip('install', Path('initools') / 'mock')
egg_folder = script.site_packages / 'mock-100.1-py%s.egg-info' % pyversion
assert egg_folder in result.files_created, str(result)
@pytest.mark.network
def test_install_package_which_contains_dev_in_name(script):
"""
Test installing package from pypi which contains 'dev' in name
"""
result = script.pip('install', 'django-devserver==0.0.4')
devserver_folder = script.site_packages / 'devserver'
egg_info_folder = (
script.site_packages / 'django_devserver-0.0.4-py%s.egg-info' %
pyversion
)
assert devserver_folder in result.files_created, str(result.stdout)
assert egg_info_folder in result.files_created, str(result)
@pytest.mark.network
def test_install_package_with_target(script):
"""
Test installing a package using pip install --target
"""
target_dir = script.scratch_path / 'target'
result = script.pip('install', '-t', target_dir, "initools==0.1")
assert Path('scratch') / 'target' / 'initools' in result.files_created, (
str(result)
)
# Test repeated call without --upgrade, no files should have changed
result = script.pip('install', '-t', target_dir, "initools==0.1")
assert not Path('scratch') / 'target' / 'initools' in result.files_updated
# Test upgrade call, check that new version is installed
result = script.pip('install', '--upgrade', '-t',
target_dir, "initools==0.2")
assert Path('scratch') / 'target' / 'initools' in result.files_updated, (
str(result)
)
egg_folder = (
Path('scratch') / 'target' / 'INITools-0.2-py%s.egg-info' % pyversion)
assert egg_folder in result.files_created, (
str(result)
)
# Test install and upgrade of single-module package
result = script.pip('install', '-t', target_dir, 'six')
assert Path('scratch') / 'target' / 'six.py' in result.files_created, (
str(result)
)
result = script.pip('install', '-t', target_dir, '--upgrade', 'six')
assert Path('scratch') / 'target' / 'six.py' in result.files_updated, (
str(result)
)
def test_install_package_with_root(script, data):
"""
Test installing a package using pip install --root
"""
root_dir = script.scratch_path / 'root'
result = script.pip(
'install', '--root', root_dir, '-f', data.find_links, '--no-index',
'simple==1.0',
)
normal_install_path = (
script.base_path / script.site_packages / 'simple-1.0-py%s.egg-info' %
pyversion
)
# use distutils to change the root exactly how the --root option does it
from distutils.util import change_root
root_path = change_root(
os.path.join(script.scratch, 'root'),
normal_install_path
)
assert root_path in result.files_created, str(result)
# skip on win/py3 for now, see issue #782
@pytest.mark.skipif("sys.platform == 'win32' and sys.version_info >= (3,)")
def test_install_package_that_emits_unicode(script, data):
"""
Install a package with a setup.py that emits UTF-8 output and then fails.
Refs https://github.com/pypa/pip/issues/326
"""
to_install = data.packages.join("BrokenEmitsUTF8")
result = script.pip(
'install', to_install, expect_error=True, expect_temp=True, quiet=True,
)
assert (
'FakeError: this package designed to fail on install' in result.stdout
)
assert 'UnicodeDecodeError' not in result.stdout
def test_install_package_with_utf8_setup(script, data):
"""Install a package with a setup.py that declares a utf-8 encoding."""
to_install = data.packages.join("SetupPyUTF8")
script.pip('install', to_install)
def test_install_package_with_latin1_setup(script, data):
"""Install a package with a setup.py that declares a latin-1 encoding."""
to_install = data.packages.join("SetupPyLatin1")
script.pip('install', to_install)
def test_url_req_case_mismatch_no_index(script, data):
"""
tar ball url requirements (with no egg fragment), that happen to have upper
case project names, should be considered equal to later requirements that
reference the project name using lower case.
tests/packages contains Upper-1.0.tar.gz and Upper-2.0.tar.gz
'requiresupper' has install_requires = ['upper']
"""
Upper = os.path.join(data.find_links, 'Upper-1.0.tar.gz')
result = script.pip(
'install', '--no-index', '-f', data.find_links, Upper, 'requiresupper'
)
# only Upper-1.0.tar.gz should get installed.
egg_folder = script.site_packages / 'Upper-1.0-py%s.egg-info' % pyversion
assert egg_folder in result.files_created, str(result)
egg_folder = script.site_packages / 'Upper-2.0-py%s.egg-info' % pyversion
assert egg_folder not in result.files_created, str(result)
def test_url_req_case_mismatch_file_index(script, data):
"""
tar ball url requirements (with no egg fragment), that happen to have upper
case project names, should be considered equal to later requirements that
reference the project name using lower case.
tests/packages3 contains Dinner-1.0.tar.gz and Dinner-2.0.tar.gz
'requiredinner' has install_requires = ['dinner']
This test is similar to test_url_req_case_mismatch_no_index; that test
tests behaviour when using "--no-index -f", while this one does the same
test when using "--index-url". Unfortunately this requires a different
set of packages as it requires a prepared index.html file and
subdirectory-per-package structure.
"""
Dinner = os.path.join(data.find_links3, 'Dinner', 'Dinner-1.0.tar.gz')
result = script.pip(
'install', '--index-url', data.find_links3, Dinner, 'requiredinner'
)
# only Upper-1.0.tar.gz should get installed.
egg_folder = script.site_packages / 'Dinner-1.0-py%s.egg-info' % pyversion
assert egg_folder in result.files_created, str(result)
egg_folder = script.site_packages / 'Dinner-2.0-py%s.egg-info' % pyversion
assert egg_folder not in result.files_created, str(result)
def test_url_incorrect_case_no_index(script, data):
    """
    Same as test_url_req_case_mismatch_no_index, except testing for the case
    where the incorrect case is given in the name of the package to install
    rather than in a requirements file.
    """
    result = script.pip(
        'install', '--no-index', '-f', data.find_links, "upper",
    )
    # With no pinned URL, pip should resolve "upper" to the newest release,
    # so only Upper-2.0.tar.gz should get installed.
    egg_folder = script.site_packages / 'Upper-1.0-py%s.egg-info' % pyversion
    assert egg_folder not in result.files_created, str(result)
    egg_folder = script.site_packages / 'Upper-2.0-py%s.egg-info' % pyversion
    assert egg_folder in result.files_created, str(result)
def test_url_incorrect_case_file_index(script, data):
    """
    Same as test_url_req_case_mismatch_file_index, except testing for the case
    where the incorrect case is given in the name of the package to install
    rather than in a requirements file.
    """
    result = script.pip(
        'install', '--index-url', data.find_links3, "dinner",
    )
    # With no pinned URL, pip should resolve "dinner" to the newest release,
    # so only Dinner-2.0.tar.gz should get installed.
    egg_folder = script.site_packages / 'Dinner-1.0-py%s.egg-info' % pyversion
    assert egg_folder not in result.files_created, str(result)
    egg_folder = script.site_packages / 'Dinner-2.0-py%s.egg-info' % pyversion
    assert egg_folder in result.files_created, str(result)
@pytest.mark.network
def test_compiles_pyc(script):
    """
    Test installing with --compile on
    """
    # PYTHONDONTWRITEBYTECODE would suppress .pyc generation in the pip
    # subprocess and defeat the purpose of this test, so drop it.
    del script.environ["PYTHONDONTWRITEBYTECODE"]
    script.pip("install", "--compile", "--no-use-wheel", "INITools==0.2")
    # There are many locations for the __init__.pyc file so attempt to find
    # any of them
    exists = [
        os.path.exists(script.site_packages_path / "initools/__init__.pyc"),
    ]
    # Python 3 writes bytecode under __pycache__ with an interpreter tag.
    exists += glob.glob(
        script.site_packages_path / "initools/__pycache__/__init__*.pyc"
    )
    assert any(exists)
@pytest.mark.network
def test_no_compiles_pyc(script, data):
    """
    Test installing with --no-compile on: no .pyc files should be written.
    """
    # PYTHONDONTWRITEBYTECODE would mask the effect of --no-compile, so drop
    # it from the pip subprocess environment.
    del script.environ["PYTHONDONTWRITEBYTECODE"]
    script.pip("install", "--no-compile", "--no-use-wheel", "INITools==0.2")
    # There are many locations for the __init__.pyc file so attempt to find
    # any of them
    exists = [
        os.path.exists(script.site_packages_path / "initools/__init__.pyc"),
    ]
    # Python 3 writes bytecode under __pycache__ with an interpreter tag.
    exists += glob.glob(
        script.site_packages_path / "initools/__pycache__/__init__*.pyc"
    )
    assert not any(exists)
to_install = data.packages.join("FSPkg") |
<|file_name|>_font.py<|end_file_name|><|fim▁begin|>from plotly.basedatatypes import BaseLayoutHierarchyType as _BaseLayoutHierarchyType
import copy as _copy
class Font(_BaseLayoutHierarchyType):
# class properties
# --------------------
_parent_path_str = "layout.yaxis.title"
_path_str = "layout.yaxis.title.font"
_valid_props = {"color", "family", "size"}
<|fim▁hole|> def color(self):
"""
The 'color' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
Returns
-------
str
"""
return self["color"]
@color.setter
def color(self, val):
self["color"] = val
# family
# ------
@property
def family(self):
"""
HTML font family - the typeface that will be applied by the web
browser. The web browser will only be able to apply a font if
it is available on the system which it operates. Provide
multiple font families, separated by commas, to indicate the
preference in which to apply fonts if they aren't available on
the system. The Chart Studio Cloud (at https://chart-
studio.plotly.com or on-premise) generates images on a server,
where only a select number of fonts are installed and
supported. These include "Arial", "Balto", "Courier New",
"Droid Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
The 'family' property is a string and must be specified as:
- A non-empty string
Returns
-------
str
"""
return self["family"]
@family.setter
def family(self, val):
self["family"] = val
# size
# ----
@property
def size(self):
"""
The 'size' property is a number and may be specified as:
- An int or float in the interval [1, inf]
Returns
-------
int|float
"""
return self["size"]
@size.setter
def size(self, val):
self["size"] = val
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
color
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The Chart
Studio Cloud (at https://chart-studio.plotly.com or on-
premise) generates images on a server, where only a
select number of fonts are installed and supported.
These include "Arial", "Balto", "Courier New", "Droid
Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT
Sans Narrow", "Raleway", "Times New Roman".
size
"""
def __init__(self, arg=None, color=None, family=None, size=None, **kwargs):
"""
Construct a new Font object
Sets this axis' title font. Note that the title's font used to
be customized by the now deprecated `titlefont` attribute.
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of
:class:`plotly.graph_objs.layout.yaxis.title.Font`
color
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The Chart
Studio Cloud (at https://chart-studio.plotly.com or on-
premise) generates images on a server, where only a
select number of fonts are installed and supported.
These include "Arial", "Balto", "Courier New", "Droid
Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT
Sans Narrow", "Raleway", "Times New Roman".
size
Returns
-------
Font
"""
super(Font, self).__init__("font")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.layout.yaxis.title.Font
constructor must be a dict or
an instance of :class:`plotly.graph_objs.layout.yaxis.title.Font`"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("color", None)
_v = color if color is not None else _v
if _v is not None:
self["color"] = _v
_v = arg.pop("family", None)
_v = family if family is not None else _v
if _v is not None:
self["family"] = _v
_v = arg.pop("size", None)
_v = size if size is not None else _v
if _v is not None:
self["size"] = _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False<|fim▁end|> | # color
# -----
@property |
<|file_name|>trait-bounds-sugar.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Tests for "default" bounds inferred for traits with no bounds list.
trait Foo {}
fn a(_x: ~Foo:Send) {
}
fn b(_x: &'static Foo) { // should be same as &'static Foo:'static
}
<|fim▁hole|>fn c(x: ~Foo:Share) {
a(x); //~ ERROR expected bounds `Send`
}
fn d(x: &'static Foo:Share) {
b(x); //~ ERROR expected bounds `'static`
}
fn main() {}<|fim▁end|> | |
<|file_name|>decorators.py<|end_file_name|><|fim▁begin|>from functools import wraps
from django.utils.translation import ugettext as _
from django.contrib.admin.forms import AdminAuthenticationForm
from django.contrib.auth.views import login
from django.contrib.auth import REDIRECT_FIELD_NAME
def staff_member_required(backoffice):
def decorate(view_func):
"""
Decorator for views that checks that the user is logged in and is a staff
member, displaying the login page if necessary.
"""<|fim▁hole|> # The user is valid. Continue to the admin page.
return view_func(request, *args, **kwargs)
assert hasattr(request, 'session'), "Advanced Reports Backoffice requires session middleware to be installed. Edit your MIDDLEWARE_CLASSES setting to insert 'django.contrib.sessions.middleware.SessionMiddleware'."
defaults = {
'template_name': backoffice.login_template,
'authentication_form': AdminAuthenticationForm,
'extra_context': {
'backoffice': backoffice,
REDIRECT_FIELD_NAME: request.get_full_path(),
},
}
return login(request, **defaults)
return _checklogin
return decorate<|fim▁end|> | @wraps(view_func)
def _checklogin(request, *args, **kwargs):
if request.user.is_active and request.user.is_staff: |
<|file_name|>ex6.py<|end_file_name|><|fim▁begin|>#Pizza please<|fim▁hole|>spk = pyaudiogame.speak
MyApp = pyaudiogame.App("Pizza Please")
storage.screen = ["start"]
storage.toppings = ["cheese", "olives", "mushrooms", "Pepperoni", "french fries"]
storage.your_toppings = ["cheese"]
storage.did_run = False
def is_number(number, topping_list):
"""Will check that what the user enters is really a number and not a letter, also that it is within our list"""
if number in "0123456789":
number = int(number)
if number <= len(topping_list)-1:
return number
def say_message(message):
"""Will check if the message has been read and if so, passes. Else, it will read the message"""
if not storage.did_run:
spk(message)
storage.did_run = True
def add_topping(key):
"""Will add a topping to your pizza"""
number = is_number(key, storage.toppings)
if number or number == 0:
storage.your_toppings.append(storage.toppings[number])
spk("You added %s to your pizza. Your pizza currently has %s on top" % (storage.toppings[number], storage.your_toppings))
def remove_topping(key):
"""Removes toppings from the pizza"""
number = is_number(key, storage.your_toppings)
if number or number == 0:
t = storage.your_toppings.pop(number)
if t == "cheese":
spk("You can't remove cheese, what are you, Italian?")
storage.your_toppings.insert(0, "cheese")
else:
spk("You removed %s from your pizza. Now your pizza has %s on top" % (t, storage.your_toppings))
def logic(actions):
"""Press a and d to switch from adding and removing toppings, press 0-9 to deal with the toppings and press space to eat the pizza"""
key = actions['key']
if key == "d":
spk("Press a number to remove a topping from your pizza, press a to add toppings again")
storage.screen[0] = "remove"
storage.did_run = False
elif key == "a":
spk("Press a number to add a topping to your pizza. Press d to remove a topping you don't like")
storage.screen[0] = "add"
storage.did_run = False
elif key == "space":
spk("You sit down to enjoy a yummy pizza. You eat... eat... eat... eat... and are finally done. That was good! Now it's time for another!")
storage.your_toppings = ['cheese']
storage.did_run = False
elif storage.screen[0] == "start":
spk("Welcom to pizza madness! Here you can build your own pizza to eat! Press a to add toppings, press d to remove them and when you are done, press space to eat your yummy pizza!!!")
storage.screen.remove("start")
storage.screen.append("add")
elif storage.screen[0] == "add":
say_message("Please choose a number of toppings to add! Press d to start removing toppings. Toppings are %s" % storage.toppings)
if key:
add_topping(key)
elif storage.screen[0] == "remove" and key:
remove_topping(key)
MyApp.logic = logic
MyApp.run()<|fim▁end|> | import pyaudiogame
from pyaudiogame import storage |
<|file_name|>appsrc.rs<|end_file_name|><|fim▁begin|>use ffi::*;
use ::Transfer;
use ::Element;
use ::Caps;
use std::mem;
use reference::Reference;
use std::ops::{Deref, DerefMut};
pub struct AppSrc{
appsrc: ::Element
}
unsafe impl Sync for AppSrc {}
unsafe impl Send for AppSrc {}
impl AppSrc{
pub fn new(name: &str) -> Option<AppSrc>{
let appsrc = ::Element::new("appsrc",name);
match appsrc{
Some(appsrc) => Some(AppSrc{appsrc: appsrc}),
None => None
}
}
pub fn new_from_element(element: ::Element) -> AppSrc{
AppSrc{appsrc: element}
}
/// Set the capabilities on the `AppSrc`. After calling this method, the source will only
/// produce caps that match `caps`. Once caps is set, the caps on the buffers MUST either
/// match the caps OR be left unspecified.
///
/// Before operating an `AppSrc`, the `caps` property MUST be set to fixed caps describing
/// the format of the data that will be pushed with appsrc EXCEPT when pushing buffers with
/// unknown caps, in which case no caps should be set. This is typically true of file-like
/// sources that push raw byte buffers.
pub fn set_caps(&mut self, caps: &Caps){
unsafe{
gst_app_src_set_caps(self.gst_appsrc_mut(), caps.gst_caps());
}
}
pub fn caps(&self) -> Option<Caps>{
unsafe{
let gst_caps = gst_app_src_get_caps(mem::transmute(self.gst_appsrc()));
Caps::new(gst_caps)
}
}
pub fn latency(&self) -> (u64,u64){
unsafe{
let mut min: u64 = 0;
let mut max: u64 = 0;
gst_app_src_get_latency(mem::transmute(self.gst_appsrc()), &mut min, &mut max);
(min,max)
}
}
pub fn push_buffer(&mut self, buffer: ::Buffer) -> GstFlowReturn{
unsafe{
gst_app_src_push_buffer(self.gst_appsrc_mut(), buffer.transfer())
}
}
pub fn end_of_stream(&mut self) -> GstFlowReturn{
unsafe{
gst_app_src_end_of_stream(self.gst_appsrc_mut())
}
}
pub unsafe fn gst_appsrc(&self) -> *const GstAppSrc{
self.appsrc.gst_element() as *const GstAppSrc
}
pub unsafe fn gst_appsrc_mut(&mut self) -> *mut GstAppSrc{
self.appsrc.gst_element_mut() as *mut GstAppSrc
}
}
impl AsRef<::Element> for AppSrc{
fn as_ref(&self) -> &Element{
&self.appsrc
}
}
impl AsMut<::Element> for AppSrc{
fn as_mut(&mut self) -> &mut Element{
&mut self.appsrc
}
}
impl From<AppSrc> for Element{
fn from(b: AppSrc) -> Element{
b.appsrc
}
}
impl Deref for AppSrc{
type Target = Element;
fn deref(&self) -> &Element{
&self.appsrc
}
}
impl DerefMut for AppSrc{
fn deref_mut(&mut self) -> &mut Element{
&mut self.appsrc
}
}
impl ::Transfer for AppSrc{
unsafe fn transfer(self) -> *mut GstElement{<|fim▁hole|>impl Reference for AppSrc{
fn reference(&self) -> AppSrc{
AppSrc{ appsrc: self.appsrc.reference() }
}
}<|fim▁end|> | self.appsrc.transfer()
}
}
|
<|file_name|>monitor.go<|end_file_name|><|fim▁begin|>package nftables
import (
"github.com/evilsocket/opensnitch/daemon/log"
)
// AreRulesLoaded checks if the firewall rules for intercept traffic are loaded.
func (n *Nft) AreRulesLoaded() bool {
n.Lock()
defer n.Unlock()<|fim▁hole|> if err != nil {
log.Error("nftables mangle rules error: %s, %s", table.Name, n.outputChains[table].Name)
return false
}
for _, r := range rules {
if string(r.UserData) == fwKey {
nRules++
}
}
}
if nRules != 2 {
log.Warning("nftables mangle rules not loaded: %d", nRules)
return false
}
nRules = 0
for _, table := range n.filterTables {
rules, err := n.conn.GetRule(table, n.inputChains[table])
if err != nil {
log.Error("nftables filter rules error: %s, %s", table.Name, n.inputChains[table].Name)
return false
}
for _, r := range rules {
if string(r.UserData) == fwKey {
nRules++
}
}
}
if nRules != 2 {
log.Warning("nfables filter rules not loaded: %d", nRules)
return false
}
return true
}
// reloadRulesCallback is invoked when the nftables firewall rules have
// changed; it re-adds the system rules and re-inserts the interception
// rules so traffic keeps being filtered.
func (n *Nft) reloadRulesCallback() {
	log.Important("nftables firewall rules changed, reloading")
	n.AddSystemRules()
	n.InsertRules()
}
nRules := 0
for _, table := range n.mangleTables {
rules, err := n.conn.GetRule(table, n.outputChains[table]) |
<|file_name|>choicelists.py<|end_file_name|><|fim▁begin|># -*- coding: UTF-8 -*-
# Copyright 2016 Luc Saffre
# License: BSD (see file COPYING for details)
from lino.api import dd, _
class PartnerEvents(dd.ChoiceList):<|fim▁hole|><|fim▁end|> | verbose_name = _("Observed event")
verbose_name_plural = _("Observed events")
max_length = 50 |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>"This module provides a set of common utility functions."
__author__ = "Anders Logg"
__copyright__ = "Copyright (C) 2009 Simula Research Laboratory and %s" % __author__
__license__ = "GNU GPL Version 3 or any later version"
from math import ceil
from numpy import linspace
from dolfin import PeriodicBC, warning
def is_periodic(bcs):
    "Check if boundary conditions are periodic"
    # Explicit loop equivalent of all(); an empty sequence yields True.
    for bc in bcs:
        if not isinstance(bc, PeriodicBC):
            return False
    return True
def missing_function(function):
    "Write an informative error message when function has not been overloaded"
    # Fill in the %s placeholder -- the original passed the format string
    # through unformatted, so the message never named the missing function.
    name = getattr(function, "__name__", function)
    # NOTE(review): error() is not imported in this module (only PeriodicBC
    # and warning come from dolfin) -- presumably dolfin.error is intended;
    # confirm and add it to the dolfin import.
    error("The function %s() has not been specified. Please provide a specification of this function." % name)
def timestep_range(T, dt):
"""Return a matching time step range for given end time and time
step. Note that the time step may be adjusted so that it matches
the given end time."""
# Compute range
ds = dt
n = ceil(T / dt)
t_range = linspace(0, T, n + 1)[1:]<|fim▁hole|> # Warn about changing time step
if ds != dt:
warning("Changing time step from %g to %g" % (ds, dt))
return dt, t_range
def timestep_range_cfl(problem, mesh):
    """Return a sensible default time step and time step range based
    on an approximate CFL condition."""
    # Get problem parameters
    T = problem.end_time()
    dt = problem.time_step()
    # Set time step based on mesh if not specified: a quarter of the
    # smallest cell diameter is used as a CFL-like heuristic.
    if dt is None:
        dt = 0.25*mesh.hmin()
    # Delegate to timestep_range, which may adjust dt so it divides T evenly.
    return timestep_range(T, dt)
|
<|file_name|>RegisterActivity.java<|end_file_name|><|fim▁begin|>package com.fantasy.lulutong.activity.me;
import android.os.Bundle;
import android.view.View;
import android.widget.RelativeLayout;
import com.fantasy.lulutong.R;
import com.fantasy.lulutong.activity.BaseActivity;
/**
* “注册”的页面
* @author Fantasy
<|fim▁hole|> * @version 1.0, 2017-02-
*/
public class RegisterActivity extends BaseActivity {
    /** Container acting as the back button for this screen. */
    private RelativeLayout relativeBack;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.layout_register);
        // Tapping the back control simply closes the registration screen.
        relativeBack = (RelativeLayout) findViewById(R.id.relative_register_back);
        relativeBack.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                finish();
            }
        });
    }
}
<|file_name|>camera.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3.7
import ssl
import remi.gui as gui
from remi import start, App
class Camera(App):
def __init__(self, *args, **kwargs):
super(Camera, self).__init__(*args)
def video_widgets(self):
width = '300'
height = '300'
self.video = gui.Widget(_type='video')
self.video.style['overflow'] = 'hidden'
self.video.attributes['autoplay'] = 'true'
self.video.attributes['width'] = width
self.video.attributes['height'] = height
def video_start(self, widget, callback_function):
self.execute_javascript("""
var params={};
var frame = 0;
document.video_stop = false;
const video = document.querySelector('video');
video.setAttribute("playsinline", true);
const canvas = document.createElement('canvas');
navigator.mediaDevices.getUserMedia({video: { facingMode: { ideal: "environment" } }, audio: false}).
then((stream) => {video.srcObject = stream});
const render = () => {
if (document.video_stop) { return; }
if (frame==30) {
canvas.width = video.videoWidth;
canvas.height = video.videoHeight;
canvas.getContext('2d').drawImage(video, 0, 0);
params['image']=canvas.toDataURL()
remi.sendCallbackParam('%(id)s','%(callback_function)s',params)
frame = 0;
}
frame+=1;
requestAnimationFrame(render);
}
requestAnimationFrame(render);
"""%{'id':str(id(self)), 'callback_function': str(callback_function)})
def video_stop(self, widget):
self.execute_javascript("""
document.video_stop = true;
const video = document.querySelector('video');
video.srcObject.getTracks()[0].stop();
""")
def process_image(self, **kwargs):
image = kwargs['image']
print('I am here')
### Do whatever you want with the image here<|fim▁hole|> self.video_widgets()
screen = [self.video]
start_button = gui.Button('Start Video')
start_button.onclick.do(self.video_start, 'process_image')
screen.append(start_button)
stop_button = gui.Button('Stop Video')
stop_button.onclick.do(self.video_stop)
screen.append(stop_button)
return gui.VBox(children=screen)
if __name__ == "__main__":
start(Camera,
address='0.0.0.0',
port=2020,
multiple_instance=True,
enable_file_cache=True,
start_browser=False,
debug=False)
# certfile='./ssl_keys/fullchain.pem',
# keyfile='./ssl_keys/privkey.pem',
# ssl_version=ssl.PROTOCOL_TLSv1_2,<|fim▁end|> | return
def main(self): |
<|file_name|>poolmember.py<|end_file_name|><|fim▁begin|>import android
class SMSPoolMember:
def __init__(self, query):
self.droid = android.Android()
self.query = str(query).lstrip().rstrip()
def wifiConnected(self):<|fim▁hole|> def dataConnected(self):
return self.droid.getCellLocation().result["cid"] > -1
def sendResponse(self):
if self.query == "connection":
return "pool:" + str(self.wifiConnected() or self.dataConnected())
else:
return "pool: None"<|fim▁end|> | none = "<unknown ssid>"
return not self.droid.wifiGetConnectionInfo().result["ssid"] == none
|
<|file_name|>Activity.java<|end_file_name|><|fim▁begin|>package com.canigraduate.uchicago.models;
<|fim▁hole|><|fim▁end|> | public interface Activity {
} |
<|file_name|>XGUI.py<|end_file_name|><|fim▁begin|>import Base
import VS
import GUI<|fim▁hole|>XGUIRootSingleton = None
XGUIPythonScriptAPISingleton = None
"""----------------------------------------------------------------"""
""" """
""" XGUIRoot - root management interface for the XML-GUI framework."""
""" """
"""----------------------------------------------------------------"""
class XGUIRoot:
def __init__(self):
self.templates = {}
def getTemplate(self,type,name):
if type in self.templates and name in self.templates[type]:
return self.templates[type][name]
else:
return None
def addTemplate(self,tpl):
type = tpl.getType()
name = tpl.getName()
if not type in self.templates:
XGUIDebug.trace(1,"XGUI: Initializing template category \"" + str(type) + "\"\n")
self.templates[type] = {}
XGUIDebug.trace(2,"XGUI: Loading template \"" + str(name) + "\" into category \"" + str(type) + "\"\n")
self.templates[type][name] = tpl
class XGUIPythonScript:
def __init__(self,code,filename):
code = code.replace("\r\n","\n")
code += "\n"
self.code = compile(code,filename,'exec')
def execute(self,context):
exec(self.code, context)
return context
"""----------------------------------------------------------------"""
""" """
""" XGUIPythonScriptAPI - through this class, all PythonScript """
""" API calls are routed. """
""" """
"""----------------------------------------------------------------"""
class XGUIPythonScriptAPI:
def __init__(self,layout,room):
self.layout = layout
self.room = room
"""----------------------------------------------------------------"""
""" """
""" XGUI global initialization """
""" """
"""----------------------------------------------------------------"""
def XGUIInit():
    """Initialize the XML-GUI framework by creating the root singleton."""
    # Without the global declaration, the original assignment only bound a
    # function-local variable, leaving the module-level XGUIRootSingleton
    # as None forever.
    global XGUIRootSingleton
    XGUIRootSingleton = XGUIRoot()
import XGUIDebug
|
<|file_name|>gcp_compute_backend_service.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Google
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file at
# https://www.github.com/GoogleCloudPlatform/magic-modules
#
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
__metaclass__ = type
################################################################################
# Documentation
################################################################################
ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ["preview"], 'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gcp_compute_backend_service<|fim▁hole|>description:
- A Backend Service defines a group of virtual machines that will serve traffic for
load balancing. This resource is a global backend service, appropriate for external
load balancing or self-managed internal load balancing.
- For managed internal load balancing, use a regional backend service instead.
- Currently self-managed internal load balancing is only available in beta.
short_description: Creates a GCP BackendService
version_added: '2.6'
author: Google Inc. (@googlecloudplatform)
requirements:
- python >= 2.6
- requests >= 2.18.4
- google-auth >= 1.3.0
options:
state:
description:
- Whether the given object should exist in GCP
choices:
- present
- absent
default: present
type: str
affinity_cookie_ttl_sec:
description:
- Lifetime of cookies in seconds if session_affinity is GENERATED_COOKIE. If set
to 0, the cookie is non-persistent and lasts only until the end of the browser
session (or equivalent). The maximum allowed value for TTL is one day.
- When the load balancing scheme is INTERNAL, this field is not used.
required: false
type: int
backends:
description:
- The set of backends that serve this BackendService.
required: false
type: list
suboptions:
balancing_mode:
description:
- Specifies the balancing mode for this backend.
- For global HTTP(S) or TCP/SSL load balancing, the default is UTILIZATION.
Valid values are UTILIZATION, RATE (for HTTP(S)) and CONNECTION (for TCP/SSL).
- 'Some valid choices include: "UTILIZATION", "RATE", "CONNECTION"'
required: false
default: UTILIZATION
type: str
capacity_scaler:
description:
- A multiplier applied to the group's maximum servicing capacity (based on
UTILIZATION, RATE or CONNECTION).
- Default value is 1, which means the group will serve up to 100% of its configured
capacity (depending on balancingMode). A setting of 0 means the group is
completely drained, offering 0% of its available Capacity. Valid range is
[0.0,1.0].
required: false
default: '1.0'
type: str
description:
description:
- An optional description of this resource.
- Provide this property when you create the resource.
required: false
type: str
group:
description:
- The fully-qualified URL of an Instance Group or Network Endpoint Group resource.
In case of instance group this defines the list of instances that serve
traffic. Member virtual machine instances from each instance group must
live in the same zone as the instance group itself. No two backends in a
backend service are allowed to use same Instance Group resource.
- For Network Endpoint Groups this defines list of endpoints. All endpoints
of Network Endpoint Group must be hosted on instances located in the same
zone as the Network Endpoint Group.
- Backend service can not contain mix of Instance Group and Network Endpoint
Group backends.
- Note that you must specify an Instance Group or Network Endpoint Group resource
using the fully-qualified URL, rather than a partial URL.
required: false
type: str
max_connections:
description:
- The max number of simultaneous connections for the group. Can be used with
either CONNECTION or UTILIZATION balancing modes.
- For CONNECTION mode, either maxConnections or one of maxConnectionsPerInstance
or maxConnectionsPerEndpoint, as appropriate for group type, must be set.
required: false
type: int
max_connections_per_instance:
description:
- The max number of simultaneous connections that a single backend instance
can handle. This is used to calculate the capacity of the group. Can be
used in either CONNECTION or UTILIZATION balancing modes.
- For CONNECTION mode, either maxConnections or maxConnectionsPerInstance
must be set.
required: false
type: int
max_connections_per_endpoint:
description:
- The max number of simultaneous connections that a single backend network
endpoint can handle. This is used to calculate the capacity of the group.
Can be used in either CONNECTION or UTILIZATION balancing modes.
- For CONNECTION mode, either maxConnections or maxConnectionsPerEndpoint
must be set.
required: false
type: int
version_added: '2.9'
max_rate:
description:
- The max requests per second (RPS) of the group.
- Can be used with either RATE or UTILIZATION balancing modes, but required
if RATE mode. For RATE mode, either maxRate or one of maxRatePerInstance
or maxRatePerEndpoint, as appropriate for group type, must be set.
required: false
type: int
max_rate_per_instance:
description:
- The max requests per second (RPS) that a single backend instance can handle.
This is used to calculate the capacity of the group. Can be used in either
balancing mode. For RATE mode, either maxRate or maxRatePerInstance must
be set.
required: false
type: str
max_rate_per_endpoint:
description:
- The max requests per second (RPS) that a single backend network endpoint
can handle. This is used to calculate the capacity of the group. Can be
used in either balancing mode. For RATE mode, either maxRate or maxRatePerEndpoint
must be set.
required: false
type: str
version_added: '2.9'
max_utilization:
description:
- Used when balancingMode is UTILIZATION. This ratio defines the CPU utilization
target for the group. The default is 0.8. Valid range is [0.0, 1.0].
required: false
default: '0.8'
type: str
cdn_policy:
description:
- Cloud CDN configuration for this BackendService.
required: false
type: dict
suboptions:
cache_key_policy:
description:
- The CacheKeyPolicy for this CdnPolicy.
required: false
type: dict
suboptions:
include_host:
description:
- If true requests to different hosts will be cached separately.
required: false
type: bool
include_protocol:
description:
- If true, http and https requests will be cached separately.
required: false
type: bool
include_query_string:
description:
- If true, include query string parameters in the cache key according
to query_string_whitelist and query_string_blacklist. If neither is
set, the entire query string will be included.
- If false, the query string will be excluded from the cache key entirely.
required: false
type: bool
query_string_blacklist:
description:
- Names of query string parameters to exclude in cache keys.
- All other parameters will be included. Either specify query_string_whitelist
or query_string_blacklist, not both.
- "'&' and '=' will be percent encoded and not treated as delimiters."
required: false
type: list
query_string_whitelist:
description:
- Names of query string parameters to include in cache keys.
- All other parameters will be excluded. Either specify query_string_whitelist
or query_string_blacklist, not both.
- "'&' and '=' will be percent encoded and not treated as delimiters."
required: false
type: list
signed_url_cache_max_age_sec:
description:
- Maximum number of seconds the response to a signed URL request will be considered
fresh, defaults to 1hr (3600s). After this time period, the response will
be revalidated before being served.
- 'When serving responses to signed URL requests, Cloud CDN will internally
behave as though all responses from this backend had a "Cache-Control: public,
max-age=[TTL]" header, regardless of any existing Cache-Control header.
The actual headers served in responses will not be altered.'
required: false
default: '3600'
type: int
version_added: '2.8'
connection_draining:
description:
- Settings for connection draining .
required: false
type: dict
suboptions:
draining_timeout_sec:
description:
- Time for which instance will be drained (not accept new connections, but
still work to finish started).
required: false
default: '300'
type: int
description:
description:
- An optional description of this resource.
required: false
type: str
enable_cdn:
description:
- If true, enable Cloud CDN for this BackendService.
required: false
type: bool
health_checks:
description:
- The set of URLs to the HttpHealthCheck or HttpsHealthCheck resource for health
checking this BackendService. Currently at most one health check can be specified,
and a health check is required.
- For internal load balancing, a URL to a HealthCheck resource must be specified
instead.
required: true
type: list
iap:
description:
- Settings for enabling Cloud Identity Aware Proxy.
required: false
type: dict
version_added: '2.7'
suboptions:
enabled:
description:
- Enables IAP.
required: false
type: bool
oauth2_client_id:
description:
- OAuth2 Client ID for IAP .
required: true
type: str
oauth2_client_secret:
description:
- OAuth2 Client Secret for IAP .
required: true
type: str
load_balancing_scheme:
description:
- Indicates whether the backend service will be used with internal or external
load balancing. A backend service created for one type of load balancing cannot
be used with the other. Must be `EXTERNAL` or `INTERNAL_SELF_MANAGED` for a
global backend service. Defaults to `EXTERNAL`.
- 'Some valid choices include: "EXTERNAL", "INTERNAL_SELF_MANAGED"'
required: false
default: EXTERNAL
type: str
version_added: '2.7'
name:
description:
- Name of the resource. Provided by the client when the resource is created. The
name must be 1-63 characters long, and comply with RFC1035. Specifically, the
name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`
which means the first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the last character,
which cannot be a dash.
required: true
type: str
port_name:
description:
- Name of backend port. The same name should appear in the instance groups referenced
by this service. Required when the load balancing scheme is EXTERNAL.
required: false
type: str
protocol:
description:
- The protocol this BackendService uses to communicate with backends.
- 'Possible values are HTTP, HTTPS, HTTP2, TCP, and SSL. The default is HTTP.
**NOTE**: HTTP2 is only valid for beta HTTP/2 load balancer types and may result
in errors if used with the GA API.'
- 'Some valid choices include: "HTTP", "HTTPS", "HTTP2", "TCP", "SSL"'
required: false
type: str
security_policy:
description:
- The security policy associated with this backend service.
required: false
type: str
version_added: '2.8'
session_affinity:
description:
- Type of session affinity to use. The default is NONE. Session affinity is not
applicable if the protocol is UDP.
- 'Some valid choices include: "NONE", "CLIENT_IP", "CLIENT_IP_PORT_PROTO", "CLIENT_IP_PROTO",
"GENERATED_COOKIE", "HEADER_FIELD", "HTTP_COOKIE"'
required: false
type: str
timeout_sec:
description:
- How many seconds to wait for the backend before considering it a failed request.
Default is 30 seconds. Valid range is [1, 86400].
required: false
type: int
aliases:
- timeout_seconds
project:
description:
- The Google Cloud Platform project to use.
type: str
auth_kind:
description:
- The type of credential used.
type: str
required: true
choices:
- application
- machineaccount
- serviceaccount
service_account_contents:
description:
- The contents of a Service Account JSON file, either in a dictionary or as a
JSON string that represents it.
type: jsonarg
service_account_file:
description:
- The path of a Service Account JSON file if serviceaccount is selected as type.
type: path
service_account_email:
description:
- An optional service account email address if machineaccount is selected and
the user does not wish to use the default email.
type: str
scopes:
description:
- Array of scopes to be used
type: list
env_type:
description:
- Specifies which Ansible environment you're running this module within.
- This should not be set unless you know what you're doing.
- This only alters the User Agent string for any API requests.
type: str
notes:
- 'API Reference: U(https://cloud.google.com/compute/docs/reference/v1/backendServices)'
- 'Official Documentation: U(https://cloud.google.com/compute/docs/load-balancing/http/backend-service)'
- for authentication, you can set service_account_file using the c(gcp_service_account_file)
env variable.
- for authentication, you can set service_account_contents using the c(GCP_SERVICE_ACCOUNT_CONTENTS)
env variable.
- For authentication, you can set service_account_email using the C(GCP_SERVICE_ACCOUNT_EMAIL)
env variable.
- For authentication, you can set auth_kind using the C(GCP_AUTH_KIND) env variable.
- For authentication, you can set scopes using the C(GCP_SCOPES) env variable.
- Environment variables values will only be used if the playbook values are not set.
- The I(service_account_email) and I(service_account_file) options are mutually exclusive.
'''
EXAMPLES = '''
- name: create a instance group
gcp_compute_instance_group:
name: instancegroup-backendservice
zone: us-central1-a
project: "{{ gcp_project }}"
auth_kind: "{{ gcp_cred_kind }}"
service_account_file: "{{ gcp_cred_file }}"
state: present
register: instancegroup
- name: create a HTTP health check
gcp_compute_http_health_check:
name: httphealthcheck-backendservice
healthy_threshold: 10
port: 8080
timeout_sec: 2
unhealthy_threshold: 5
project: "{{ gcp_project }}"
auth_kind: "{{ gcp_cred_kind }}"
service_account_file: "{{ gcp_cred_file }}"
state: present
register: healthcheck
- name: create a backend service
gcp_compute_backend_service:
name: test_object
backends:
- group: "{{ instancegroup.selfLink }}"
health_checks:
- "{{ healthcheck.selfLink }}"
enable_cdn: 'true'
project: test_project
auth_kind: serviceaccount
service_account_file: "/tmp/auth.pem"
state: present
'''
RETURN = '''
affinityCookieTtlSec:
description:
- Lifetime of cookies in seconds if session_affinity is GENERATED_COOKIE. If set
to 0, the cookie is non-persistent and lasts only until the end of the browser
session (or equivalent). The maximum allowed value for TTL is one day.
- When the load balancing scheme is INTERNAL, this field is not used.
returned: success
type: int
backends:
description:
- The set of backends that serve this BackendService.
returned: success
type: complex
contains:
balancingMode:
description:
- Specifies the balancing mode for this backend.
- For global HTTP(S) or TCP/SSL load balancing, the default is UTILIZATION.
Valid values are UTILIZATION, RATE (for HTTP(S)) and CONNECTION (for TCP/SSL).
returned: success
type: str
capacityScaler:
description:
- A multiplier applied to the group's maximum servicing capacity (based on UTILIZATION,
RATE or CONNECTION).
- Default value is 1, which means the group will serve up to 100% of its configured
capacity (depending on balancingMode). A setting of 0 means the group is completely
drained, offering 0% of its available Capacity. Valid range is [0.0,1.0].
returned: success
type: str
description:
description:
- An optional description of this resource.
- Provide this property when you create the resource.
returned: success
type: str
group:
description:
- The fully-qualified URL of an Instance Group or Network Endpoint Group resource.
In case of instance group this defines the list of instances that serve traffic.
Member virtual machine instances from each instance group must live in the
same zone as the instance group itself. No two backends in a backend service
are allowed to use same Instance Group resource.
- For Network Endpoint Groups this defines list of endpoints. All endpoints
of Network Endpoint Group must be hosted on instances located in the same
zone as the Network Endpoint Group.
- Backend service can not contain mix of Instance Group and Network Endpoint
Group backends.
- Note that you must specify an Instance Group or Network Endpoint Group resource
using the fully-qualified URL, rather than a partial URL.
returned: success
type: str
maxConnections:
description:
- The max number of simultaneous connections for the group. Can be used with
either CONNECTION or UTILIZATION balancing modes.
- For CONNECTION mode, either maxConnections or one of maxConnectionsPerInstance
or maxConnectionsPerEndpoint, as appropriate for group type, must be set.
returned: success
type: int
maxConnectionsPerInstance:
description:
- The max number of simultaneous connections that a single backend instance
can handle. This is used to calculate the capacity of the group. Can be used
in either CONNECTION or UTILIZATION balancing modes.
- For CONNECTION mode, either maxConnections or maxConnectionsPerInstance must
be set.
returned: success
type: int
maxConnectionsPerEndpoint:
description:
- The max number of simultaneous connections that a single backend network endpoint
can handle. This is used to calculate the capacity of the group. Can be used
in either CONNECTION or UTILIZATION balancing modes.
- For CONNECTION mode, either maxConnections or maxConnectionsPerEndpoint must
be set.
returned: success
type: int
maxRate:
description:
- The max requests per second (RPS) of the group.
- Can be used with either RATE or UTILIZATION balancing modes, but required
if RATE mode. For RATE mode, either maxRate or one of maxRatePerInstance or
maxRatePerEndpoint, as appropriate for group type, must be set.
returned: success
type: int
maxRatePerInstance:
description:
- The max requests per second (RPS) that a single backend instance can handle.
This is used to calculate the capacity of the group. Can be used in either
balancing mode. For RATE mode, either maxRate or maxRatePerInstance must be
set.
returned: success
type: str
maxRatePerEndpoint:
description:
- The max requests per second (RPS) that a single backend network endpoint can
handle. This is used to calculate the capacity of the group. Can be used in
either balancing mode. For RATE mode, either maxRate or maxRatePerEndpoint
must be set.
returned: success
type: str
maxUtilization:
description:
- Used when balancingMode is UTILIZATION. This ratio defines the CPU utilization
target for the group. The default is 0.8. Valid range is [0.0, 1.0].
returned: success
type: str
cdnPolicy:
description:
- Cloud CDN configuration for this BackendService.
returned: success
type: complex
contains:
cacheKeyPolicy:
description:
- The CacheKeyPolicy for this CdnPolicy.
returned: success
type: complex
contains:
includeHost:
description:
- If true requests to different hosts will be cached separately.
returned: success
type: bool
includeProtocol:
description:
- If true, http and https requests will be cached separately.
returned: success
type: bool
includeQueryString:
description:
- If true, include query string parameters in the cache key according to
query_string_whitelist and query_string_blacklist. If neither is set,
the entire query string will be included.
- If false, the query string will be excluded from the cache key entirely.
returned: success
type: bool
queryStringBlacklist:
description:
- Names of query string parameters to exclude in cache keys.
- All other parameters will be included. Either specify query_string_whitelist
or query_string_blacklist, not both.
- "'&' and '=' will be percent encoded and not treated as delimiters."
returned: success
type: list
queryStringWhitelist:
description:
- Names of query string parameters to include in cache keys.
- All other parameters will be excluded. Either specify query_string_whitelist
or query_string_blacklist, not both.
- "'&' and '=' will be percent encoded and not treated as delimiters."
returned: success
type: list
signedUrlCacheMaxAgeSec:
description:
- Maximum number of seconds the response to a signed URL request will be considered
fresh, defaults to 1hr (3600s). After this time period, the response will
be revalidated before being served.
- 'When serving responses to signed URL requests, Cloud CDN will internally
behave as though all responses from this backend had a "Cache-Control: public,
max-age=[TTL]" header, regardless of any existing Cache-Control header. The
actual headers served in responses will not be altered.'
returned: success
type: int
connectionDraining:
description:
- Settings for connection draining .
returned: success
type: complex
contains:
drainingTimeoutSec:
description:
- Time for which instance will be drained (not accept new connections, but still
work to finish started).
returned: success
type: int
creationTimestamp:
description:
- Creation timestamp in RFC3339 text format.
returned: success
type: str
fingerprint:
description:
- Fingerprint of this resource. A hash of the contents stored in this object. This
field is used in optimistic locking.
returned: success
type: str
description:
description:
- An optional description of this resource.
returned: success
type: str
enableCDN:
description:
- If true, enable Cloud CDN for this BackendService.
returned: success
type: bool
healthChecks:
description:
- The set of URLs to the HttpHealthCheck or HttpsHealthCheck resource for health
checking this BackendService. Currently at most one health check can be specified,
and a health check is required.
- For internal load balancing, a URL to a HealthCheck resource must be specified
instead.
returned: success
type: list
id:
description:
- The unique identifier for the resource.
returned: success
type: int
iap:
description:
- Settings for enabling Cloud Identity Aware Proxy.
returned: success
type: complex
contains:
enabled:
description:
- Enables IAP.
returned: success
type: bool
oauth2ClientId:
description:
- OAuth2 Client ID for IAP .
returned: success
type: str
oauth2ClientSecret:
description:
- OAuth2 Client Secret for IAP .
returned: success
type: str
oauth2ClientSecretSha256:
description:
- OAuth2 Client Secret SHA-256 for IAP .
returned: success
type: str
loadBalancingScheme:
description:
- Indicates whether the backend service will be used with internal or external load
balancing. A backend service created for one type of load balancing cannot be
used with the other. Must be `EXTERNAL` or `INTERNAL_SELF_MANAGED` for a global
backend service. Defaults to `EXTERNAL`.
returned: success
type: str
name:
description:
- Name of the resource. Provided by the client when the resource is created. The
name must be 1-63 characters long, and comply with RFC1035. Specifically, the
name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`
which means the first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the last character,
which cannot be a dash.
returned: success
type: str
portName:
description:
- Name of backend port. The same name should appear in the instance groups referenced
by this service. Required when the load balancing scheme is EXTERNAL.
returned: success
type: str
protocol:
description:
- The protocol this BackendService uses to communicate with backends.
- 'Possible values are HTTP, HTTPS, HTTP2, TCP, and SSL. The default is HTTP. **NOTE**:
HTTP2 is only valid for beta HTTP/2 load balancer types and may result in errors
if used with the GA API.'
returned: success
type: str
securityPolicy:
description:
- The security policy associated with this backend service.
returned: success
type: str
sessionAffinity:
description:
- Type of session affinity to use. The default is NONE. Session affinity is not
applicable if the protocol is UDP.
returned: success
type: str
timeoutSec:
description:
- How many seconds to wait for the backend before considering it a failed request.
Default is 30 seconds. Valid range is [1, 86400].
returned: success
type: int
'''
################################################################################
# Imports
################################################################################
from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest, remove_nones_from_dict, replace_resource_dict
import json
import time
################################################################################
# Main
################################################################################
def main():
    """Entry point: declare the module's argument spec, then converge the
    GCP backend service to the requested state (present/absent)."""
    # Argument spec mirrors the DOCUMENTATION block above; nested 'options'
    # dicts describe sub-option validation for list/dict parameters.
    # NOTE(review): capacity_scaler and max_utilization declare float defaults
    # (1.0 / 0.8) while their type is 'str' — confirm this matches the
    # documented string defaults '1.0' / '0.8'.
    module = GcpModule(
        argument_spec=dict(
            state=dict(default='present', choices=['present', 'absent'], type='str'),
            affinity_cookie_ttl_sec=dict(type='int'),
            backends=dict(
                type='list',
                elements='dict',
                options=dict(
                    balancing_mode=dict(default='UTILIZATION', type='str'),
                    capacity_scaler=dict(default=1.0, type='str'),
                    description=dict(type='str'),
                    group=dict(type='str'),
                    max_connections=dict(type='int'),
                    max_connections_per_instance=dict(type='int'),
                    max_connections_per_endpoint=dict(type='int'),
                    max_rate=dict(type='int'),
                    max_rate_per_instance=dict(type='str'),
                    max_rate_per_endpoint=dict(type='str'),
                    max_utilization=dict(default=0.8, type='str'),
                ),
            ),
            cdn_policy=dict(
                type='dict',
                options=dict(
                    cache_key_policy=dict(
                        type='dict',
                        options=dict(
                            include_host=dict(type='bool'),
                            include_protocol=dict(type='bool'),
                            include_query_string=dict(type='bool'),
                            query_string_blacklist=dict(type='list', elements='str'),
                            query_string_whitelist=dict(type='list', elements='str'),
                        ),
                    ),
                    signed_url_cache_max_age_sec=dict(default=3600, type='int'),
                ),
            ),
            connection_draining=dict(type='dict', options=dict(draining_timeout_sec=dict(default=300, type='int'))),
            description=dict(type='str'),
            enable_cdn=dict(type='bool'),
            health_checks=dict(required=True, type='list', elements='str'),
            iap=dict(
                type='dict',
                options=dict(enabled=dict(type='bool'), oauth2_client_id=dict(required=True, type='str'), oauth2_client_secret=dict(required=True, type='str')),
            ),
            load_balancing_scheme=dict(default='EXTERNAL', type='str'),
            name=dict(required=True, type='str'),
            port_name=dict(type='str'),
            protocol=dict(type='str'),
            security_policy=dict(type='str'),
            session_affinity=dict(type='str'),
            timeout_sec=dict(type='int', aliases=['timeout_seconds']),
        )
    )
    # Default OAuth scope when the user supplied none.
    if not module.params['scopes']:
        module.params['scopes'] = ['https://www.googleapis.com/auth/compute']
    state = module.params['state']
    kind = 'compute#backendService'
    # Look up the current remote state (None/{} when the resource is absent).
    fetch = fetch_resource(module, self_link(module), kind)
    changed = False
    if fetch:
        if state == 'present':
            # Resource exists: update only if params differ, then re-fetch.
            if is_different(module, fetch):
                update(module, self_link(module), kind, fetch)
                fetch = fetch_resource(module, self_link(module), kind)
                changed = True
        else:
            # state == 'absent': remove the existing resource.
            delete(module, self_link(module), kind)
            fetch = {}
            changed = True
    else:
        if state == 'present':
            # Resource missing: create it.
            fetch = create(module, collection(module), kind)
            changed = True
        else:
            # Already absent; nothing to do.
            fetch = {}
    # Report the final resource body plus Ansible's 'changed' flag.
    fetch.update({'changed': changed})
    module.exit_json(**fetch)
def create(module, link, kind):
    """POST the backend service to the collection URL and wait for the
    resulting async operation to finish."""
    session = GcpSession(module, 'compute')
    response = session.post(link, resource_to_request(module))
    return wait_for_operation(module, response)
def update(module, link, kind, fetch):
    """Apply field-specific updates first, then PUT the full resource and
    wait for the async operation to finish."""
    desired = resource_to_request(module)
    update_fields(module, desired, response_to_hash(module, fetch))
    session = GcpSession(module, 'compute')
    return wait_for_operation(module, session.put(link, resource_to_request(module)))
def update_fields(module, request, response):
    """Dispatch custom-endpoint updates for fields that cannot be changed
    through the plain PUT (currently only the security policy)."""
    if request.get('securityPolicy') != response.get('securityPolicy'):
        security_policy_update(module, request, response)
def security_policy_update(module, request, response):
    """Set the backend service's security policy via its dedicated
    setSecurityPolicy sub-endpoint."""
    session = GcpSession(module, 'compute')
    url = (
        "https://www.googleapis.com/compute/v1/"
        "projects/{project}/global/backendServices/{name}/setSecurityPolicy"
    ).format(**module.params)
    session.post(url, {u'securityPolicy': module.params.get('security_policy')})
def delete(module, link, kind):
    """DELETE the backend service and wait for the async operation."""
    session = GcpSession(module, 'compute')
    return wait_for_operation(module, session.delete(link))
def resource_to_request(module):
    """Build the GCP API request payload from the module's parameters.

    Nested structures are converted through their helper classes; unset or
    empty values are dropped from the payload, but an explicit False (e.g.
    enable_cdn) is kept.
    """
    params = module.params
    request = {
        u'kind': 'compute#backendService',
        u'affinityCookieTtlSec': params.get('affinity_cookie_ttl_sec'),
        u'backends': BackendServiceBackendsArray(params.get('backends', []), module).to_request(),
        u'cdnPolicy': BackendServiceCdnpolicy(params.get('cdn_policy', {}), module).to_request(),
        u'connectionDraining': BackendServiceConnectiondraining(params.get('connection_draining', {}), module).to_request(),
        u'description': params.get('description'),
        u'enableCDN': params.get('enable_cdn'),
        u'healthChecks': params.get('health_checks'),
        u'iap': BackendServiceIap(params.get('iap', {}), module).to_request(),
        u'loadBalancingScheme': params.get('load_balancing_scheme'),
        u'name': params.get('name'),
        u'portName': params.get('port_name'),
        u'protocol': params.get('protocol'),
        u'securityPolicy': params.get('security_policy'),
        u'sessionAffinity': params.get('session_affinity'),
        u'timeoutSec': params.get('timeout_sec'),
    }
    # Drop unset/empty values; an explicit False must survive the filter.
    return {k: v for k, v in request.items() if v or v is False}
def fetch_resource(module, link, kind, allow_not_found=True):
    """GET a resource and return its parsed body, or None when absent
    (404) and allow_not_found is set."""
    session = GcpSession(module, 'compute')
    return return_if_object(module, session.get(link), kind, allow_not_found)
def self_link(module):
    """Return the canonical URL of this backend service resource."""
    template = "https://www.googleapis.com/compute/v1/projects/{project}/global/backendServices/{name}"
    return template.format(**module.params)
def collection(module):
    """Return the URL of the project's backendServices collection."""
    template = "https://www.googleapis.com/compute/v1/projects/{project}/global/backendServices"
    return template.format(**module.params)
def return_if_object(module, response, kind, allow_not_found=False):
    """Decode an HTTP response into a dict, handling 404/204 and API errors.

    Returns None for a tolerated 404 or an empty 204 body; fails the module
    on invalid JSON or on an API-level error payload.
    """
    # A missing resource is not an error when the caller tolerates it.
    if allow_not_found and response.status_code == 404:
        return None
    # 204 No Content has no body to decode.
    if response.status_code == 204:
        return None
    try:
        module.raise_for_status(response)
        result = response.json()
    except getattr(json.decoder, 'JSONDecodeError', ValueError):
        # Python 2 json lacks JSONDecodeError; fall back to ValueError there.
        module.fail_json(msg="Invalid JSON response with error: %s" % response.text)
    errors = navigate_hash(result, ['error', 'errors'])
    if errors:
        module.fail_json(msg=errors)
    return result
def is_different(module, response):
    """Compare the desired state (module params) against the live resource,
    restricting the comparison to keys present on both sides so that
    output-only fields never trigger an update."""
    request = resource_to_request(module)
    response = response_to_hash(module, response)
    response_vals = {k: v for k, v in response.items() if k in request}
    request_vals = {k: v for k, v in request.items() if k in response}
    return GcpRequest(request_vals) != GcpRequest(response_vals)
# Remove unnecessary properties from the response.
# This is for doing comparisons with Ansible's current parameters.
def response_to_hash(module, response):
    """Normalize a live API response into the comparable dict shape used by
    is_different(): nested structures are converted back through their
    helper classes."""
    return {
        u'affinityCookieTtlSec': response.get(u'affinityCookieTtlSec'),
        u'backends': BackendServiceBackendsArray(response.get(u'backends', []), module).from_response(),
        u'cdnPolicy': BackendServiceCdnpolicy(response.get(u'cdnPolicy', {}), module).from_response(),
        u'connectionDraining': BackendServiceConnectiondraining(response.get(u'connectionDraining', {}), module).from_response(),
        u'creationTimestamp': response.get(u'creationTimestamp'),
        u'fingerprint': response.get(u'fingerprint'),
        u'description': response.get(u'description'),
        u'enableCDN': response.get(u'enableCDN'),
        u'healthChecks': response.get(u'healthChecks'),
        u'id': response.get(u'id'),
        u'iap': BackendServiceIap(response.get(u'iap', {}), module).from_response(),
        # These two are taken from the module params rather than the response,
        # so they always match the request during comparison.
        u'loadBalancingScheme': module.params.get('load_balancing_scheme'),
        u'name': module.params.get('name'),
        u'portName': response.get(u'portName'),
        u'protocol': response.get(u'protocol'),
        u'securityPolicy': response.get(u'securityPolicy'),
        u'sessionAffinity': response.get(u'sessionAffinity'),
        u'timeoutSec': response.get(u'timeoutSec'),
    }
def async_op_url(module, extra_data=None):
    """Build the polling URL for a global compute operation.

    extra_data typically supplies 'op_id'; on key conflicts the module
    params take precedence over extra_data.
    """
    merged = dict(extra_data or {})
    merged.update(module.params)
    return "https://www.googleapis.com/compute/v1/projects/{project}/global/operations/{op_id}".format(**merged)
def wait_for_operation(module, response):
    """Resolve an async operation response: poll until completion, then
    fetch and return the operation's target resource."""
    operation = return_if_object(module, response, 'compute#operation')
    if operation is None:
        return {}
    finished = wait_for_completion(navigate_hash(operation, ['status']), operation, module)
    return fetch_resource(module, navigate_hash(finished, ['targetLink']), 'compute#backendService')
def wait_for_completion(status, op_result, module):
    """Poll the operation endpoint once per second until status is DONE,
    failing the module if the operation reports errors."""
    op_uri = async_op_url(module, {'op_id': navigate_hash(op_result, ['name'])})
    while status != 'DONE':
        raise_if_errors(op_result, ['error', 'errors'], module)
        time.sleep(1.0)
        op_result = fetch_resource(module, op_uri, 'compute#operation', False)
        status = navigate_hash(op_result, ['status'])
    return op_result
def raise_if_errors(response, err_path, module):
    """Abort the module run when the operation payload reports errors."""
    found = navigate_hash(response, err_path)
    if found is None:
        return
    module.fail_json(msg=found)
class BackendServiceBackendsArray(object):
    """Converts the `backends` list between Ansible params (snake_case)
    and the GCP API wire format (camelCase)."""

    def __init__(self, request, module):
        self.module = module
        # Normalize a missing/None list to an empty one.
        self.request = request if request else []

    def to_request(self):
        """Return API-shaped dicts built from Ansible-shaped items."""
        return [self._request_for_item(item) for item in self.request]

    def from_response(self):
        """Return dicts rebuilt from API-shaped response items."""
        return [self._response_from_item(item) for item in self.request]

    def _request_for_item(self, item):
        # snake_case param keys -> camelCase API keys; unset values dropped.
        return remove_nones_from_dict(
            {
                u'balancingMode': item.get('balancing_mode'),
                u'capacityScaler': item.get('capacity_scaler'),
                u'description': item.get('description'),
                u'group': item.get('group'),
                u'maxConnections': item.get('max_connections'),
                u'maxConnectionsPerInstance': item.get('max_connections_per_instance'),
                u'maxConnectionsPerEndpoint': item.get('max_connections_per_endpoint'),
                u'maxRate': item.get('max_rate'),
                u'maxRatePerInstance': item.get('max_rate_per_instance'),
                u'maxRatePerEndpoint': item.get('max_rate_per_endpoint'),
                u'maxUtilization': item.get('max_utilization'),
            }
        )

    def _response_from_item(self, item):
        # camelCase API keys kept as-is; unset values dropped.
        return remove_nones_from_dict(
            {
                u'balancingMode': item.get(u'balancingMode'),
                u'capacityScaler': item.get(u'capacityScaler'),
                u'description': item.get(u'description'),
                u'group': item.get(u'group'),
                u'maxConnections': item.get(u'maxConnections'),
                u'maxConnectionsPerInstance': item.get(u'maxConnectionsPerInstance'),
                u'maxConnectionsPerEndpoint': item.get(u'maxConnectionsPerEndpoint'),
                u'maxRate': item.get(u'maxRate'),
                u'maxRatePerInstance': item.get(u'maxRatePerInstance'),
                u'maxRatePerEndpoint': item.get(u'maxRatePerEndpoint'),
                u'maxUtilization': item.get(u'maxUtilization'),
            }
        )
class BackendServiceCdnpolicy(object):
    """Converts the `cdn_policy` dict between Ansible params and the
    GCP API wire format."""

    def __init__(self, request, module):
        self.module = module
        # Normalize a missing/None dict to an empty one.
        self.request = request if request else {}

    def to_request(self):
        """Ansible-shaped dict -> API-shaped dict (unset values dropped)."""
        payload = {
            u'cacheKeyPolicy': BackendServiceCachekeypolicy(self.request.get('cache_key_policy', {}), self.module).to_request(),
            u'signedUrlCacheMaxAgeSec': self.request.get('signed_url_cache_max_age_sec'),
        }
        return remove_nones_from_dict(payload)

    def from_response(self):
        """API-shaped dict -> comparable dict (unset values dropped)."""
        payload = {
            u'cacheKeyPolicy': BackendServiceCachekeypolicy(self.request.get(u'cacheKeyPolicy', {}), self.module).from_response(),
            u'signedUrlCacheMaxAgeSec': self.request.get(u'signedUrlCacheMaxAgeSec'),
        }
        return remove_nones_from_dict(payload)
class BackendServiceCachekeypolicy(object):
    """Converts the CDN `cache_key_policy` dict between Ansible params
    and the GCP API wire format."""

    def __init__(self, request, module):
        self.module = module
        # Normalize a missing/None dict to an empty one.
        self.request = request if request else {}

    def to_request(self):
        """snake_case param keys -> camelCase API keys (unset values dropped)."""
        payload = {
            u'includeHost': self.request.get('include_host'),
            u'includeProtocol': self.request.get('include_protocol'),
            u'includeQueryString': self.request.get('include_query_string'),
            u'queryStringBlacklist': self.request.get('query_string_blacklist'),
            u'queryStringWhitelist': self.request.get('query_string_whitelist'),
        }
        return remove_nones_from_dict(payload)

    def from_response(self):
        """camelCase API keys kept as-is (unset values dropped)."""
        payload = {
            u'includeHost': self.request.get(u'includeHost'),
            u'includeProtocol': self.request.get(u'includeProtocol'),
            u'includeQueryString': self.request.get(u'includeQueryString'),
            u'queryStringBlacklist': self.request.get(u'queryStringBlacklist'),
            u'queryStringWhitelist': self.request.get(u'queryStringWhitelist'),
        }
        return remove_nones_from_dict(payload)
class BackendServiceConnectiondraining(object):
    """Serializer for the connection_draining sub-object of a backend service."""

    def __init__(self, request, module):
        self.module = module
        # Normalize a missing/empty sub-object to an empty dict.
        self.request = request or {}

    def to_request(self):
        """Build the API (camelCase) body from module (snake_case) input."""
        timeout = self.request.get('draining_timeout_sec')
        return remove_nones_from_dict({u'drainingTimeoutSec': timeout})

    def from_response(self):
        """Re-emit the managed camelCase key from an API response."""
        timeout = self.request.get(u'drainingTimeoutSec')
        return remove_nones_from_dict({u'drainingTimeoutSec': timeout})
class BackendServiceIap(object):
    """Serializer for the iap (Identity-Aware Proxy) sub-object of a backend service."""

    def __init__(self, request, module):
        self.module = module
        # Normalize a missing/empty sub-object to an empty dict.
        self.request = request or {}

    def to_request(self):
        """Build the API (camelCase) body from module (snake_case) input."""
        return remove_nones_from_dict({
            u'enabled': self.request.get('enabled'),
            u'oauth2ClientId': self.request.get('oauth2_client_id'),
            u'oauth2ClientSecret': self.request.get('oauth2_client_secret'),
        })

    def from_response(self):
        """Re-emit the managed camelCase keys from an API response."""
        return remove_nones_from_dict({
            u'enabled': self.request.get(u'enabled'),
            u'oauth2ClientId': self.request.get(u'oauth2ClientId'),
            u'oauth2ClientSecret': self.request.get(u'oauth2ClientSecret'),
        })
if __name__ == '__main__':
main()<|fim▁end|> | |
<|file_name|>MultipleBrokerProducer.java<|end_file_name|><|fim▁begin|>package com.cosmos.kafka.client.producer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import java.util.Properties;
import java.util.Random;
import static org.apache.kafka.clients.producer.ProducerConfig.*;
/**
* Producer using multiple partition
*/
public class MultipleBrokerProducer implements Runnable {
private KafkaProducer<Integer, String> producer;
private String topic;
public MultipleBrokerProducer(String topic) {
final Properties props = new Properties();
props.put(BOOTSTRAP_SERVERS_CONFIG, "localhost:9092,localhost:9093,localhost:9094");
props.put(KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.IntegerSerializer");
props.put(VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
props.put(PARTITIONER_CLASS_CONFIG, "org.apache.kafka.clients.producer.internals.DefaultPartitioner");
props.put(ACKS_CONFIG, "1");
this.producer = new KafkaProducer<>(props);
this.topic = topic;
}
@Override
public void run() {
System.out.println("Sending 1000 messages");
Random rnd = new Random();
int i = 1;
while (i <= 1000) {
int key = rnd.nextInt(255);
String message = String.format("Message for key - [%d]: %d", key, i);
System.out.printf("Send: %s\n", message);
this.producer.send(new ProducerRecord<>(this.topic, key, message));
i++;
try {
Thread.sleep(3);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
producer.close();<|fim▁hole|>}<|fim▁end|> | } |
<|file_name|>journal.py<|end_file_name|><|fim▁begin|># -*- Mode: python; coding:utf-8; indent-tabs-mode: nil -*- */
#
# This file is part of systemd.
#
# Copyright 2012 David Strauss <[email protected]>
# Copyright 2012 Zbigniew Jędrzejewski-Szmek <[email protected]>
# Copyright 2012 Marti Raudsepp <[email protected]>
#
# systemd is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# systemd is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with systemd; If not, see <http://www.gnu.org/licenses/>.
from __future__ import division
import sys as _sys
import datetime as _datetime
import uuid as _uuid
import traceback as _traceback
import os as _os
import logging as _logging
if _sys.version_info >= (3,3):
from collections import ChainMap as _ChainMap
from syslog import (LOG_EMERG, LOG_ALERT, LOG_CRIT, LOG_ERR,
LOG_WARNING, LOG_NOTICE, LOG_INFO, LOG_DEBUG)
from ._journal import __version__, sendv, stream_fd
from ._reader import (_Reader, NOP, APPEND, INVALIDATE,
LOCAL_ONLY, RUNTIME_ONLY, SYSTEM_ONLY,
_get_catalog)
from . import id128 as _id128
if _sys.version_info >= (3,):
from ._reader import Monotonic
else:
Monotonic = tuple
def _convert_monotonic(m):
    """Convert a raw (usec, boot_id_bytes) pair into a Monotonic pair of
    (datetime.timedelta, uuid.UUID)."""
    timestamp, boot_id = m
    return Monotonic((_datetime.timedelta(microseconds=timestamp),
                      _uuid.UUID(bytes=boot_id)))
def _convert_source_monotonic(s):
return _datetime.timedelta(microseconds=int(s))
def _convert_realtime(t):
return _datetime.datetime.fromtimestamp(t / 1000000)
def _convert_timestamp(s):
return _datetime.datetime.fromtimestamp(int(s) / 1000000)
if _sys.version_info >= (3,):
    # Journal field values arrive as bytes under Python 3; decode to str
    # before handing them to uuid.UUID (which expects a hex string).
    def _convert_uuid(s):
        return _uuid.UUID(s.decode())
else:
    # Python 2 str is already bytes-compatible, so uuid.UUID works directly.
    _convert_uuid = _uuid.UUID
DEFAULT_CONVERTERS = {
'MESSAGE_ID': _convert_uuid,
'_MACHINE_ID': _convert_uuid,
'_BOOT_ID': _convert_uuid,
'PRIORITY': int,
'LEADER': int,
'SESSION_ID': int,
'USERSPACE_USEC': int,
'INITRD_USEC': int,
'KERNEL_USEC': int,
'_UID': int,
'_GID': int,
'_PID': int,
'SYSLOG_FACILITY': int,
'SYSLOG_PID': int,
'_AUDIT_SESSION': int,
'_AUDIT_LOGINUID': int,
'_SYSTEMD_SESSION': int,
'_SYSTEMD_OWNER_UID': int,
'CODE_LINE': int,
'ERRNO': int,
'EXIT_STATUS': int,
'_SOURCE_REALTIME_TIMESTAMP': _convert_timestamp,
'__REALTIME_TIMESTAMP': _convert_realtime,
'_SOURCE_MONOTONIC_TIMESTAMP': _convert_source_monotonic,
'__MONOTONIC_TIMESTAMP': _convert_monotonic,
'COREDUMP': bytes,<|fim▁hole|> 'COREDUMP_SIGNAL': int,
'COREDUMP_TIMESTAMP': _convert_timestamp,
}
_IDENT_LETTER = set('ABCDEFGHIJKLMNOPQRTSUVWXYZ_')
def _valid_field_name(s):
return not (set(s) - _IDENT_LETTER)
class Reader(_Reader):
    """Reader allows the access and filtering of systemd journal
    entries. Note that in order to access the system journal, a
    non-root user must be in the `systemd-journal` group.
    Example usage to print out all informational or higher level
    messages for systemd-udevd for this boot:
    >>> j = journal.Reader()
    >>> j.this_boot()
    >>> j.log_level(journal.LOG_INFO)
    >>> j.add_match(_SYSTEMD_UNIT="systemd-udevd.service")
    >>> for entry in j:
    ...    print(entry['MESSAGE'])
    See systemd.journal-fields(7) for more info on typical fields
    found in the journal.
    """
    def __init__(self, flags=0, path=None, converters=None):
        """Create an instance of Reader, which allows filtering and
        return of journal entries.
        Argument `flags` sets open flags of the journal, which can be one
        of, or ORed combination of constants: LOCAL_ONLY (default) opens
        journal on local machine only; RUNTIME_ONLY opens only
        volatile journal files; and SYSTEM_ONLY opens only
        journal files of system services and the kernel.
        Argument `path` is the directory of journal files. Note that
        `flags` and `path` are exclusive.
        Argument `converters` is a dictionary which updates the
        DEFAULT_CONVERTERS to convert journal field values. Field
        names are used as keys into this dictionary. The values must
        be single argument functions, which take a `bytes` object and
        return a converted value. When there's no entry for a field
        name, then the default UTF-8 decoding will be attempted. If
        the conversion fails with a ValueError, unconverted bytes
        object will be returned. (Note that ValueError is a superclass
        of UnicodeDecodeError).
        Reader implements the context manager protocol: the journal
        will be closed when exiting the block.
        """
        super(Reader, self).__init__(flags, path)
        if _sys.version_info >= (3,3):
            # ChainMap lets user-supplied converters shadow the defaults
            # without copying the default table.
            self.converters = _ChainMap()
            if converters is not None:
                self.converters.maps.append(converters)
            self.converters.maps.append(DEFAULT_CONVERTERS)
        else:
            self.converters = DEFAULT_CONVERTERS.copy()
            if converters is not None:
                self.converters.update(converters)
    def _convert_field(self, key, value):
        """Convert value using self.converters[key]
        If `key` is not present in self.converters, a standard unicode
        decoding will be attempted. If the conversion (either
        key-specific or the default one) fails with a ValueError, the
        original bytes object will be returned.
        """
        convert = self.converters.get(key, bytes.decode)
        try:
            return convert(value)
        except ValueError:
            # Leave in default bytes
            return value
    def _convert_entry(self, entry):
        """Convert entire journal entry utilising _convert_field"""
        result = {}
        for key, value in entry.items():
            if isinstance(value, list):
                result[key] = [self._convert_field(key, val) for val in value]
            else:
                result[key] = self._convert_field(key, value)
        return result
    def __iter__(self):
        """Part of iterator protocol.
        Returns self.
        """
        return self
    if _sys.version_info >= (3,):
        def __next__(self):
            """Part of iterator protocol.
            Returns self.get_next().
            """
            return self.get_next()
    else:
        def next(self):
            """Part of iterator protocol.
            Returns self.get_next().
            """
            return self.get_next()
    def add_match(self, *args, **kwargs):
        """Add one or more matches to the filter journal log entries.
        All matches of different field are combined in a logical AND,
        and matches of the same field are automatically combined in a
        logical OR.
        Matches can be passed as strings of form "FIELD=value", or
        keyword arguments FIELD="value".
        """
        args = list(args)
        args.extend(_make_line(key, val) for key, val in kwargs.items())
        for arg in args:
            super(Reader, self).add_match(arg)
    def get_next(self, skip=1):
        """Return the next log entry as a mapping type, currently
        a standard dictionary of fields.
        Optional skip value will return the `skip`\-th log entry.
        Entries will be processed with converters specified during
        Reader creation.
        """
        if super(Reader, self)._next(skip):
            entry = super(Reader, self)._get_all()
            if entry:
                entry['__REALTIME_TIMESTAMP'] = self._get_realtime()
                entry['__MONOTONIC_TIMESTAMP'] = self._get_monotonic()
                entry['__CURSOR'] = self._get_cursor()
                return self._convert_entry(entry)
        return dict()
    def get_previous(self, skip=1):
        """Return the previous log entry as a mapping type,
        currently a standard dictionary of fields.
        Optional skip value will return the -`skip`\-th log entry.
        Entries will be processed with converters specified during
        Reader creation.
        Equivalent to get_next(-skip).
        """
        return self.get_next(-skip)
    def query_unique(self, field):
        """Return unique values appearing in the journal for given `field`.
        Note this does not respect any journal matches.
        Entries will be processed with converters specified during
        Reader creation.
        """
        return set(self._convert_field(field, value)
                   for value in super(Reader, self).query_unique(field))
    def wait(self, timeout=None):
        """Wait for a change in the journal. `timeout` is the maximum
        time in seconds to wait, or None, to wait forever.
        Returns one of NOP (no change), APPEND (new entries have been
        added to the end of the journal), or INVALIDATE (journal files
        have been added or removed).
        """
        us = -1 if timeout is None else int(timeout * 1000000)
        return super(Reader, self).wait(us)
    def seek_realtime(self, realtime):
        """Seek to a matching journal entry nearest to `realtime` time.
        Argument `realtime` must be either an integer unix timestamp
        or datetime.datetime instance.
        """
        if isinstance(realtime, _datetime.datetime):
            # NOTE(review): strftime("%s") is a glibc extension and ignores
            # tzinfo -- consider a portable timestamp conversion here.
            realtime = float(realtime.strftime("%s.%f")) * 1000000
        return super(Reader, self).seek_realtime(int(realtime))
    def seek_monotonic(self, monotonic, bootid=None):
        """Seek to a matching journal entry nearest to `monotonic` time.
        Argument `monotonic` is a timestamp from boot in either
        seconds or a datetime.timedelta instance. Argument `bootid`
        is a string or UUID representing which boot the monotonic time
        is reference to. Defaults to current bootid.
        """
        if isinstance(monotonic, _datetime.timedelta):
            # Fix: timedelta's method is total_seconds(), not totalseconds().
            monotonic = monotonic.total_seconds()
        monotonic = int(monotonic * 1000000)
        if isinstance(bootid, _uuid.UUID):
            # Fix: uuid.UUID exposes the hex digest as the `hex` attribute;
            # there is no get_hex() method.
            bootid = bootid.hex
        return super(Reader, self).seek_monotonic(monotonic, bootid)
    def log_level(self, level):
        """Set maximum log `level` by setting matches for PRIORITY.
        """
        if 0 <= level <= 7:
            for i in range(level+1):
                self.add_match(PRIORITY="%d" % i)
        else:
            raise ValueError("Log level must be 0 <= level <= 7")
    def messageid_match(self, messageid):
        """Add match for log entries with specified `messageid`.
        `messageid` can be string of hexadicimal digits or a UUID
        instance. Standard message IDs can be found in systemd.id128.
        Equivalent to add_match(MESSAGE_ID=`messageid`).
        """
        if isinstance(messageid, _uuid.UUID):
            # Fix: use the `hex` attribute; UUID has no get_hex() method.
            messageid = messageid.hex
        self.add_match(MESSAGE_ID=messageid)
    def this_boot(self, bootid=None):
        """Add match for _BOOT_ID equal to current boot ID or the specified boot ID.
        If specified, bootid should be either a UUID or a 32 digit hex number.
        Equivalent to add_match(_BOOT_ID='bootid').
        """
        if bootid is None:
            bootid = _id128.get_boot().hex
        else:
            bootid = getattr(bootid, 'hex', bootid)
        self.add_match(_BOOT_ID=bootid)
    def this_machine(self, machineid=None):
        """Add match for _MACHINE_ID equal to the ID of this machine.
        If specified, machineid should be either a UUID or a 32 digit hex number.
        Equivalent to add_match(_MACHINE_ID='machineid').
        """
        if machineid is None:
            machineid = _id128.get_machine().hex
        else:
            machineid = getattr(machineid, 'hex', machineid)
        self.add_match(_MACHINE_ID=machineid)
def get_catalog(mid):
    """Return the message catalog entry for the message ID `mid`.
    `mid` may be a uuid.UUID instance or a string of hex digits.
    """
    if isinstance(mid, _uuid.UUID):
        # Fix: uuid.UUID has no get_hex() method; the digest is the
        # `hex` attribute.
        mid = mid.hex
    return _get_catalog(mid)
def _make_line(field, value):
if isinstance(value, bytes):
return field.encode('utf-8') + b'=' + value
else:
return field + '=' + value
def send(MESSAGE, MESSAGE_ID=None,
         CODE_FILE=None, CODE_LINE=None, CODE_FUNC=None,
         **kwargs):
    r"""Send a message to the journal.
    >>> journal.send('Hello world')
    >>> journal.send('Hello, again, world', FIELD2='Greetings!')
    >>> journal.send('Binary message', BINARY=b'\xde\xad\xbe\xef')
    Value of the MESSAGE argument will be used for the MESSAGE=
    field. MESSAGE must be a string and will be sent as UTF-8 to
    the journal.
    MESSAGE_ID can be given to uniquely identify the type of
    message. It must be a string or a uuid.UUID object.
    CODE_LINE, CODE_FILE, and CODE_FUNC can be specified to
    identify the caller. Unless at least one of the three is given,
    values are extracted from the stack frame of the caller of
    send(). CODE_FILE and CODE_FUNC must be strings, CODE_LINE
    must be an integer.
    Additional fields for the journal entry can only be specified
    as keyword arguments. The payload can be either a string or
    bytes. A string will be sent as UTF-8, and bytes will be sent
    as-is to the journal.
    Other useful fields include PRIORITY, SYSLOG_FACILITY,
    SYSLOG_IDENTIFIER, SYSLOG_PID.
    """
    args = ['MESSAGE=' + MESSAGE]
    if MESSAGE_ID is not None:
        # Accept either a uuid.UUID (use its hex digest) or a plain string.
        # (Renamed local: `id` shadowed the builtin.)
        mid = getattr(MESSAGE_ID, 'hex', MESSAGE_ID)
        args.append('MESSAGE_ID=' + mid)
    # Explicit identity checks instead of the old chained `== None`
    # comparison, which only matched when all three compared equal.
    if CODE_LINE is None and CODE_FILE is None and CODE_FUNC is None:
        # No caller info supplied: take it from our caller's stack frame.
        CODE_FILE, CODE_LINE, CODE_FUNC = \
            _traceback.extract_stack(limit=2)[0][:3]
    if CODE_FILE is not None:
        args.append('CODE_FILE=' + CODE_FILE)
    if CODE_LINE is not None:
        args.append('CODE_LINE={:d}'.format(CODE_LINE))
    if CODE_FUNC is not None:
        args.append('CODE_FUNC=' + CODE_FUNC)
    args.extend(_make_line(key, val) for key, val in kwargs.items())
    return sendv(*args)
def stream(identifier, priority=LOG_DEBUG, level_prefix=False):
    r"""Return a file object wrapping a stream to the journal.
    Log messages written to this file as simple newline-separated
    text strings are written to the journal.
    The file will be line buffered, so messages are actually sent
    after a newline character is written.
    >>> stream = journal.stream('myapp')
    >>> stream
    <open file '<fdopen>', mode 'w' at 0x...>
    >>> stream.write('message...\n')
    will produce the following message in the journal::
      PRIORITY=7
      SYSLOG_IDENTIFIER=myapp
      MESSAGE=message...
    Using the interface with print might be more convenient:
    >>> from __future__ import print_function
    >>> print('message...', file=stream)
    `priority` is the syslog priority, one of `LOG_EMERG`,
    `LOG_ALERT`, `LOG_CRIT`, `LOG_ERR`, `LOG_WARNING`,
    `LOG_NOTICE`, `LOG_INFO`, `LOG_DEBUG`.
    `level_prefix` is a boolean. If true, kernel-style log priority
    level prefixes (such as '<1>') are interpreted. See
    sd-daemon(3) for more information.
    """
    journal_fd = stream_fd(identifier, priority, level_prefix)
    # Line-buffered (bufsize=1) text wrapper around the journal fd.
    return _os.fdopen(journal_fd, 'w', 1)
class JournalHandler(_logging.Handler):
"""Journal handler class for the Python logging framework.
Please see the Python logging module documentation for an
overview: http://docs.python.org/library/logging.html.
To create a custom logger whose messages go only to journal:
>>> log = logging.getLogger('custom_logger_name')
>>> log.propagate = False
>>> log.addHandler(journal.JournalHandler())
>>> log.warn("Some message: %s", detail)
Note that by default, message levels `INFO` and `DEBUG` are
ignored by the logging framework. To enable those log levels:
>>> log.setLevel(logging.DEBUG)
To redirect all logging messages to journal regardless of where
they come from, attach it to the root logger:
>>> logging.root.addHandler(journal.JournalHandler())
For more complex configurations when using `dictConfig` or
`fileConfig`, specify `systemd.journal.JournalHandler` as the
handler class. Only standard handler configuration options
are supported: `level`, `formatter`, `filters`.
To attach journal MESSAGE_ID, an extra field is supported:
>>> import uuid
>>> mid = uuid.UUID('0123456789ABCDEF0123456789ABCDEF')
>>> log.warn("Message with ID", extra={'MESSAGE_ID': mid})
Fields to be attached to all messages sent through this
handler can be specified as keyword arguments. This probably
makes sense only for SYSLOG_IDENTIFIER and similar fields
which are constant for the whole program:
>>> journal.JournalHandler(SYSLOG_IDENTIFIER='my-cool-app')
The following journal fields will be sent:
`MESSAGE`, `PRIORITY`, `THREAD_NAME`, `CODE_FILE`, `CODE_LINE`,
`CODE_FUNC`, `LOGGER` (name as supplied to getLogger call),
`MESSAGE_ID` (optional, see above), `SYSLOG_IDENTIFIER` (defaults
to sys.argv[0]).
"""
def __init__(self, level=_logging.NOTSET, **kwargs):
super(JournalHandler, self).__init__(level)
for name in kwargs:
if not _valid_field_name(name):
raise ValueError('Invalid field name: ' + name)
if 'SYSLOG_IDENTIFIER' not in kwargs:
kwargs['SYSLOG_IDENTIFIER'] = _sys.argv[0]
self._extra = kwargs
def emit(self, record):
"""Write record as journal event.
MESSAGE is taken from the message provided by the
user, and PRIORITY, LOGGER, THREAD_NAME,
CODE_{FILE,LINE,FUNC} fields are appended
automatically. In addition, record.MESSAGE_ID will be
used if present.
"""
try:
msg = self.format(record)
pri = self.mapPriority(record.levelno)
mid = getattr(record, 'MESSAGE_ID', None)
send(msg,
MESSAGE_ID=mid,
PRIORITY=format(pri),
LOGGER=record.name,
THREAD_NAME=record.threadName,
CODE_FILE=record.pathname,
CODE_LINE=record.lineno,
CODE_FUNC=record.funcName,
**self._extra)
except Exception:
self.handleError(record)
@staticmethod
def mapPriority(levelno):
"""Map logging levels to journald priorities.
Since Python log level numbers are "sparse", we have
to map numbers in between the standard levels too.
"""
if levelno <= _logging.DEBUG:
return LOG_DEBUG
elif levelno <= _logging.INFO:
return LOG_INFO
elif levelno <= _logging.WARNING:
return LOG_WARNING
elif levelno <= _logging.ERROR:
return LOG_ERR
elif levelno <= _logging.CRITICAL:
return LOG_CRIT
else:
return LOG_ALERT<|fim▁end|> | 'COREDUMP_PID': int,
'COREDUMP_UID': int,
'COREDUMP_GID': int,
'COREDUMP_SESSION': int, |
<|file_name|>gulpfile.ts<|end_file_name|><|fim▁begin|>import * as gulp from 'gulp';
import * as runSequence from 'run-sequence';
import {loadTasks} from './tools/utils';
import {SEED_TASKS_DIR, PROJECT_TASKS_DIR} from './tools/config';
loadTasks(SEED_TASKS_DIR);
loadTasks(PROJECT_TASKS_DIR);
// --------------
// Build dev.
gulp.task('build.dev', (done: any) =>
runSequence('clean.dev',
'tslint',
// 'css-lint', // the old css task we no longer need
'scss-lint', // the task we created<|fim▁hole|> 'build.assets.dev',
// 'build.html_css', // the old css task we no longer need
'build.html_scss', // the task we created
'build.js.dev',
'build.index.dev',
done));
// --------------
// Build dev watch.
gulp.task('build.dev.watch', (done: any) =>
runSequence('build.dev',
'watch.dev',
done));
// --------------
// Build e2e.
gulp.task('build.e2e', (done: any) =>
runSequence('clean.dev',
'tslint',
'build.assets.dev',
'build.js.e2e',
'build.index.dev',
done));
// --------------
// Build prod.
gulp.task('build.prod', (done: any) =>
runSequence('clean.prod',
'tslint',
// 'css-lint', // the old css task we no longer need
'scss-lint', // the task we created
'build.assets.prod',
// 'build.html_css', // the old css task we no longer need
'build.html_scss', // the task we created
'copy.js.prod',
'build.js.prod',
'build.bundles',
'build.bundles.app',
'build.index.prod',
done));
// --------------
// Build test.
gulp.task('build.test', (done: any) =>
runSequence('clean.dev',
'tslint',
'build.assets.dev',
'build.js.test',
'build.index.dev',
done));
// --------------
// Build test watch.
gulp.task('build.test.watch', (done: any) =>
runSequence('build.test',
'watch.test',
done));
// --------------
// Build tools.
gulp.task('build.tools', (done: any) =>
runSequence('clean.tools',
'build.js.tools',
done));
// --------------
// Docs
gulp.task('docs', (done: any) =>
runSequence('build.docs',
'serve.docs',
done));
// --------------
// Serve dev
gulp.task('serve.dev', (done: any) =>
runSequence('build.dev',
'server.start',
'watch.dev',
done));
// --------------
// Serve e2e
gulp.task('serve.e2e', (done: any) =>
runSequence('build.e2e',
'server.start',
'watch.e2e',
done));
// --------------
// Serve prod
gulp.task('serve.prod', (done: any) =>
runSequence('build.prod',
'server.prod',
done));
// --------------
// Test.
gulp.task('test', (done: any) =>
runSequence('build.test',
'karma.start',
done));<|fim▁end|> | |
<|file_name|>specPageObjects.js<|end_file_name|><|fim▁begin|>"use strict";
// local import of the exported AngularPage class
var angularPage_1 = require('./angularPage');
// The jasmine typings are brought in via DefinitelyTyped ambient typings.<|fim▁hole|> it('should greet the named user', function () {
var angularHomepage = new angularPage_1.AngularHomepage();
angularHomepage.get();
angularHomepage.setName('Julie');
expect(angularHomepage.getGreeting()).toEqual('Hello Julie!');
});
});<|fim▁end|> | describe('angularjs homepage', function () { |
<|file_name|>program.rs<|end_file_name|><|fim▁begin|>//! A `Program` holds multiple `Function`.
use crate::il::*;
use crate::RC;
use std::collections::BTreeMap;
use std::fmt;
/// A representation of a program by `il::Function`.
///
/// Functions are stored by the index `Program` assigns at insertion time,
/// not by the address where they were discovered.
#[derive(Clone, Debug, Deserialize, Serialize, Default)]
pub struct Program {
    // Mapping of function indices (not addresses) to `Function`.
    functions: BTreeMap<usize, RC<Function>>,
    // The next index to assign to a function when added to the program.
    next_index: usize,
}
impl Program {
/// Creates a new, empty `Program`.
pub fn new() -> Program {
Program {
functions: BTreeMap::new(),
next_index: 0,
}
}
/// Search for a `Function` by its optional address, assuming one was assigned.
/// Returns the `Function` if found, or `None` if not found.
pub fn function_by_address(&self, address: u64) -> Option<&Function> {
for function in &self.functions {<|fim▁hole|> return Some(function.1);
}
}
None
}
/// Get all `Function` for this `Program`.
pub fn functions(&self) -> Vec<&Function> {
let mut v = Vec::new();
for f in &self.functions {
let f: &Function = &f.1;
v.push(f);
}
v
}
/// Get the underlying BTreeMap holding all `Function` for this `Program`.
pub fn functions_map(&self) -> BTreeMap<usize, &Function> {
self.functions
.iter()
.map(|(index, function)| (*index, function.as_ref()))
.collect::<BTreeMap<usize, &Function>>()
}
/// Get a `Function` by its index.
///
/// A `Function` index is assigned by `Program` and is not the address where the `Function`
/// was discovered.
pub fn function(&self, index: usize) -> Option<&Function> {
self.functions.get(&index).map(|f| f.as_ref())
}
/// Add a `Function` to the `Program`.
///
/// This will also assign an index to the `Function`.
pub fn add_function(&mut self, mut function: Function) {
function.set_index(Some(self.next_index));
self.functions.insert(self.next_index, RC::new(function));
self.next_index += 1;
}
/// Get a `Function` by its name.
pub fn function_by_name(&self, name: &str) -> Option<&Function> {
self.functions
.iter()
.find(|(_, function)| function.name() == name)
.map(|(_, function)| function.as_ref())
}
}
impl fmt::Display for Program {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
for function in &self.functions {
writeln!(f, "{}@{:08X}", function.1.name(), function.0)?
}
Ok(())
}
}<|fim▁end|> | if function.1.address() == address { |
<|file_name|>waiting.spec.js<|end_file_name|><|fim▁begin|>/// <reference types="cypress" />
context('Waiting', () => {
beforeEach(() => {
cy.visit('https://example.cypress.io/commands/waiting')
})
// BE CAREFUL of adding unnecessary wait times.
// https://on.cypress.io/best-practices#Unnecessary-Waiting
// https://on.cypress.io/wait
it('cy.wait() - wait for a specific amount of time', () => {
cy.get('.wait-input1').type('Wait 1000ms after typing')
cy.wait(1000)
cy.get('.wait-input2').type('Wait 1000ms after typing')
cy.wait(1000)
cy.get('.wait-input3').type('Wait 1000ms after typing')<|fim▁hole|>
it('cy.wait() - wait for a specific route', () => {
cy.server()
// Listen to GET to comments/1
cy.route('GET', 'comments/*').as('getComment')
// we have code that gets a comment when
// the button is clicked in scripts.js
cy.get('.network-btn').click()
// wait for GET comments/1
cy.wait('@getComment').its('status').should('eq', 200)
})
})<|fim▁end|> | cy.wait(1000)
}) |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>import sys
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
Base = declarative_base()<|fim▁hole|>
class User(Base):
    """A registered user of the catalog application."""
    __tablename__ = 'users'
    # Surrogate primary key.
    id = Column(Integer, primary_key = True)
    # Display name of the user.
    name = Column(String(250), nullable = False)
    # Contact e-mail address.
    email = Column(String(250), nullable = False)
    # URL of the user's profile picture (optional).
    picture = Column(String(250))
    # Authorization role (max 5 chars) -- TODO confirm the role vocabulary.
    role = Column(String(5))
class Category(Base):
    """A category grouping catalog items."""
    __tablename__ = 'category'
    # Surrogate primary key.
    id = Column(Integer, primary_key = True)
    # Human-readable category name.
    name = Column(String(80), nullable = False)
    @property
    def serialize(self):
        """Return a JSON-serializable dict of this category's public fields."""
        return {
            'id': self.id,
            'name': self.name
        }
class Item(Base):
    """A catalog item, owned by a user and belonging to a category."""
    __tablename__ = 'item'
    # Surrogate primary key.
    id = Column(Integer, primary_key = True)
    # Item title shown in listings.
    title = Column(String(250), nullable = False)
    # Free-form description (unbounded length).
    description = Column(String())
    # URL of the item's picture (optional).
    picture = Column(String(250))
    # Price stored as a string (max 10 chars) -- presumably formatted with
    # a currency symbol; TODO confirm and consider a numeric type.
    price = Column(String(10))
    # Owning category (many items per category).
    category_id = Column(Integer, ForeignKey('category.id'))
    category = relationship(Category)
    # Owning user (many items per user).
    user_id = Column(Integer, ForeignKey('users.id'))
    user = relationship(User)
    @property
    def serialize(self):
        """Return a JSON-serializable dict of this item's public fields.
        Note: picture/user_id are included/excluded as written; the dict
        intentionally omits user_id and price is not included either --
        matches current callers' expectations (TODO confirm)."""
        return {
            'id': self.id,
            'title': self.title,
            'description': self.description,
            'picture': self.picture,
            'category_id': self.category_id
        }
engine = create_engine('postgresql://catalog:catalog@localhost/catalog')
Base.metadata.create_all(engine)<|fim▁end|> | |
<|file_name|>generator.rs<|end_file_name|><|fim▁begin|>use super::super::{Grid, Symmetry, D, LP, P};
use super::*;
extern crate rand;
use rand::Rng;
/// Per-cell constraint on whether a cell may host a chain endpoint
/// (used by `generate_endpoint_constraint` and `GeneratorOption`).
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum Endpoint {
    /// No constraint: the cell may or may not be an endpoint.
    Any,
    /// The cell must be an endpoint.
    Forced,
    /// The cell must not be an endpoint.
    Prohibited,
}
/// Tunable options for placement/problem generation.
pub struct GeneratorOption<'a> {
    /// Chain-length threshold forwarded to `AnswerField::new`
    /// (exact semantics defined there -- TODO confirm).
    pub chain_threshold: i32,
    /// Optional per-cell endpoint constraints; `None` means all cells `Any`.
    pub endpoint_constraint: Option<&'a Grid<Endpoint>>,
    /// If true, prohibit endpoints in cells around the symmetry axes
    /// (see `PlacementGenerator::generate`).
    pub forbid_adjacent_clue: bool,
    /// Requested symmetry of the generated placement.
    pub symmetry: Symmetry,
    /// Optional upper bound on the number of clues (see `limit_clue_number`).
    pub clue_limit: Option<i32>,
    /// If true, pick seeds via `best_seed` instead of `random_seed`.
    pub prioritized_extension: bool,
}
pub fn generate_endpoint_constraint<R: Rng>(
height: i32,
width: i32,
empty_width: i32,
corner_constraint: Option<(i32, i32)>,
symmetry: Symmetry,
rng: &mut R,
) -> Grid<Endpoint> {
let mut ret = Grid::new(height, width, Endpoint::Any);
for d in 0..empty_width {
for y in 0..height {
ret[P(y, d)] = Endpoint::Prohibited;
ret[P(y, width - 1 - d)] = Endpoint::Prohibited;<|fim▁hole|> }
for x in 0..width {
ret[P(d, x)] = Endpoint::Prohibited;
ret[P(height - 1 - d, x)] = Endpoint::Prohibited;
}
}
if let Some((lo, hi)) = corner_constraint {
// upper left, upper right, lower left, lower right
let mut corner_positions = [-1, -1, -1, -1];
for i in 0..4 {
if corner_positions[i] != -1 {
continue;
}
corner_positions[i] = rng.gen_range(lo, hi + 1);
if symmetry.tetrad || symmetry.vertical || (symmetry.dyad && symmetry.horizontal) {
corner_positions[i ^ 1] = corner_positions[i];
}
if symmetry.tetrad || symmetry.horizontal || (symmetry.dyad && symmetry.vertical) {
corner_positions[i ^ 2] = corner_positions[i];
}
if symmetry.dyad || symmetry.tetrad || (symmetry.vertical && symmetry.horizontal) {
corner_positions[i ^ 3] = corner_positions[i];
}
}
for i in 0..4 {
let y = if (i & 2) == 0 {
corner_positions[i]
} else {
height - 1 - corner_positions[i]
};
let x = if (i & 1) == 0 {
corner_positions[i]
} else {
width - 1 - corner_positions[i]
};
ret[P(y, x)] = Endpoint::Forced;
}
}
ret
}
/// A type for an update of `AnswerField`.
/// - `Corner(e, f)`: both `e` and `f` must be `Line` to make a corner.
/// - `Endpoint(e, f)`: `e` must be `Line` but `f` must be `Blank` to make an endpoint.
/// - `Extend(e)`: `e` must be `Line` to extend an existing chain.
#[derive(Clone, Copy)]
enum FieldUpdate {
    // Two edges forming a corner.
    Corner(LP, LP),
    // Line edge plus blank edge forming an endpoint.
    Endpoint(LP, LP),
    // Single edge extending a chain.
    Extend(LP),
}
/// Beam-search generator of line placements; recycles `AnswerField`
/// allocations through an internal pool to avoid repeated allocation.
pub struct PlacementGenerator {
    // Free list of reusable `AnswerField` instances.
    pool: Vec<AnswerField>,
    // Candidate fields of the current beam iteration.
    active_fields: Vec<AnswerField>,
    // Candidate fields being collected for the next iteration.
    next_fields: Vec<AnswerField>,
    height: i32,
    width: i32,
    // Maximum number of candidates kept per beam iteration.
    beam_width: usize,
}
impl PlacementGenerator {
    /// Creates a generator for a `height` x `width` grid, pre-allocating
    /// the pool of reusable `AnswerField`s.
    pub fn new(height: i32, width: i32) -> PlacementGenerator {
        // Template field with neutral options; pool entries are cloned
        // from it and later overwritten via `copy_from`.
        let template = AnswerField::new(
            height,
            width,
            &GeneratorOption {
                chain_threshold: 1,
                endpoint_constraint: None,
                forbid_adjacent_clue: false,
                symmetry: Symmetry::none(),
                clue_limit: None,
                prioritized_extension: false,
            },
        );
        let beam_width = 100;
        PlacementGenerator {
            // 2 * beam_width + 1: enough for both beams plus one scratch
            // field in flight -- TODO confirm this sizing rationale.
            pool: vec![template; beam_width * 2 + 1],
            active_fields: Vec::with_capacity(beam_width),
            next_fields: Vec::with_capacity(beam_width),
            height,
            width,
            beam_width,
        }
    }
    /// Runs a randomized beam search for a line placement satisfying `opt`.
    /// Returns `Some(placement)` on success, `None` if the beam dies out.
    pub fn generate<R: Rng>(
        &mut self,
        opt: &GeneratorOption,
        rng: &mut R,
    ) -> Option<LinePlacement> {
        let beam_width = self.beam_width;
        let height = self.height;
        let width = self.width;
        let fields = &mut self.active_fields;
        // Normalize symmetry: tetrad implies dyad, and tetrad only makes
        // sense on square grids.
        let symmetry = Symmetry {
            dyad: opt.symmetry.dyad || opt.symmetry.tetrad,
            tetrad: opt.symmetry.tetrad && (height == width),
            ..opt.symmetry
        };
        let mut endpoint_constraint = match opt.endpoint_constraint {
            Some(e) => e.clone(),
            None => Grid::new(height, width, Endpoint::Any),
        };
        // With point symmetry on an odd-sized grid the center cell cannot
        // be an endpoint.
        if symmetry.dyad && height % 2 == 1 && width % 2 == 1 {
            endpoint_constraint[P(height / 2, width / 2)] = Endpoint::Prohibited;
        }
        if opt.forbid_adjacent_clue {
            // Prohibit endpoints in the cells straddling the symmetry axes,
            // where mirrored clues would be adjacent to each other.
            if symmetry.horizontal && height % 2 == 0 {
                for x in 0..width {
                    endpoint_constraint[P(height / 2, x)] = Endpoint::Prohibited;
                    endpoint_constraint[P(height / 2 + 1, x)] = Endpoint::Prohibited;
                }
            }
            // NOTE(review): this branch also tests `symmetry.horizontal`;
            // for the width/column axis `symmetry.vertical` looks intended
            // -- confirm before changing.
            if symmetry.horizontal && width % 2 == 0 {
                for y in 0..height {
                    endpoint_constraint[P(y, width / 2)] = Endpoint::Prohibited;
                    endpoint_constraint[P(y, width / 2 + 1)] = Endpoint::Prohibited;
                }
            }
            if symmetry.dyad {
                // The (up to) four cells around the grid center.
                endpoint_constraint[P(height / 2, width / 2)] = Endpoint::Prohibited;
                endpoint_constraint[P(height / 2, (width - 1) / 2)] = Endpoint::Prohibited;
                endpoint_constraint[P((height - 1) / 2, width / 2)] = Endpoint::Prohibited;
                endpoint_constraint[P((height - 1) / 2, (width - 1) / 2)] = Endpoint::Prohibited;
            }
        }
        // Shadow `opt` with the adjusted constraint and symmetry.
        let opt = GeneratorOption {
            endpoint_constraint: Some(&endpoint_constraint),
            symmetry,
            ..*opt
        };
        // Seed the beam with a single fresh field taken from the pool.
        let template = AnswerField::new(height, width, &opt);
        let mut field_base = self.pool.pop().unwrap();
        field_base.copy_from(&template);
        field_base.inspect_all();
        fields.push(field_base);
        loop {
            if fields.len() == 0 {
                // Beam died out without producing a valid placement.
                break;
            }
            let fields_next = &mut self.next_fields;
            // Up to 5 expansion attempts per surviving candidate.
            'outer: for _ in 0..(5 * fields.len()) {
                if fields_next.len() >= beam_width || fields.len() == 0 {
                    break;
                }
                // Pick a random candidate; drop it if dead or exhausted.
                let id = rng.gen_range(0, fields.len());
                if fields[id].is_invalid() || !fields[id].has_seed() {
                    self.pool.push(fields.swap_remove(id));
                    continue;
                }
                let mut field = self.pool.pop().unwrap();
                field.copy_from(&fields[id]);
                if !field.has_seed() {
                    continue;
                }
                // Choose an expansion seed, then apply a random update to it.
                let cd = if opt.prioritized_extension {
                    field.best_seed(5, rng)
                } else {
                    field.random_seed(rng)
                };
                let update = PlacementGenerator::choose_update(&field, cd, rng);
                PlacementGenerator::apply_update(&mut field, update);
                PlacementGenerator::check_invalidity(&mut field, &opt);
                if field.is_invalid() {
                    // The update broke the copy: recycle it and forbid the
                    // same update on the parent candidate.
                    self.pool.push(field);
                    PlacementGenerator::deny_update(&mut fields[id], cd, update);
                    PlacementGenerator::check_invalidity(&mut fields[id], &opt);
                    if fields[id].is_invalid() {
                        self.pool.push(fields.swap_remove(id));
                    }
                    continue;
                }
                if !field.has_seed() {
                    // Fully expanded: accept only if the answer is valid.
                    if !check_answer_validity(&field) {
                        self.pool.push(field);
                        continue 'outer;
                    }
                    let line_placement = field.as_line_placement();
                    self.pool.push(field);
                    // release used fields
                    for used in fields.drain(0..) {
                        self.pool.push(used);
                    }
                    for used in fields_next.drain(0..) {
                        self.pool.push(used);
                    }
                    return Some(line_placement);
                }
                fields_next.push(field);
            }
            // release old fields
            for old in fields.drain(0..) {
                self.pool.push(old);
            }
            // Promote the freshly built beam to active.
            ::std::mem::swap(fields, fields_next);
        }
        None
    }
pub fn generate_and_test<R: Rng>(
&mut self,
opt: &GeneratorOption,
rng: &mut R,
) -> Option<Grid<Clue>> {
if let Some(placement) = self.generate(opt, rng) {
if uniqueness_pretest(&placement) {
let problem = extract_problem(&placement, rng);
let ans = solve2(&problem, Some(2), false, true);
if ans.len() == 1 && !ans.found_not_fully_filled {
return Some(problem);
}
}
}
None
}
fn check_invalidity(field: &mut AnswerField, opt: &GeneratorOption) {
if field.is_invalid() {
return;
}
if let Some(limit) = opt.clue_limit {
limit_clue_number(field, limit);
if field.is_invalid() {
return;
}
}
if is_entangled(field) {
field.set_invalid();
return;
}
// TODO: better check for other symmetry types?
if opt.symmetry.dyad && check_symmetry(field) {
field.set_invalid();
}
}
fn choose_update<R: Rng>(field: &AnswerField, pos: LP, rng: &mut R) -> FieldUpdate {
let pos_vtx = pos.as_vertex();
let nbs = field.undecided_neighbors(pos);
if field.count_neighbor(pos) == (0, 2) {
let constraint = field.get_endpoint_constraint(pos_vtx);
if constraint != Endpoint::Forced && rng.gen::<f64>() < 0.9f64 {
FieldUpdate::Corner(nbs[0], nbs[1])
} else {
let i = rng.gen_range(0, 2);
FieldUpdate::Endpoint(nbs[i], nbs[1 - i])
}
} else {
let i = rng.gen_range(0, nbs.len());
FieldUpdate::Extend(nbs[i])
}
}
fn apply_update(field: &mut AnswerField, update: FieldUpdate) {
match update {
FieldUpdate::Corner(e, f) => {
field.decide(e, Edge::Line);
field.decide(f, Edge::Line);
}
FieldUpdate::Endpoint(e, f) => {
field.decide(e, Edge::Line);
field.decide(f, Edge::Blank);
}
FieldUpdate::Extend(e) => field.decide(e, Edge::Line),
}
}
fn deny_update(field: &mut AnswerField, pos: LP, update: FieldUpdate) {
match update {
FieldUpdate::Corner(_, _) => {
field.update_endpoint_constraint(pos.as_vertex(), Endpoint::Forced);
}
FieldUpdate::Endpoint(e, _) => field.decide(e, Edge::Blank),
FieldUpdate::Extend(e) => field.decide(e, Edge::Blank),
}
}
}
fn is_entangled(field: &AnswerField) -> bool {
let height = field.height();
let width = field.width();
let mut entangled_pairs = vec![];
for y in 1..(height - 1) {
for x in 1..(width - 1) {
if field.get_endpoint_constraint(P(y, x)) == Endpoint::Forced {
let pos = LP(y * 2, x * 2);
for &d in &FOUR_NEIGHBOURS {
if field.get_edge(pos + d) != Edge::Line {
continue;
}
let dr = d.rotate_clockwise();
if field.get_edge(pos + dr * 2 - d) == Edge::Line
&& field.get_edge(pos - dr * 2 - d) == Edge::Line
&& field.get_edge(pos + dr - d * 2) == Edge::Line
&& field.get_edge(pos - dr - d * 2) == Edge::Line
&& (field.get_edge(pos + dr * 2 + d) == Edge::Line
|| field.get_edge(pos + dr + d * 2) == Edge::Line)
&& (field.get_edge(pos - dr * 2 + d) == Edge::Line
|| field.get_edge(pos - dr + d * 2) == Edge::Line)
{
let u = field.root_from_coord(P(y, x));
let v = field.root_from_coord(P(y, x) - d);
if u < v {
entangled_pairs.push((u, v));
} else {
entangled_pairs.push((v, u));
}
}
}
}
}
}
entangled_pairs.sort();
for i in 1..entangled_pairs.len() {
if entangled_pairs[i - 1] == entangled_pairs[i] {
return true;
}
}
false
}
/// Extract a problem from `placement`.
/// Clue numbers are randomly assigned using `rng`.
pub fn extract_problem<R: Rng>(placement: &LinePlacement, rng: &mut R) -> Grid<Clue> {
let height = placement.height();
let width = placement.width();
let groups = match placement.extract_chain_groups() {
Some(groups) => groups,
None => panic!(),
};
let mut max_id = 0;
for y in 0..height {
for x in 0..width {
max_id = ::std::cmp::max(max_id, groups[P(y, x)]);
}
}
let mut shuffler = vec![0; (max_id + 1) as usize];
for i in 0..(max_id + 1) {
shuffler[i as usize] = i;
}
rng.shuffle(&mut shuffler);
let mut ret = Grid::new(height, width, NO_CLUE);
for y in 0..height {
for x in 0..width {
let pos = P(y, x);
if placement.is_endpoint(pos) {
ret[pos] = Clue(1 + shuffler[groups[pos] as usize]);
}
}
}
ret
}
/// Check whether the problem obtained from `placement` *may have* unique solution.
/// If `false` is returned, the problem is guaranteed to have several solutions.
/// However, even if `true` is returned, it is still possible that the problem has several solutions.
pub fn uniqueness_pretest(placement: &LinePlacement) -> bool {
let height = placement.height();
let width = placement.width();
let ids = match placement.extract_chain_groups() {
Some(ids) => ids,
None => return false,
};
if !uniqueness_pretest_horizontal(&ids) {
return false;
}
if height == width {
let mut ids_fliped = Grid::new(width, height, -1);
for y in 0..height {
for x in 0..width {
let pos = P(y, x);
ids_fliped[pos] = ids[pos];
}
}
if !uniqueness_pretest_horizontal(&ids_fliped) {
return false;
}
}
true
}
fn uniqueness_pretest_horizontal(ids: &Grid<i32>) -> bool {
let height = ids.height();
let width = ids.width();
let mut max_id = 0;
for y in 0..height {
for x in 0..width {
max_id = ::std::cmp::max(max_id, ids[P(y, x)]);
}
}
let mut positions = vec![vec![]; (max_id + 1) as usize];
for y in 0..height {
for x in 0..width {
let pos = P(y, x);
positions[ids[pos] as usize].push(pos);
}
}
for mode in 0..2 {
let mut checked = vec![false; (max_id + 1) as usize];
let mut screen_problem = Grid::new(height, width, UNUSED);
let mut used_cells = 0;
for x in 0..width {
let x = if mode == 0 { x } else { width - 1 - x };
for y in 0..height {
let pos = P(y, x);
let i = ids[pos];
if !checked[i as usize] {
for &loc in &positions[i as usize] {
let P(y, x) = loc;
let is_endpoint = 1
== (if y > 0 && ids[loc] == ids[loc + D(-1, 0)] {
1
} else {
0
} + if x > 0 && ids[loc] == ids[loc + D(0, -1)] {
1
} else {
0
} + if y < height - 1 && ids[loc] == ids[loc + D(1, 0)] {
1
} else {
0
} + if x < width - 1 && ids[loc] == ids[loc + D(0, 1)] {
1
} else {
0
});
screen_problem[loc] = if is_endpoint { Clue(i + 1) } else { NO_CLUE };
}
checked[i as usize] = true;
used_cells += positions[i as usize].len() as i32;
}
}
if used_cells >= height * width / 2 {
break;
}
}
let ans = solve2(&screen_problem, Some(2), false, true);
if ans.len() >= 2 || ans.found_not_fully_filled {
return false;
}
}
return true;
}
/// Check whether `field` is valid.
/// A field is considered invalid if it contains a self-touching line.
fn check_answer_validity(field: &AnswerField) -> bool {
let height = field.height();
let width = field.width();
let mut ids = Grid::new(height, width, -1);
let mut id = 1;
for y in 0..height {
for x in 0..width {
let pos = P(y, x);
if ids[pos] == -1 {
fill_line_id(pos, &field, &mut ids, id);
id += 1;
}
}
}
let mut end_count = vec![0; id as usize];
for y in 0..height {
for x in 0..width {
if field.count_neighbor(LP(y * 2, x * 2)) == (1, 0) {
end_count[ids[P(y, x)] as usize] += 1;
}
}
}
for i in 1..id {
if end_count[i as usize] != 2 {
return false;
}
}
for y in 0..(2 * height - 1) {
for x in 0..(2 * width - 1) {
if y % 2 == 1 && x % 2 == 0 {
if (ids[P(y / 2, x / 2)] == ids[P(y / 2 + 1, x / 2)])
!= (field.get_edge(LP(y, x)) == Edge::Line)
{
return false;
}
} else if y % 2 == 0 && x % 2 == 1 {
if (ids[P(y / 2, x / 2)] == ids[P(y / 2, x / 2 + 1)])
!= (field.get_edge(LP(y, x)) == Edge::Line)
{
return false;
}
}
}
}
true
}
/// Returns true if the line placements in `field` is too *symmetry*
fn check_symmetry(field: &AnswerField) -> bool {
let mut n_equal = 0i32;
let mut n_diff = 0i32;
let height = field.height();
let width = field.width();
for y in 0..(2 * height - 1) {
for x in 0..(2 * width - 1) {
if y % 2 != x % 2 {
let e1 = field.get_edge(LP(y, x));
let e2 = field.get_edge(LP(2 * height - 2 - y, 2 * width - 2 - x));
if e1 == Edge::Undecided && e2 == Edge::Undecided {
continue;
}
if e1 == e2 {
n_equal += 1;
} else {
n_diff += 1;
}
}
}
}
n_equal as f64 >= (n_equal + n_diff) as f64 * 0.85 + 4.0f64
}
fn limit_clue_number(field: &mut AnswerField, limit: i32) {
let limit = limit * 2;
if field.endpoint_forced_cells() > limit {
field.set_invalid();
} else {
if field.endpoint_forced_cells() == limit {
field.forbid_further_endpoint();
}
if field.endpoint_forced_cells() > limit {
field.set_invalid();
}
}
}
fn fill_line_id(pos: P, field: &AnswerField, ids: &mut Grid<i32>, id: i32) {
if ids[pos] != -1 {
return;
}
ids[pos] = id;
for &d in &FOUR_NEIGHBOURS {
if field.get_edge(LP::of_vertex(pos) + d) == Edge::Line {
fill_line_id(pos + d, field, ids, id);
}
}
}<|fim▁end|> | |
<|file_name|>MainController.java<|end_file_name|><|fim▁begin|>package org.peerbox.presenter;
import javafx.fxml.FXML;
import javafx.scene.Node;
import javafx.scene.layout.Pane;
public class MainController implements INavigatable {
@FXML
private Pane mainPane;<|fim▁hole|> /*
* (non-Javadoc)
*
* @see org.peerbox.presenter.INavigatable#setContent(javafx.scene.Node)
*/
@Override
public void setContent(Node content) {
mainPane.getChildren().clear();
mainPane.getChildren().add(content);
mainPane.requestLayout();
}
}<|fim▁end|> | |
<|file_name|>eg-enum-use.rs<|end_file_name|><|fim▁begin|>#![allow(dead_code)]
enum HTTPGood {
HTTP200,
HTTP300,
}
enum HTTPBad {
HTTP400,
HTTP500,<|fim▁hole|> use HTTPBad::*; // automatically use each name
let good = HTTP200; // equivalent to HTTPGood::HTTP200
let bad = HTTP500; // equivalent to HTTPBad::HTTP500
match good {
HTTP200 => println!("okay"),
HTTP300 => println!("redirect"),
}
match bad {
HTTP400 => println!("bad client"),
HTTP500 => println!("bad server"),
}
}
enum BrowserEvent {
// may be unit like
Render,
Clear,
// tuple structs
KeyPress(char),
LoadFrame(String),
// or structs
Click { x: i64, y: i64 },
}
fn browser_debug(event: BrowserEvent) {
match event {
BrowserEvent::Render => println!("render page"),
BrowserEvent::Clear => println!("..."),
BrowserEvent::KeyPress(c) => println!("pressed `{}`", c),
BrowserEvent::LoadFrame(u) => println!("fetch `{}`", u),
BrowserEvent::Click {x,y} => {
println!("clicked at `{},{}`", x ,y);
},
}
}
fn main(){
server_debug();
let render = BrowserEvent::Render;
let clear = BrowserEvent::Clear;
let keypress = BrowserEvent::KeyPress('z');
let frame = BrowserEvent::LoadFrame("example.com".to_owned()); // creates an owned String from string slice
let click = BrowserEvent::Click {x: 120, y: 240};
browser_debug(render);
browser_debug(clear);
browser_debug(keypress);
browser_debug(frame);
browser_debug(click);
}<|fim▁end|> | }
fn server_debug() {
use HTTPGood::{HTTP200, HTTP300}; // explicitly pick name without manual scoping |
<|file_name|>num_format.rs<|end_file_name|><|fim▁begin|>//! handles creating printed output for numeric substitutions
use std::env;
use std::vec::Vec;
use cli;
use super::format_field::{FormatField, FieldType};
use super::formatter::{Formatter, FormatPrimitive, InPrefix, Base};
use super::formatters::intf::Intf;
use super::formatters::floatf::Floatf;
use super::formatters::cninetyninehexfloatf::CninetyNineHexFloatf;
use super::formatters::scif::Scif;
use super::formatters::decf::Decf;
pub fn warn_expected_numeric(pf_arg: &String) {
// important: keep println here not print
cli::err_msg(&format!("{}: expected a numeric value", pf_arg));
}
// when character costant arguments have excess characters
// issue a warning when POSIXLY_CORRECT is not set
fn warn_char_constant_ign(remaining_bytes: Vec<u8>) {
match env::var("POSIXLY_CORRECT") {
Ok(_) => {}
Err(e) => {
match e {
env::VarError::NotPresent => {
cli::err_msg(&format!("warning: {:?}: character(s) following character \
constant have been ignored",
&*remaining_bytes));
}
_ => {}
}
}
}
}
// this function looks at the first few
// characters of an argument and returns a value if we can learn
// a value from that (e.g. no argument? return 0, char constant? ret value)
fn get_provided(str_in_opt: Option<&String>) -> Option<u8> {
const C_S_QUOTE: u8 = 39;
const C_D_QUOTE: u8 = 34;
match str_in_opt {
Some(str_in) => {
let mut byte_it = str_in.bytes();
if let Some(qchar) = byte_it.next() {<|fim▁hole|> match qchar {
C_S_QUOTE | C_D_QUOTE => {
return Some(match byte_it.next() {
Some(second_byte) => {
let mut ignored: Vec<u8> = Vec::new();
while let Some(cont) = byte_it.next() {
ignored.push(cont);
}
if ignored.len() > 0 {
warn_char_constant_ign(ignored);
}
second_byte as u8
}
// no byte after quote
None => {
let so_far = (qchar as u8 as char).to_string();
warn_expected_numeric(&so_far);
0 as u8
}
});
}
// first byte is not quote
_ => {
return None;
}
// no first byte
}
} else {
Some(0 as u8)
}
}
None => Some(0),
}
}
// takes a string and returns
// a sign,
// a base,
// and an offset for index after all
// initial spacing, sign, base prefix, and leading zeroes
fn get_inprefix(str_in: &String, field_type: &FieldType) -> InPrefix {
let mut str_it = str_in.chars();
let mut ret = InPrefix {
radix_in: Base::Ten,
sign: 1,
offset: 0,
};
let mut topchar = str_it.next().clone();
// skip spaces and ensure topchar is the first non-space char
// (or None if none exists)
loop {
match topchar {
Some(' ') => {
ret.offset += 1;
topchar = str_it.next();
}
_ => {
break;
}
}
}
// parse sign
match topchar {
Some('+') => {
ret.offset += 1;
topchar = str_it.next();
}
Some('-') => {
ret.sign = -1;
ret.offset += 1;
topchar = str_it.next();
}
_ => {}
}
// we want to exit with offset being
// the index of the first non-zero
// digit before the decimal point or
// if there is none, the zero before the
// decimal point, or, if there is none,
// the decimal point.
// while we are determining the offset
// we will ensure as a convention
// the offset is always on the first character
// that we are yet unsure if it is the
// final offset. If the zero could be before
// a decimal point we don't move past the zero.
let mut is_hex = false;
if Some('0') == topchar {
if let Some(base) = str_it.next() {
// lead zeroes can only exist in
// octal and hex base
let mut do_clean_lead_zeroes = false;
match base {
'x' | 'X' => {
is_hex = true;
ret.offset += 2;
ret.radix_in = Base::Hex;
do_clean_lead_zeroes = true;
}
e @ '0'...'9' => {
ret.offset += 1;
match *field_type {
FieldType::Intf => {
ret.radix_in = Base::Octal;
}
_ => {}
}
if e == '0' {
do_clean_lead_zeroes = true;
}
}
_ => {}
}
if do_clean_lead_zeroes {
let mut first = true;
while let Some(ch_zero) = str_it.next() {
// see notes on offset above:
// this is why the offset for octals and decimals
// that reach this branch is 1 even though
// they have already eaten the characters '00'
// this is also why when hex encounters its
// first zero it does not move its offset
// forward because it does not know for sure
// that it's current offset (of that zero)
// is not the final offset,
// whereas at that point octal knows its
// current offset is not the final offset.
match ch_zero {
'0' => {
if !(is_hex && first) {
ret.offset += 1;
}
}
// if decimal, keep last zero if one exists
// (it's possible for last zero to
// not exist at this branch if we're in hex input)
'.' => break,
// other digit, etc.
_ => {
if !(is_hex && first) {
ret.offset += 1;
}
break;
}
}
if first {
first = false;
}
}
}
}
}
ret
}
// this is the function a Sub's print will delegate to
// if it is a numeric field, passing the field details
// and an iterator to the argument
pub fn num_format(field: &FormatField, in_str_opt: Option<&String>) -> Option<String> {
let fchar = field.field_char.clone();
// num format mainly operates by further delegating to one of
// several Formatter structs depending on the field
// see formatter.rs for more details
// to do switch to static dispatch
let fmtr: Box<Formatter> = match *field.field_type {
FieldType::Intf => Box::new(Intf::new()),
FieldType::Floatf => Box::new(Floatf::new()),
FieldType::CninetyNineHexFloatf => Box::new(CninetyNineHexFloatf::new()),
FieldType::Scif => Box::new(Scif::new()),
FieldType::Decf => Box::new(Decf::new()),
_ => {
panic!("asked to do num format with non-num fieldtype");
}
};
let prim_opt=
// if we can get an assumed value from looking at the first
// few characters, use that value to create the FormatPrimitive
if let Some(provided_num) = get_provided(in_str_opt) {
let mut tmp : FormatPrimitive = Default::default();
match fchar {
'u' | 'i' | 'd' => {
tmp.pre_decimal = Some(
format!("{}", provided_num));
},
'x' | 'X' => {
tmp.pre_decimal = Some(
format!("{:x}", provided_num));
},
'o' => {
tmp.pre_decimal = Some(
format!("{:o}", provided_num));
},
'e' | 'E' | 'g' | 'G' => {
let as_str = format!("{}", provided_num);
let inprefix = get_inprefix(
&as_str,
&field.field_type
);
tmp=fmtr.get_primitive(field, &inprefix, &as_str)
.expect("err during default provided num");
},
_ => {
tmp.pre_decimal = Some(
format!("{}", provided_num));
tmp.post_decimal = Some(String::from("0"));
}
}
Some(tmp)
} else {
// otherwise we'll interpret the argument as a number
// using the appropriate Formatter
let in_str = in_str_opt.expect(
"please send the devs this message:
\n get_provided is failing to ret as Some(0) on no str ");
// first get information about the beginning of the
// numeric argument that would be useful for
// any formatter (int or float)
let inprefix = get_inprefix(
in_str,
&field.field_type
);
// then get the FormatPrimitive from the Formatter
fmtr.get_primitive(field, &inprefix, in_str)
};
// if we have a formatPrimitive, print its results
// according to the field-char appropriate Formatter
if let Some(prim) = prim_opt {
Some(fmtr.primitive_to_str(&prim, field.clone()))
} else {
None
}
}<|fim▁end|> | |
<|file_name|>test_mode.cpp<|end_file_name|><|fim▁begin|>// (C) Copyright Nick Thompson and Matt Borland 2020.
// Use, modification and distribution are subject to the
// Boost Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
#include <random>
#include <boost/math/statistics/univariate_statistics.hpp>
#include <benchmark/benchmark.h>
template <class Z>
void test_mode(benchmark::State& state)
{
using boost::math::statistics::sorted_mode;
std::random_device rd;
std::mt19937_64 mt(rd());
std::uniform_int_distribution<> dist {1, 10};
auto gen = [&dist, &mt](){return dist(mt);};
std::vector<Z> v(state.range(0));
std::generate(v.begin(), v.end(), gen);
for (auto _ : state)
{
std::vector<Z> modes;
benchmark::DoNotOptimize(sorted_mode(v.begin(), v.end(), std::back_inserter(modes)));
}
state.SetComplexityN(state.range(0));
}
template <class Z>
void sequential_test_mode(benchmark::State& state)
{
using boost::math::statistics::sorted_mode;
std::vector<Z> v(state.range(0));
size_t current_num {1};
// produces {1, 2, 3, 4, 5...}
for(size_t i {}; i < v.size(); ++i)
{
v[i] = current_num;
++current_num;
}
for (auto _ : state)
{
std::vector<Z> modes;
benchmark::DoNotOptimize(sorted_mode(v, std::back_inserter(modes)));
}
state.SetComplexityN(state.range(0));
}
template <class Z>
void sequential_pairs_test_mode(benchmark::State& state)
{
using boost::math::statistics::sorted_mode;
std::vector<Z> v(state.range(0));
size_t current_num {1};
size_t current_num_counter {};
// produces {1, 1, 2, 2, 3, 3, ...}
for(size_t i {}; i < v.size(); ++i)
{
v[i] = current_num;
++current_num_counter;
if(current_num_counter > 2)
{
++current_num;
current_num_counter = 0;
}
}
for (auto _ : state)
{
std::vector<Z> modes;
benchmark::DoNotOptimize(sorted_mode(v, std::back_inserter(modes)));
}
state.SetComplexityN(state.range(0));
}
template <class Z>
void sequential_multiple_test_mode(benchmark::State& state)
{
using boost::math::statistics::sorted_mode;
std::vector<Z> v(state.range(0));
size_t current_num {1};
size_t current_num_counter {};
// produces {1, 2, 2, 3, 3, 3, 4, 4, 4, 4, ...}
for(size_t i {}; i < v.size(); ++i)
{
v[i] = current_num;
++current_num_counter;
if(current_num_counter > current_num)
{
++current_num;
current_num_counter = 0;
}
}
for (auto _ : state)<|fim▁hole|> std::vector<Z> modes;
benchmark::DoNotOptimize(sorted_mode(v, std::back_inserter(modes)));
}
state.SetComplexityN(state.range(0));
}
BENCHMARK_TEMPLATE(test_mode, int32_t)->RangeMultiplier(2)->Range(1<<1, 1<<22)->Complexity();
BENCHMARK_TEMPLATE(test_mode, int64_t)->RangeMultiplier(2)->Range(1<<1, 1<<22)->Complexity();
BENCHMARK_TEMPLATE(test_mode, uint32_t)->RangeMultiplier(2)->Range(1<<1, 1<<22)->Complexity();
BENCHMARK_TEMPLATE(sequential_test_mode, int32_t)->RangeMultiplier(2)->Range(1<<1, 1<<22)->Complexity();
BENCHMARK_TEMPLATE(sequential_test_mode, int64_t)->RangeMultiplier(2)->Range(1<<1, 1<<22)->Complexity();
BENCHMARK_TEMPLATE(sequential_test_mode, uint32_t)->RangeMultiplier(2)->Range(1<<1, 1<<22)->Complexity();
BENCHMARK_TEMPLATE(sequential_pairs_test_mode, int32_t)->RangeMultiplier(2)->Range(1<<1, 1<<22)->Complexity();
BENCHMARK_TEMPLATE(sequential_pairs_test_mode, int64_t)->RangeMultiplier(2)->Range(1<<1, 1<<22)->Complexity();
BENCHMARK_TEMPLATE(sequential_pairs_test_mode, uint32_t)->RangeMultiplier(2)->Range(1<<1, 1<<22)->Complexity();
BENCHMARK_TEMPLATE(sequential_multiple_test_mode, int32_t)->RangeMultiplier(2)->Range(1<<1, 1<<22)->Complexity();
BENCHMARK_TEMPLATE(sequential_multiple_test_mode, int64_t)->RangeMultiplier(2)->Range(1<<1, 1<<22)->Complexity();
BENCHMARK_TEMPLATE(sequential_multiple_test_mode, uint32_t)->RangeMultiplier(2)->Range(1<<1, 1<<22)->Complexity();
BENCHMARK_MAIN();<|fim▁end|> | { |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>class SqliteMixin:
config_params = {'DATASTORE': 'sqlite://'}
class OdmUtils:
config_file = 'tests.odm'
async def _create_task(self, token, subject='This is a task', person=None,
**data):
data['subject'] = subject
if person:
data['assigned'] = person['id']
request = await self.client.post(self.api_url('tasks'),
json=data,
token=token)
data = self.json(request.response, 201)
self.assertIsInstance(data, dict)
self.assertTrue('id' in data)
self.assertEqual(data['subject'], subject)
self.assertTrue('created' in data)
self.assertEqual(len(request.cache.new_items), 1)
self.assertEqual(request.cache.new_items[0]['id'], data['id'])
self.assertFalse(request.cache.new_items_before_commit)
return data
async def _get_task(self, token, id):
request = await self.client.get(
'/tasks/{}'.format(id),
token=token)
response = request.response
self.assertEqual(response.status_code, 200)
data = self.json(response)
self.assertIsInstance(data, dict)
self.assertTrue('id' in data)<|fim▁hole|>
async def _delete_task(self, token, id):
request = await self.client.delete(
'/tasks/{}'.format(id),
token=token)
response = request.response
self.assertEqual(response.status_code, 204)
async def _create_person(self, token, username, name=None):
name = name or username
request = await self.client.post(
'/people',
json={'username': username, 'name': name},
token=token)
data = self.json(request.response, 201)
self.assertIsInstance(data, dict)
self.assertTrue('id' in data)
self.assertEqual(data['name'], name)
return data
async def _update_person(self, token, id, username=None, name=None):
request = await self.client.patch(
self.api_url('people/%s' % id),
json={'username': username, 'name': name},
token=token
)
data = self.json(request.response, 200)
self.assertIsInstance(data, dict)
self.assertTrue('id' in data)
if name:
self.assertEqual(data['name'], name)
return data<|fim▁end|> | return data |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>var hb = require('handlebars')
, fs = require('vinyl-fs')
, map = require('vinyl-map')
module.exports = function (opts, cb) {
if (!opts || typeof opts === 'function') throw new Error('opts is required')
if (!opts.origin) throw new Error('opts.origin is required')
if (!opts.target) throw new Error('opts.target is required')
if (!opts.context) throw new Error('opts.context is required')
var render = map(function (code, filename) {
var t = hb.compile(code.toString())
return t(opts.context)
})
fs.src([opts.origin+'/**'])<|fim▁hole|> .pipe(render)
.pipe(fs.dest(opts.target))
.on('error', cb)
.on('end', cb)
}<|fim▁end|> | |
<|file_name|>struct-literal-in-for.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// compile-flags: -Z parse-only
struct Foo {
x: isize,
}
impl Foo {
fn hi(&self) -> bool {
true
}
}
fn main() {<|fim▁hole|> }.hi() { //~ ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found `{`
println!("yo");
}
}<|fim▁end|> | for x in Foo {
x: 3 //~ ERROR expected type, found `3` |
<|file_name|>user.py<|end_file_name|><|fim▁begin|>import logging
from google.appengine.ext import ndb
from endpoints_proto_datastore.ndb.model import EndpointsModel, EndpointsAliasProperty
<|fim▁hole|> name = ndb.StringProperty()<|fim▁end|> | class UserModel(EndpointsModel):
email = ndb.StringProperty() |
<|file_name|>world.js<|end_file_name|><|fim▁begin|>'use strict';
var request = require('request')
var uuid = require('node-uuid')
var env = require('./env')
var World = function World(callback) {
var self = this
this.collection = null
this.lastResponse = null
this.doc1 = null
this.doc2 = null
this.generateCollectionId = function() {
this.collection = uuid.v1()
}
this.generateDocumentId = function() {
// MongoDB either accepts a 12 byte string or 24 hex characters
this.doc1 = uuid.v1().replace(/-/gi, '').substring(0,24)
}
this.get = function(path, callback) {
var uri = this.uri(path)
request.get(uri, function(error, response) {
if (error) {
return callback.fail(new Error('Error on GET request to ' + uri +
': ' + error.message))
}
self.lastResponse = response
callback()
})
}
this.post = function(path, requestBody, callback) {
var uri = this.uri(path)
request({url: uri, body: requestBody, method: 'POST'},
function(error, response) {
if (error) {
return callback(new Error('Error on POST request to ' + uri + ': ' +
error.message))
}
self.lastResponse = response
callback(null, self.lastResponse.headers.location)
})
}
this.put = function(path, requestBody, callback) {
var uri = this.uri(path)
request({url: uri, body: requestBody, method: 'PUT'},
function(error, response) {
if (error) {
return callback(new Error('Error on PUT request to ' + uri + ': ' +
error.message))
}
self.lastResponse = response
callback(null, self.lastResponse.headers.locations)
})
}
this.delete = function(path, callback) {
var uri = this.uri(path)
request({url: uri, method: 'DELETE'},
function(error, response) {
if (error) {
return callback(new Error('Error on DELETE request to ' + uri + ': ' +
error.message))
}
self.lastResponse = response
callback()
})
}
this.options = function(path, callback) {
var uri = this.uri(path)
request({'uri': uri, method: 'OPTIONS'}, function(error, response) {
if (error) {
return callback.fail(new Error('Error on OPTIONS request to ' + uri +
': ' + error.message))
}
self.lastResponse = response
callback()
})
}
this.rootPath = function() {
return '/'
}
this.rootUri = function() {
return this.uri(this.rootPath())<|fim▁hole|> return '/' + collection
}
this.collectionUri = function(collection) {
return this.uri(this.collectionPath(collection))
}
this.documentPath = function(collection, document) {
return this.collectionPath(collection) + '/' + document
}
this.documentUri = function(collection, document) {
return this.uri(this.documentPath(collection, document))
}
this.uri = function(path) {
return env.BASE_URL + path
}
callback()
}
exports.World = World<|fim▁end|> | }
this.collectionPath = function(collection) { |
<|file_name|>logger.py<|end_file_name|><|fim▁begin|><|fim▁hole|>except ImportError:
threading = None
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
from debug_toolbar.panels import DebugPanel
class ThreadTrackingHandler(logging.Handler):
def __init__(self):
if threading is None:
raise NotImplementedError("threading module is not available, \
the logging panel cannot be used without it")
logging.Handler.__init__(self)
self.records = {} # a dictionary that maps threads to log records
def emit(self, record):
self.get_records().append(record)
def get_records(self, thread=None):
"""
Returns a list of records for the provided thread, of if none is provided,
returns a list for the current thread.
"""
if thread is None:
thread = threading.currentThread()
if thread not in self.records:
self.records[thread] = []
return self.records[thread]
def clear_records(self, thread=None):
if thread is None:
thread = threading.currentThread()
if thread in self.records:
del self.records[thread]
handler = ThreadTrackingHandler()
logging.root.setLevel(logging.NOTSET)
logging.root.addHandler(handler)
class LoggingPanel(DebugPanel):
name = 'Logging'
has_content = True
def process_request(self, request):
handler.clear_records()
def get_and_delete(self):
records = handler.get_records()
handler.clear_records()
return records
def nav_title(self):
return _("Logging")
def nav_subtitle(self):
return "%s message%s" % (len(handler.get_records()), (len(handler.get_records()) == 1) and '' or 's')
def title(self):
return 'Log Messages'
def url(self):
return ''
def content(self):
records = []
for record in self.get_and_delete():
records.append({
'message': record.getMessage(),
'time': datetime.datetime.fromtimestamp(record.created),
'level': record.levelname,
'file': record.pathname,
'line': record.lineno,
})
return render_to_string('debug_toolbar/panels/logger.html', {'records': records})<|fim▁end|> | import datetime
import logging
try:
import threading |
<|file_name|>test_modjk.py<|end_file_name|><|fim▁begin|>"""
:codeauthor: Jayesh Kariya <[email protected]>
"""
import pytest
import salt.modules.modjk as modjk
from tests.support.mock import patch
@pytest.fixture
def configure_loader_modules():
return {modjk: {}}
def test_version():
"""
Test for return the modjk version
"""
with patch.object(
modjk, "_do_http", return_value={"worker.jk_version": "mod_jk/1.2.37"}
):<|fim▁hole|>
def test_get_running():
"""
Test for get the current running config (not from disk)
"""
with patch.object(modjk, "_do_http", return_value={}):
assert modjk.get_running() == {}
def test_dump_config():
"""
Test for dump the original configuration that was loaded from disk
"""
with patch.object(modjk, "_do_http", return_value={}):
assert modjk.dump_config() == {}
def test_list_configured_members():
"""
Test for return a list of member workers from the configuration files
"""
with patch.object(modjk, "_do_http", return_value={}):
assert modjk.list_configured_members("loadbalancer1") == []
with patch.object(
modjk,
"_do_http",
return_value={"worker.loadbalancer1.balance_workers": "SALT"},
):
assert modjk.list_configured_members("loadbalancer1") == ["SALT"]
def test_workers():
"""
Test for return a list of member workers and their status
"""
with patch.object(modjk, "_do_http", return_value={"worker.list": "Salt1,Salt2"}):
assert modjk.workers() == {}
def test_recover_all():
    """recover_all reactivates every worker of the load balancer."""
    with patch.object(modjk, "_do_http", return_value={}):
        assert modjk.recover_all("loadbalancer1") == {}
    balancer_reply = {"worker.loadbalancer1.balance_workers": "SALT"}
    status = {"activation": "ACT", "state": "OK"}
    with patch.object(modjk, "_do_http", return_value=balancer_reply):
        with patch.object(modjk, "worker_status", return_value=status):
            assert modjk.recover_all("loadbalancer1") == {"SALT": status}
def test_reset_stats():
    """reset_stats succeeds when mod_jk reports an OK result."""
    ok_reply = {"worker.result.type": "OK"}
    with patch.object(modjk, "_do_http", return_value=ok_reply):
        assert modjk.reset_stats("loadbalancer1")
def test_lb_edit():
    """lb_edit applies the given load balancer settings."""
    ok_reply = {"worker.result.type": "OK"}
    settings = {"vlr": 1, "vlt": 60}
    with patch.object(modjk, "_do_http", return_value=ok_reply):
        assert modjk.lb_edit("loadbalancer1", settings)
def test_bulk_stop():
    """bulk_stop stops a list of workers on one load balancer."""
    ok_reply = {"worker.result.type": "OK"}
    nodes = ["node1", "node2", "node3"]
    with patch.object(modjk, "_do_http", return_value=ok_reply):
        assert modjk.bulk_stop(nodes, "loadbalancer1")
def test_bulk_activate():
    """bulk_activate activates a list of workers on one load balancer."""
    ok_reply = {"worker.result.type": "OK"}
    nodes = ["node1", "node2", "node3"]
    with patch.object(modjk, "_do_http", return_value=ok_reply):
        assert modjk.bulk_activate(nodes, "loadbalancer1")
def test_bulk_disable():
    """bulk_disable disables a list of workers on one load balancer."""
    ok_reply = {"worker.result.type": "OK"}
    nodes = ["node1", "node2", "node3"]
    with patch.object(modjk, "_do_http", return_value=ok_reply):
        assert modjk.bulk_disable(nodes, "loadbalancer1")
def test_bulk_recover():
    """bulk_recover recovers a list of workers on one load balancer."""
    ok_reply = {"worker.result.type": "OK"}
    nodes = ["node1", "node2", "node3"]
    with patch.object(modjk, "_do_http", return_value=ok_reply):
        assert modjk.bulk_recover(nodes, "loadbalancer1")
def test_worker_status():
    """worker_status extracts activation/state, falsy when unknown."""
    reply = {"worker.node1.activation": "ACT", "worker.node1.state": "OK"}
    with patch.object(modjk, "_do_http", return_value=reply):
        assert modjk.worker_status("node1") == {"activation": "ACT", "state": "OK"}
    with patch.object(modjk, "_do_http", return_value={}):
        assert not modjk.worker_status("node1")
def test_worker_recover():
    """worker_recover returns an empty mapping when nothing is reported."""
    empty_reply = {}
    with patch.object(modjk, "_do_http", return_value=empty_reply):
        assert modjk.worker_recover("node1", "loadbalancer1") == {}
def test_worker_disable():
    """worker_disable succeeds when mod_jk reports an OK result."""
    ok_reply = {"worker.result.type": "OK"}
    with patch.object(modjk, "_do_http", return_value=ok_reply):
        assert modjk.worker_disable("node1", "loadbalancer1")
def test_worker_activate():
    """worker_activate succeeds when mod_jk reports an OK result."""
    ok_reply = {"worker.result.type": "OK"}
    with patch.object(modjk, "_do_http", return_value=ok_reply):
        assert modjk.worker_activate("node1", "loadbalancer1")
def test_worker_stop():
    """worker_stop succeeds when mod_jk reports an OK result."""
    ok_reply = {"worker.result.type": "OK"}
    with patch.object(modjk, "_do_http", return_value=ok_reply):
        assert modjk.worker_stop("node1", "loadbalancer1")
def test_worker_edit():
    """worker_edit applies per-worker settings."""
    ok_reply = {"worker.result.type": "OK"}
    settings = {"vwf": 500, "vwd": 60}
    with patch.object(modjk, "_do_http", return_value=ok_reply):
        assert modjk.worker_edit("node1", "loadbalancer1", settings)
|
<|file_name|>h5ls.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
"""*.h5 の値の最小・最大などを確認するスクリプト。"""
import argparse
import pathlib
import sys
import h5py
import numpy as np
try:
import pytoolkit as tk
except ImportError:
sys.path.insert(0, str(pathlib.Path(__file__).resolve().parent.parent.parent))
import pytoolkit as tk
logger = tk.log.get(__name__)
def main():
tk.utils.better_exceptions()
tk.log.init(None)
parser = argparse.ArgumentParser(description="*.h5 の値の最小・最大などを確認するスクリプト。")
parser.add_argument("model_path", type=pathlib.Path, help="対象ファイルのパス(*.h5)")
args = parser.parse_args()
logger.info(f"{args.model_path} Loading...")
absmax_list = []
with h5py.File(args.model_path, mode="r") as f:
model_weights = f["model_weights"]
layer_names = model_weights.attrs["layer_names"]<|fim▁hole|> weight_names = g.attrs["weight_names"]
for weight_name in weight_names:
w = np.asarray(g[weight_name])
key = f"/model_weights/{layer_name}/{weight_name}"
if w.size == 1:
logger.info(f"{key}\t value={np.ravel(w)[0]:.2f}")
else:
logger.info(
f"{key}\t min={w.min():.2f} max={w.max():.2f} mean={w.mean():.2f} std={w.std():.2f}"
)
absmax_list.append((key, np.abs(w).max()))
logger.info("abs Top-10:")
for key, absvalue in list(sorted(absmax_list, key=lambda x: -x[1]))[:10]:
logger.info(f"{absvalue:6.1f}: {key}")
if __name__ == "__main__":
main()<|fim▁end|> | for layer_name in layer_names:
g = model_weights[layer_name] |
<|file_name|>finddata.py<|end_file_name|><|fim▁begin|>import pathlib
import importlib
import sys
__all__ = ['sample', 'sampleTxt', 'sampleBin']
<|fim▁hole|>datadir = this.parent.parent / 'data'
loader = importlib.machinery.SourceFileLoader('sample', str(datadir / 'sample.py'))
sample = loader.load_module()
sampleTxt = datadir / 'sample.txt'
sampleBin = datadir / 'sample.bin'<|fim▁end|> | this = pathlib.Path(__file__) |
<|file_name|>utils.rs<|end_file_name|><|fim▁begin|>use std::path::PathBuf;
use std::process;
pub fn ensure_file_exists(path: &PathBuf) {
if !path.exists() {
eprintln!("the given path does not exist: {:?}", path);
process::exit(1);
}
if !path.is_file() {
eprintln!("the given path is not a file: {:?}", path);
<|fim▁hole|>
}<|fim▁end|> | process::exit(1);
}
|
<|file_name|>cursor.go<|end_file_name|><|fim▁begin|>package arango
import (
"encoding/json"
"fmt"
)
//Cursor represents a collection of items that you can iterate over.
//This library will produce a Cursor type whenever ArangoDb produces
//a list of objects that match a query.
type Cursor struct {
db *Database
json cursorResult<|fim▁hole|>
// cursorResult is the wire representation of an ArangoDB cursor
// response; the fields mirror the JSON attributes returned by the
// server's cursor endpoints (see Next/Close for the HTTP calls).
type cursorResult struct {
	Result  []json.RawMessage `json:"result"`  // current batch, decoded lazily per item
	HasMore bool              `json:"hasMore"` // server has further batches beyond Result
	Count   int               `json:"count"`   // total match count reported by the server
	Error   bool              `json:"error"`   // server-side error flag
	Code    int               `json:"code"`    // HTTP-like status code from the server
	Id      string            `json:"id"`      // cursor id; empty when no server cursor exists
}
// HasMore reports whether the cursor can still yield documents:
// either items remain buffered locally, or the server reports that
// more batches are available.
func (c Cursor) HasMore() bool {
	buffered := len(c.json.Result) > 0
	return buffered || c.json.HasMore
}
// Count returns the total number of matching documents as reported
// in the server's cursor payload.
func (c Cursor) Count() int {
	return c.json.Count
}
// Error reports whether the server flagged the cursor response as an
// error.
func (c Cursor) Error() bool {
	return c.json.Error
}
// Code returns the status code carried in the server's cursor
// payload.
func (c Cursor) Code() int {
	return c.json.Code
}
// Next retrieves the next item from the cursor and unmarshals it into
// next.
//
// According to the arango docs: even if hasMore returns true, the next
// call might still return no documents. If, however, hasMore is false,
// then the cursor is exhausted and the client can stop.
func (c *Cursor) Next(next interface{}) error {
	// popFront decodes the first buffered result into next and drops
	// it from the local buffer.
	popFront := func() error {
		if err := json.Unmarshal(c.json.Result[0], next); err != nil {
			return newError(err.Error())
		}
		c.json.Result = c.json.Result[1:]
		return nil
	}

	if len(c.json.Result) > 0 {
		return popFront()
	}

	if c.json.Id == "" {
		return newError("You called Next on a cursor that is invalid or doesn't have anymore results to return.")
	}

	// Local buffer exhausted: ask the server for the next batch.
	endpoint := fmt.Sprintf("%s/cursor/%s",
		c.db.serverUrl.String(),
		c.json.Id,
	)
	var e ArangoError
	response, err := c.db.session.Put(endpoint, nil, &c.json, &e)
	if err != nil {
		return newError(err.Error())
	}
	if response.Status() != 200 {
		return e
	}
	if len(c.json.Result) > 0 {
		return popFront()
	}
	// 200 with an empty batch: not an error; caller re-checks HasMore.
	return nil
}
func (c Cursor) Close() error {
endpoint := fmt.Sprintf("%s/cursor/%s",
c.db.serverUrl.String(),
c.json.Id,
)
var e ArangoError
response, err := c.db.session.Delete(endpoint, nil, &e)
if err != nil {
return newError(err.Error())
}
switch response.Status() {
case 202:
return nil
default:
return e
}
}<|fim▁end|> | } |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! Public-key authenticated encryption
//!
//! # Security model
//! The `seal()` function is designed to meet the standard notions of privacy and
//! third-party unforgeability for a public-key authenticated-encryption scheme
//! using nonces. For formal definitions see, e.g., Jee Hea An, "Authenticated
//! encryption in the public-key setting: security notions and analyses,"
//! http://eprint.iacr.org/2001/079.
//!
//! Distinct messages between the same {sender, receiver} set are required
//! to have distinct nonces. For example, the lexicographically smaller
//! public key can use nonce 1 for its first message to the other key, nonce
//! 3 for its second message, nonce 5 for its third message, etc., while the
//! lexicographically larger public key uses nonce 2 for its first message
//! to the other key, nonce 4 for its second message, nonce 6 for its third
//! message, etc. Nonces are long enough that randomly generated nonces have
//! negligible risk of collision.
//!
//! There is no harm in having the same nonce for different messages if the
//! {sender, receiver} sets are different. This is true even if the sets
//! overlap. For example, a sender can use the same nonce for two different
//! messages if the messages are sent to two different public keys.
//!
//! The `seal()` function is not meant to provide non-repudiation. On the
//! contrary: the `seal()` function guarantees repudiability. A receiver
//! can freely modify a boxed message, and therefore cannot convince third
//! parties that this particular message came from the sender. The sender
//! and receiver are nevertheless protected against forgeries by other
//! parties. In the terminology of
//! http://groups.google.com/group/sci.crypt/msg/ec5c18b23b11d82c,
//! crypto_box uses "public-key authenticators" rather than "public-key
//! signatures."
//!
//! Users who want public verifiability (or receiver-assisted public
//! verifiability) should instead use signatures (or signcryption).
//! Signature support is a high priority for NaCl; a signature API will be
//! described in subsequent NaCl documentation.
//!
//! # Selected primitive
//! `seal()` is `crypto_box_curve25519xsalsa20poly1305` , a particular
//! combination of Curve25519, Salsa20, and Poly1305 specified in
//! [Cryptography in NaCl](http://nacl.cr.yp.to/valid.html).
//!
//! This function is conjectured to meet the standard notions of privacy and
//! third-party unforgeability.
//!<|fim▁hole|>//! let (ourpk, oursk) = box_::gen_keypair();
//! // normally theirpk is sent by the other party
//! let (theirpk, theirsk) = box_::gen_keypair();
//! let nonce = box_::gen_nonce();
//! let plaintext = b"some data";
//! let ciphertext = box_::seal(plaintext, &nonce, &theirpk, &oursk);
//! let their_plaintext = box_::open(&ciphertext, &nonce, &ourpk, &theirsk).unwrap();
//! assert!(plaintext == &their_plaintext[..]);
//! ```
//! # Example (precomputation interface)
//! ```
//! use maidsafe_sodiumoxide::crypto::box_;
//!
//! let (ourpk, oursk) = box_::gen_keypair();
//! let (theirpk, theirsk) = box_::gen_keypair();
//! let our_precomputed_key = box_::precompute(&theirpk, &oursk);
//! let nonce = box_::gen_nonce();
//! let plaintext = b"plaintext";
//! let ciphertext = box_::seal_precomputed(plaintext, &nonce, &our_precomputed_key);
//! // this will be identical to our_precomputed_key
//! let their_precomputed_key = box_::precompute(&ourpk, &theirsk);
//! let their_plaintext = box_::open_precomputed(&ciphertext, &nonce,
//! &their_precomputed_key).unwrap();
//! assert!(plaintext == &their_plaintext[..]);
//! ```
pub use self::curve25519xsalsa20poly1305::*;
pub mod curve25519xsalsa20poly1305;<|fim▁end|> | //! # Example (simple interface)
//! ```
//! use maidsafe_sodiumoxide::crypto::box_;
//! |
<|file_name|>FieldContentValueAlertConditionTest.java<|end_file_name|><|fim▁begin|>/**
* This file is part of Graylog.
*
* Graylog is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Graylog is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Graylog. If not, see <http://www.gnu.org/licenses/>.
*/
package org.graylog2.alerts.types;
import com.google.common.collect.ImmutableMap;<|fim▁hole|>import com.google.common.collect.Iterators;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.graylog2.Configuration;
import org.graylog2.alerts.AbstractAlertCondition;
import org.graylog2.alerts.AlertConditionTest;
import org.graylog2.indexer.ranges.IndexRange;
import org.graylog2.indexer.ranges.MongoIndexRange;
import org.graylog2.indexer.results.SearchResult;
import org.graylog2.indexer.searches.Searches;
import org.graylog2.indexer.searches.Sorting;
import org.graylog2.plugin.Tools;
import org.graylog2.plugin.alarms.AlertCondition;
import org.graylog2.plugin.indexer.searches.timeranges.RelativeRange;
import org.graylog2.plugin.streams.Stream;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.junit.Test;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import static org.junit.Assert.assertNotNull;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
public class FieldContentValueAlertConditionTest extends AlertConditionTest {
@Test
public void testConstructor() throws Exception {
final Map<String, Object> parameters = getParametersMap(0, "field", "value");
final FieldContentValueAlertCondition condition = getCondition(parameters, alertConditionTitle);
assertNotNull(condition);
assertNotNull(condition.getDescription());
}
@Test
public void testRunMatchingMessagesInStream() throws Exception {
final SearchHits searchHits = mock(SearchHits.class);
final SearchHit searchHit = mock(SearchHit.class);
final HashMap<String, Object> source = Maps.newHashMap();
source.put("message", "something is in here");
when(searchHit.getId()).thenReturn("some id");
when(searchHit.getSource()).thenReturn(source);
when(searchHit.getIndex()).thenReturn("graylog_test");
when(searchHits.iterator()).thenReturn(Iterators.singletonIterator(searchHit));
final DateTime now = DateTime.now(DateTimeZone.UTC);
final IndexRange indexRange = MongoIndexRange.create("graylog_test", now.minusDays(1), now, now, 0);
final Set<IndexRange> indexRanges = Sets.newHashSet(indexRange);
final SearchResult searchResult = spy(new SearchResult(searchHits,
indexRanges,
"message:something",
null,
new TimeValue(100, TimeUnit.MILLISECONDS)));
when(searchResult.getTotalResults()).thenReturn(1L);
when(searches.search(
anyString(),
anyString(),
any(RelativeRange.class),
anyInt(),
anyInt(),
any(Sorting.class)))
.thenReturn(searchResult);
final FieldContentValueAlertCondition condition = getCondition(getParametersMap(0, "message", "something"), "Alert Condition for testing");
alertLastTriggered(-1);
final AlertCondition.CheckResult result = alertService.triggered(condition);
assertTriggered(condition, result);
}
@Test
public void testRunNoMatchingMessages() throws Exception {
final SearchHits searchHits = mock(SearchHits.class);
when(searchHits.iterator()).thenReturn(Collections.<SearchHit>emptyIterator());
final DateTime now = DateTime.now(DateTimeZone.UTC);
final IndexRange indexRange = MongoIndexRange.create("graylog_test", now.minusDays(1), now, now, 0);
final Set<IndexRange> indexRanges = Sets.newHashSet(indexRange);
final SearchResult searchResult = spy(new SearchResult(searchHits,
indexRanges,
"message:something",
null,
new TimeValue(100, TimeUnit.MILLISECONDS)));
when(searches.search(
anyString(),
anyString(),
any(RelativeRange.class),
anyInt(),
anyInt(),
any(Sorting.class)))
.thenReturn(searchResult);
final FieldContentValueAlertCondition condition = getCondition(getParametersMap(0, "message", "something"), alertConditionTitle);
alertLastTriggered(-1);
final AlertCondition.CheckResult result = alertService.triggered(condition);
assertNotTriggered(result);
}
@Test
public void testCorrectUsageOfRelativeRange() throws Exception {
final Stream stream = mock(Stream.class);
final Searches searches = mock(Searches.class);
final Configuration configuration = mock(Configuration.class);
final SearchResult searchResult = mock(SearchResult.class);
final int alertCheckInterval = 42;
final RelativeRange relativeRange = RelativeRange.create(alertCheckInterval);
when(configuration.getAlertCheckInterval()).thenReturn(alertCheckInterval);
when(searches.search(anyString(),
anyString(),
eq(relativeRange),
anyInt(),
anyInt(),
any(Sorting.class))).thenReturn(searchResult);
final FieldContentValueAlertCondition alertCondition = new FieldContentValueAlertCondition(searches, configuration, stream,
null, DateTime.now(DateTimeZone.UTC), "mockuser", ImmutableMap.<String,Object>of("field", "test", "value", "test"), "Field Content Value Test COndition");
final AbstractAlertCondition.CheckResult result = alertCondition.runCheck();
}
protected FieldContentValueAlertCondition getCondition(Map<String, Object> parameters, String title) {
return new FieldContentValueAlertCondition(
searches,
mock(Configuration.class),
stream,
CONDITION_ID,
Tools.nowUTC(),
STREAM_CREATOR,
parameters,
title);
}
protected Map<String, Object> getParametersMap(Integer grace, String field, String value) {
Map<String, Object> parameters = new HashMap<>();
parameters.put("grace", grace);
parameters.put("field", field);
parameters.put("value", value);
return parameters;
}
}<|fim▁end|> | |
<|file_name|>analyzePackage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
import scapy
#import scapy_ex
import os,sys
import printerInfo
import enum
from enum import Enum
from scapy.all import *
import time, datetime
#from time import sleep
class Message(Enum):
    """802.11 frame categories tracked by AnalyzePackage.

    NOTE(review): the values are numeric *strings*, not ints — presumably
    they are compared/serialized as text elsewhere; confirm before
    changing them to integers.
    """
    AUTH = "0"
    DEAUTH = "1"
    PROBE_REQ = "2"
    PROBE_RESP = "3"
    HAND_SUCC = "4"
    HAND_FAIL = "5"
    CORR_PACK = "6"
    RTS = "7"
    CTS = "8"
    ACK = "9"
    DATA = "10"
    BEACON = "11"
    ASSOC_REQ = "12"
    ASSOC_RESP = "13"
    DISASSOC = "14"
    NUM_PACK = "15"
    OTHER = "16"
class AnalyzePackage:
    """Accumulates per-AP / per-client statistics from sniffed 802.11 frames."""
    # Destination address used by broadcast 802.11 frames.
    BROADCAST_ADDR = "ff:ff:ff:ff:ff:ff"
    EXTENSION_LOG = ".log"
    FOLDER_LOG = "log/"
def __init__(self, printerInfo):
    """Initialize every statistics table empty and prepare log file names.

    printerInfo: output helper used elsewhere to render the collected data.
    """
    self.apPresent = []
    # All counter/lookup tables start as empty dicts; *Info tables are
    # keyed by (AP MAC, client MAC) pairs, the others by a single MAC.
    for table in (
        "essid", "channel", "power", "powerAP",
        "authentInfo", "authent",
        "associationRequestInfo", "associationRequest",
        "associationResponceInfo", "associationResponce",
        "disassociationInfo", "disassociation",
        "deauthentInfo", "deauthent",
        "probeRequestInfo", "probeRequest",
        "probeResponseInfo", "probeResponse",
        "eapHandshakeSuccessInfo", "eapHandshakeSuccess",
        "eapHandshakeFailedInfo", "eapHandshakeFailed",
        "corruptedPackInfo", "corruptedPack",
        "eapRequest",
        "rtsListInfo", "rtsList",
        "ctsListInfo", "ctsList",
        "dataListInfo", "dataList",
        "ackListInfo", "ackList",
        "beaconListInfo", "beacon",
        "numPackInfo", "numPack",
        "otherListInfo", "otherList",
        "info", "infoAP", "infoClient", "roamingClient",
    ):
        setattr(self, table, {})
    self.cont = 0
    self.contForAP = 0
    self.printerInfo = printerInfo
    # Timestamped log file name, e.g. log/2024115-9-3-7.log (fields are
    # not zero-padded, matching the original naming scheme).
    now = datetime.datetime.now()
    stamp = "%s%s%s-%s-%s-%s" % (
        now.year, now.month, now.day, now.hour, now.minute, now.second)
    self.titleLog = AnalyzePackage.FOLDER_LOG + stamp + AnalyzePackage.EXTENSION_LOG
    #self.fileLog = open(self.titleLog, "w+")
    # Truncate the disassociation trace file on start-up.
    open("DISASS.txt", "w+").close()
def createArrayInfo(self, macAP, macClient):
    """Ensure all per-(AP, client) counters exist for this pair.

    Fix (idiom): replaces ~36 lines of copy-pasted membership checks with
    a data-driven loop over the counter tables; behavior is unchanged.
    """
    pair = (macAP, macClient)
    zero_counters = (
        self.deauthentInfo, self.authentInfo,
        self.associationRequestInfo, self.associationResponceInfo,
        self.disassociationInfo,
        self.eapHandshakeSuccessInfo, self.eapHandshakeFailedInfo,
        self.corruptedPackInfo,
        self.rtsListInfo, self.ctsListInfo, self.dataListInfo,
        self.numPackInfo, self.ackListInfo, self.beaconListInfo,
        self.probeResponseInfo, self.otherListInfo,
    )
    for counters in zero_counters:
        counters.setdefault(pair, 0)
    # Unknown signal strength is represented by "-", not 0.
    self.power.setdefault(pair, "-")
    # NOTE(review): historical quirk preserved — ackListInfo is also keyed
    # by the bare client MAC in addition to the (AP, client) pair.
    self.ackListInfo.setdefault(macClient, 0)
def createArray(self, mac):
    """Ensure all per-station counters exist for `mac`.

    Fix (idiom): replaces the long chain of duplicated membership checks
    with a loop over the counter tables; behavior is unchanged.
    """
    zero_counters = (
        self.beacon, self.numPack, self.authent,
        self.associationRequest, self.associationResponce,
        self.disassociation, self.deauthent,
        self.probeRequest, self.probeResponse,
        self.eapHandshakeSuccess, self.eapHandshakeFailed,
        self.corruptedPack,
        self.rtsList, self.ctsList, self.dataList,
        self.ackList, self.otherList,
    )
    for counters in zero_counters:
        counters.setdefault(mac, 0)
    # "-" marks unknown power/channel readings.
    self.power.setdefault(mac, "-")
    self.channel.setdefault(mac, "-")
def checkFrequence(self, macAP, macClient, power):
    """Record a signal-strength reading for the (AP, client) pair.

    Zero/None readings are ignored, as are readings attributed to the
    broadcast address.
    """
    if power in (0, None):
        return
    if macAP != AnalyzePackage.BROADCAST_ADDR:
        self.power[(macAP, macClient)] = power
        self.powerAP[macAP] = power
def checkChannel(self, mac, channel):
    """Record the channel for `mac`, ignoring missing/"0" readings."""
    if channel in ("0", None):
        return
    self.channel[mac] = channel
def printInfo(self, essid, macAP, macClient):
    """Snapshot the per-(AP, client) counters into self.info.

    Only pairs that have seen at least one packet are recorded; the
    corrupted-packet percentage uses the pair's packet total as the
    denominator (the numPackInfo != 0 guard also prevents a division by
    zero). The tuple layout is position-dependent — do not reorder; the
    printer consumes it by index.
    """
    if macAP != None and macClient != None:
        # Probe requests are keyed by (essid, client), not (AP, client).
        if (essid, macClient) not in self.probeRequestInfo:
            self.probeRequestInfo[(essid, macClient)] = 0
        if self.numPackInfo[(macAP, macClient)] != 0:
            percentCorr = int(float(self.corruptedPackInfo[(macAP,macClient)])/float(self.numPackInfo[(macAP,macClient)])*100)
            strPercentage = str(percentCorr)
            i = tuple([essid, macAP, macClient, self.authentInfo[(macAP,macClient)], self.deauthentInfo[(macAP,macClient)], self.associationRequestInfo[(macAP,macClient)], self.associationResponceInfo[(macAP,macClient)], self.disassociationInfo[(macAP,macClient)], self.eapHandshakeSuccessInfo[(macAP,macClient)], self.eapHandshakeFailedInfo[(macAP,macClient)], self.power[(macAP,macClient)], self.corruptedPackInfo[(macAP,macClient)], strPercentage, self.dataListInfo[(macAP,macClient)], self.rtsListInfo[(macAP,macClient)], self.ctsListInfo[(macAP,macClient)], self.ackListInfo[(macAP, macClient)], self.beaconListInfo[(macAP,macClient)], self.probeRequestInfo[(essid,macClient)], self.probeResponseInfo[(macAP,macClient)], self.numPackInfo[(macAP,macClient)], self.otherListInfo[(macAP,macClient)]])
            self.info[i[1],i[2]] = i
def printInfoAP(self, essid, macAP, macClient):
    """Snapshot the per-AP counters into self.infoAP.

    Broadcast "APs" are skipped; only APs with at least one packet are
    recorded. The tuple layout is position-dependent — do not reorder.
    """
    if macAP != None and macAP != AnalyzePackage.BROADCAST_ADDR and macClient != None:
        if (macAP) not in self.probeRequest:
            self.probeRequest[macAP] = 0
        if self.numPack[macAP] != 0:
            percentCorr = int(float(self.corruptedPack[macAP])/float(self.numPack[macAP])*100)
            strPercentage = str(percentCorr)
            i = tuple([essid, macAP, macClient, self.channel[macAP], self.authent[macAP], self.deauthent[macAP], self.associationRequest[macAP], self.associationResponce[macAP], self.disassociation[macAP], self.eapHandshakeSuccess[macAP], self.eapHandshakeFailed[macAP], self.power[macAP],self.corruptedPack[macAP], strPercentage, self.dataList[macAP], self.rtsList[macAP], self.ctsList[macAP], self.ackList[macAP], self.beacon[macAP], self.probeRequest[macAP], self.probeResponse[macAP], self.numPack[macAP], self.otherList[macAP]])
            self.infoAP[i[1]] = i
def printInfoClient(self, essid, macAP, macClient):
    """Snapshot the per-client counters into self.infoClient.

    Unlike printInfoAP, no power column is included. The tuple layout is
    position-dependent — do not reorder.
    """
    if macAP != None and macClient != None and macClient != "":
        if (macClient) not in self.probeRequest:
            self.probeRequest[macClient] = 0
        if self.numPack[macClient] != 0:
            percentCorr = int(float(self.corruptedPack[macClient])/float(self.numPack[macClient])*100)
            strPercentage = str(percentCorr)
            i = tuple([essid, macAP, macClient, self.channel[macClient], self.authent[macClient], self.deauthent[macClient], self.associationRequest[macClient], self.associationResponce[macClient], self.disassociation[macClient], self.eapHandshakeSuccess[macClient], self.eapHandshakeFailed[macClient], self.corruptedPack[macClient], strPercentage, self.dataList[macClient], self.rtsList[macClient], self.ctsList[macClient], self.ackList[macClient], self.beacon[macClient], self.probeRequest[macClient], self.probeResponse[macClient], self.numPack[macClient], self.otherList[macClient]])
            self.infoClient[i[2]] = i
def takeInformation(self):
    """Return the per-(AP, client) snapshot tuples collected so far."""
    return self.info
def takeInformationAP(self):
    """Return the per-AP snapshot tuples collected so far."""
    return self.infoAP
def takeInformationClient(self):
    """Return the per-client snapshot tuples collected so far."""
    return self.infoClient
def takeInformationRoamingClient(self):
    """Return the client -> [AP MACs] mapping used to detect roaming."""
    return self.roamingClient
def createArrayForCorruptPack(self, essid, macAP, macClient, hasInfo):
    """Account a corrupted frame for the (AP, client) pair.

    When hasInfo is True the essid is trusted and the snapshots are
    refreshed directly; otherwise the essid is looked up from the cached
    beacon data via the checkEssid* helpers.
    """
    self.createArrayAndUpdateInfo(macAP, macClient, Message.CORR_PACK)
    if not hasInfo:
        self.checkEssid(macAP, macClient)
        self.checkEssidAP(macAP, macClient)
        self.checkEssidClient(macAP, macClient)
    else:
        self.printInfo(essid, macAP, macClient)
        self.printInfoAP(essid, macAP, macClient)
        self.printInfoClient(essid, macAP, macClient)
def checkFCS(self, p, from_DS, to_DS):
    """Detect a corrupted frame and attribute it to the right AP/client.

    Checks bit 0x40 of the (radiotap) Flags field — presumably the
    "bad FCS" flag; TODO confirm against the capture driver. The DS bits
    decide which address is the AP and which is the client. Returns True
    for a corrupted frame, False for a clean one; when the packet has no
    Flags attribute the function falls through and returns None (callers
    treat that as "not corrupted").
    """
    #if p.haslayer(Dot11ProbeReq):
    if hasattr(p, 'Flags') and p.Flags is not None:
        if p.Flags & 64 != 0:
            if not from_DS and to_DS:
                # To-DS: addr1 is the AP, addr2 the transmitting client.
                if hasattr(p, 'addr1') and hasattr(p, 'addr2'):
                    if p.addr1 != None and p.addr2 != None:
                        self.createArrayForCorruptPack("", p.addr1, p.addr2, False)
            elif from_DS and not to_DS:
                # From-DS: addr2 is the AP, addr1 the receiving client.
                if hasattr(p, 'addr1') and hasattr(p, 'addr2'):
                    if p.addr1 != None and p.addr2 != None:
                        self.createArrayForCorruptPack("", p.addr2, p.addr1, False)
            elif not from_DS and not to_DS:
                # IBSS/management: addr3 is the BSSID/AP.
                if hasattr(p, 'addr1') and hasattr(p, 'addr2'):
                    if p.addr3 != None and p.addr2 != None:
                        if p.addr3 != p.addr2:
                            macAP = p.addr3
                            macClient = p.addr2
                        else:
                            # AP talking about itself: no distinct client.
                            macAP = p.addr3
                            macClient = None
                        self.createArrayForCorruptPack("", macAP, macClient, False)
            return True
        else:
            return False
def checkEssid(self, macAP, macClient):
    """Refresh the pair snapshot using the cached ESSID ("-" if unknown)."""
    ssid = self.essid.get(macAP, "-")
    self.printInfo(ssid, macAP, macClient)
def checkEssidAP(self, macAP, macClient):
    """Refresh the AP snapshot using the cached ESSID ("-" if unknown)."""
    ssid = self.essid.get(macAP, "-")
    self.printInfoAP(ssid, macAP, macClient)
def checkEssidClient(self, macAP, macClient):
    """Refresh the client snapshot using the cached ESSID ("-" if unknown)."""
    ssid = self.essid.get(macAP, "-")
    self.printInfoClient(ssid, macAP, macClient)
def checkRoamingClient(self, macAP, macClient):
    """Remember that macClient was seen talking to macAP.

    A client associated with more than one AP over time indicates roaming.
    """
    aps = self.roamingClient.setdefault(macClient, [])
    if macAP not in aps:
        aps.append(macAP)
def createArrayAndUpdateInfo(self, macAP, macClient, message, increaseNumPack=True):
    """Central counter update: bump the stat matching `message` for the
    (AP, client) pair, the AP alone and the client alone, then refresh the
    ESSID-aware snapshots.

    increaseNumPack=False suppresses the packet-count style counters
    (corrupted/data/beacon/total). NOTE(review): the original indentation
    was lost in extraction — the exact scope of each `if increaseNumPack:`
    guard (AP counter only vs. the whole stat group) should be confirmed
    against the upstream source; the grouping below is the reviewer's
    best reconstruction.
    """
    #d = open("ROAMING.txt", "a")
    # Make sure every counter the branches below touch already exists.
    self.createArrayInfo(macAP, macClient)
    self.createArray(macAP)
    self.createArray(macClient)
    if message == Message.AUTH:
        self.authentInfo[(macAP, macClient)] += 1
        self.authent[macAP] += 1
        self.authent[macClient] += 1
        self.checkRoamingClient(macAP, macClient)
        #d.write(macAP+" "+macClient+" AUTH \n")
    elif message == Message.DEAUTH:
        self.deauthentInfo[(macAP, macClient)] += 1
        self.deauthent[macAP] += 1
        self.deauthent[macClient] += 1
        self.checkRoamingClient(macAP, macClient)
        #d.write(macAP+" "+macClient+" DEAUTH \n")
    elif message == Message.PROBE_REQ:
        # Probe requests have no per-pair counter here (they are tracked
        # by (essid, client) in probeRequestInfo elsewhere).
        #self.probeRequest[(macAP, macClient)] += 1
        self.probeRequest[macAP] += 1
        self.probeRequest[macClient] += 1
    elif message == Message.PROBE_RESP:
        self.probeResponseInfo[(macAP, macClient)] += 1
        self.probeResponse[macAP] += 1
        self.probeResponse[macClient] += 1
    elif message == Message.HAND_SUCC:
        self.eapHandshakeSuccessInfo[(macAP, macClient)] += 1
        self.eapHandshakeSuccess[macAP] += 1
        self.eapHandshakeSuccess[macClient] += 1
    elif message == Message.HAND_FAIL:
        self.eapHandshakeFailedInfo[(macAP, macClient)] += 1
        self.eapHandshakeFailed[macAP] += 1
        self.eapHandshakeFailed[macClient] += 1
    elif message == Message.CORR_PACK:
        if increaseNumPack:
            self.corruptedPack[macAP] += 1
            self.corruptedPackInfo[(macAP, macClient)] += 1
            self.corruptedPack[macClient] += 1
    elif message == Message.RTS:
        self.rtsListInfo[(macAP, macClient)] += 1
        self.rtsList[macAP] += 1
        self.rtsList[macClient] += 1
    elif message == Message.CTS:
        self.ctsListInfo[(macAP, macClient)] += 1
        self.ctsList[macAP] += 1
        self.ctsList[macClient] += 1
    elif message == Message.ACK:
        self.ackListInfo[(macAP, macClient)] += 1
        self.ackList[macAP] += 1
        self.ackList[macClient] += 1
    elif message == Message.DATA:
        if increaseNumPack:
            self.dataList[macAP] += 1
            self.dataListInfo[(macAP, macClient)] += 1
            self.dataList[macClient] += 1
            self.checkRoamingClient(macAP, macClient)
            #d.write(macAP+" "+macClient+" DATA \n")
    elif message == Message.BEACON:
        if increaseNumPack:
            self.beacon[macAP] += 1
            self.beaconListInfo[(macAP, macClient)] += 1
            self.beacon[macClient] += 1
    elif message == Message.ASSOC_REQ:
        self.associationRequest[macAP] += 1
        self.associationRequestInfo[(macAP, macClient)] += 1
        self.associationRequest[macClient] += 1
        self.checkRoamingClient(macAP, macClient)
        #d.write(macAP+" "+macClient+" ASSOC_REQ \n")
    elif message == Message.ASSOC_RESP:
        self.associationResponce[macAP] += 1
        self.associationResponceInfo[(macAP, macClient)] += 1
        self.associationResponce[macClient] += 1
    elif message == Message.DISASSOC:
        self.disassociation[macAP] += 1
        self.disassociationInfo[(macAP, macClient)] += 1
        self.disassociation[macClient] += 1
    elif message == Message.OTHER:
        self.otherList[macAP] += 1
        self.otherListInfo[(macAP, macClient)] += 1
        self.otherList[macClient] += 1
    if increaseNumPack:
        self.numPack[macAP] += 1
        self.numPack[macClient] += 1
        self.numPackInfo[(macAP, macClient)] += 1
    # Refresh the rendered snapshots with whatever ESSID is cached.
    self.checkEssid(macAP, macClient)
    self.checkEssidAP(macAP, macClient)
    self.checkEssidClient(macAP, macClient)
    #d.close()
def setFrequency(self, p, addr1, addr2):
    """Extract the RSSI from the packet's undecoded tail and record it.

    NOTE(review): assumes the signal byte sits at offset -2 of
    p.notdecoded (radiotap layout) — confirm for the capture interface.
    """
    raw = ord(p.notdecoded[-2:-1])
    self.checkFrequence(addr1, addr2, -(256 - raw))
def sniffmgmt(self, p):
    """Scapy per-packet callback: classify an 802.11 frame and update the
    per-AP / per-client statistics tables.

    While fewer than ~10 access points have been counted (self.contForAP)
    the sniffer is in a warm-up phase and only collects APs from beacons;
    afterwards every frame is FCS-checked and dispatched by type/subtype.

    Fix applied: the ASSOC_RESP handler (mgmt subtype 1) used to record
    Message.DISASSOC and the DISASSOC handler (mgmt subtype 10) recorded
    Message.ASSOC_RESP -- the two constants were swapped.
    """
    from_DS = None
    to_DS = None
    # Information element #3 (DS parameter set) carries the channel.
    if p.haslayer(Dot11Elt):
        try:
            self.checkChannel(p.addr2, ord(p[Dot11Elt:3].info))
        except Exception as e:
            self.fileLog = open("log.log", "a")
            self.fileLog.write(str(e))
            self.fileLog.close()
    if hasattr(p, 'FCfield') and p.FCfield is not None:
        # Frame Control flags: bit0 = to-DS, bit1 = from-DS, bit3 = retry.
        DS = p.FCfield & 0x3
        to_DS = DS & 0x1 != 0
        from_DS = DS & 0x2 != 0
        retry = p.FCfield & 0x8
        if self.contForAP > 10:
            # Warm-up finished: drop frames that fail the FCS check.
            isCorrupted = self.checkFCS(p, from_DS, to_DS)
            if isCorrupted:
                return
            elif not isCorrupted:
                # Track every AP advertising an SSID.
                if p.haslayer(Dot11) and hasattr(p, 'info'):
                    if p.addr3 not in self.apPresent:
                        self.apPresent.insert(0, p.addr3)
                    self.essid[p.addr3] = p.info
                    self.setFrequency(p, p.addr3, p.addr2)
                # Record the AP <-> client pairing for unicast traffic.
                if from_DS and not to_DS and p.addr3 != AnalyzePackage.BROADCAST_ADDR and p.addr1 != AnalyzePackage.BROADCAST_ADDR:
                    key = "%s" % (p.addr3)
                    self.createArrayInfo(key, p.addr1)
                    self.setFrequency(p, key, p.addr1)
                elif not from_DS and to_DS and p.addr2 != AnalyzePackage.BROADCAST_ADDR:
                    key = "%s" % (p.addr1)
                    if key in self.apPresent:
                        self.createArrayInfo(key, p.addr2)
                        self.setFrequency(p, key, p.addr2)
                # ---- dispatch by frame content --------------------------
                if p.haslayer(EAP):
                    if p[EAP].code == 3:  # EAP Success
                        if (p.addr2, p.addr1) not in self.eapHandshakeSuccess:
                            self.createArrayInfo(p.addr2, p.addr1)
                        if not from_DS and to_DS:
                            self.createArrayAndUpdateInfo(p.addr1, p.addr2, Message.HAND_SUCC)
                            self.setFrequency(p, p.addr1, p.addr2)
                        elif from_DS and not to_DS:
                            self.createArrayAndUpdateInfo(p.addr2, p.addr1, Message.HAND_SUCC)
                            self.setFrequency(p, p.addr2, p.addr1)
                        elif not from_DS and not to_DS:
                            self.createArrayAndUpdateInfo(p.addr3, p.addr2, Message.HAND_SUCC)
                            self.setFrequency(p, p.addr3, p.addr2)
                        return
                    elif p[EAP].code == 4:  # EAP Failure
                        if not from_DS and to_DS:
                            self.createArrayAndUpdateInfo(p.addr1, p.addr2, Message.HAND_FAIL)
                            self.setFrequency(p, p.addr1, p.addr2)
                        elif from_DS and not to_DS:
                            self.createArrayAndUpdateInfo(p.addr2, p.addr1, Message.HAND_FAIL)
                            self.setFrequency(p, p.addr2, p.addr1)
                        elif not from_DS and not to_DS:
                            self.createArrayAndUpdateInfo(p.addr3, p.addr2, Message.HAND_FAIL)
                            self.setFrequency(p, p.addr3, p.addr2)
                        return
                elif hasattr(p, 'type') and p.type == 0 and hasattr(p, 'subtype') and p.subtype == 8:  # BEACON
                    if p.addr2 not in self.apPresent:
                        self.apPresent.insert(0, p.addr2)
                    if not from_DS and to_DS:
                        self.createArrayAndUpdateInfo(p.addr1, p.addr2, Message.BEACON)
                        self.setFrequency(p, p.addr1, p.addr2)
                        self.createArrayAndUpdateInfo(p.addr1, p.addr3, Message.BEACON, False)
                        self.setFrequency(p, p.addr1, p.addr3)
                    elif from_DS and not to_DS:
                        self.createArrayAndUpdateInfo(p.addr2, p.addr1, Message.BEACON)
                        self.setFrequency(p, p.addr2, p.addr1)
                        self.createArrayAndUpdateInfo(p.addr2, p.addr3, Message.BEACON, False)
                        self.setFrequency(p, p.addr2, p.addr3)
                    elif not from_DS and not to_DS:
                        isDifferent = False
                        if hasattr(p, 'addr2') and hasattr(p, 'addr3'):
                            if p.addr3 != p.addr2:
                                isDifferent = True
                                self.createArrayAndUpdateInfo(p.addr3, p.addr2, Message.BEACON)
                                self.setFrequency(p, p.addr3, p.addr2)
                            if not isDifferent:
                                # BSSID == transmitter: record for the AP alone.
                                self.createArrayAndUpdateInfo(p.addr3, None, Message.BEACON)
                            else:
                                self.createArrayAndUpdateInfo(p.addr3, p.addr1, Message.BEACON, False)
                                self.setFrequency(p, p.addr3, p.addr1)
                    return
                elif hasattr(p, 'type') and p.type == 2:  # DATA
                    isDifferent = False
                    if not from_DS and to_DS:
                        if p.addr1 != p.addr2:
                            isDifferent = True
                            self.createArrayAndUpdateInfo(p.addr1, p.addr2, Message.DATA)
                            self.setFrequency(p, p.addr1, p.addr2)
                        if not isDifferent:
                            self.createArrayAndUpdateInfo(p.addr1, p.addr3, Message.DATA)
                        else:
                            if p.addr1 != p.addr3:
                                self.createArrayAndUpdateInfo(p.addr1, p.addr3, Message.DATA, False)
                                self.setFrequency(p, p.addr1, p.addr3)
                    elif from_DS and not to_DS:
                        if p.addr1 != p.addr2:
                            isDifferent = True
                            self.createArrayAndUpdateInfo(p.addr2, p.addr1, Message.DATA)
                            self.setFrequency(p, p.addr2, p.addr1)
                        if not isDifferent:
                            self.createArrayAndUpdateInfo(p.addr2, p.addr3, Message.DATA)
                        else:
                            if p.addr2 != p.addr3:
                                self.createArrayAndUpdateInfo(p.addr2, p.addr3, Message.DATA, False)
                                self.setFrequency(p, p.addr2, p.addr3)
                    elif not from_DS and not to_DS:
                        if hasattr(p, 'addr2') and hasattr(p, 'addr3'):
                            if p.addr3 != p.addr2:
                                isDifferent = True
                                self.createArrayAndUpdateInfo(p.addr3, p.addr2, Message.DATA)
                                self.setFrequency(p, p.addr3, p.addr2)
                            if not isDifferent:
                                self.createArrayAndUpdateInfo(p.addr3, p.addr1, Message.DATA)
                                self.setFrequency(p, p.addr3, p.addr1)
                            else:
                                if p.addr1 != p.addr3:
                                    self.createArrayAndUpdateInfo(p.addr3, p.addr1, Message.DATA, False)
                                    self.setFrequency(p, p.addr3, p.addr1)
                    return
                elif hasattr(p, 'type') and p.type == 1 and hasattr(p, 'subtype') and p.subtype == 11:  # RTS
                    macAP = p.addr2
                    macClient = p.addr1
                    if p.addr1 in self.apPresent:
                        macAP = p.addr1
                        macClient = p.addr2
                    self.createArrayAndUpdateInfo(macAP, macClient, Message.RTS)
                    self.setFrequency(p, macAP, macClient)
                    return
                elif hasattr(p, 'type') and p.type == 1 and hasattr(p, 'subtype') and p.subtype == 12:  # CTS
                    if p.addr1 != None:
                        if p.addr1 in self.apPresent:
                            self.createArrayAndUpdateInfo(p.addr1, p.addr2, Message.CTS)
                            self.setFrequency(p, p.addr1, p.addr2)
                        else:
                            self.createArrayAndUpdateInfo(p.addr2, p.addr1, Message.CTS)
                            self.setFrequency(p, p.addr2, p.addr1)
                    return
                elif hasattr(p, 'type') and p.type == 1 and hasattr(p, 'subtype') and p.subtype == 13:  # ACK
                    if p.addr1 != None:
                        if p.addr1 in self.apPresent:
                            self.createArrayAndUpdateInfo(p.addr1, p.addr2, Message.ACK)
                            self.setFrequency(p, p.addr1, p.addr2)
                        else:
                            self.createArrayAndUpdateInfo(p.addr2, p.addr1, Message.ACK)
                            self.setFrequency(p, p.addr2, p.addr1)
                    return
                elif hasattr(p, 'type') and p.type == 0 and hasattr(p, 'subtype') and p.subtype == 11:  # AUTH
                    if retry == 0 and p.addr2 != p.addr3:
                        macAP = p.addr1
                        macClient = p.addr2
                    else:
                        # Retransmission (or BSSID == transmitter): fall back
                        # to the known-AP list to orient the pair.
                        if p.addr2 in self.apPresent:
                            macAP = p.addr2
                            macClient = p.addr1
                        else:
                            macAP = p.addr1
                            macClient = p.addr2
                    self.createArrayAndUpdateInfo(macAP, macClient, Message.AUTH)
                    self.setFrequency(p, macAP, macClient)
                    return
                elif hasattr(p, 'type') and p.type == 0 and hasattr(p, 'subtype') and p.subtype == 0:  # ASSOC_REQ
                    macAP = p.addr1
                    macClient = p.addr2
                    self.createArrayAndUpdateInfo(macAP, macClient, Message.ASSOC_REQ)
                    self.setFrequency(p, macAP, macClient)
                    return
                elif hasattr(p, 'type') and p.type == 0 and hasattr(p, 'subtype') and p.subtype == 1:  # ASSOC_RESP
                    macAP = p.addr1
                    macClient = p.addr2
                    # Fixed: this branch used to record Message.DISASSOC.
                    self.createArrayAndUpdateInfo(macAP, macClient, Message.ASSOC_RESP)
                    self.setFrequency(p, macAP, macClient)
                    return
                elif hasattr(p, 'type') and p.type == 0 and hasattr(p, 'subtype') and p.subtype == 10:  # DISASSOC
                    if p.addr1 in self.apPresent:
                        macAP = p.addr1
                        macClient = p.addr2
                    else:
                        macAP = p.addr2
                        macClient = p.addr1
                    # Fixed: this branch used to record Message.ASSOC_RESP.
                    self.createArrayAndUpdateInfo(macAP, macClient, Message.DISASSOC)
                    self.setFrequency(p, macAP, macClient)
                    return
                elif hasattr(p, 'type') and hasattr(p, 'subtype') and p.type == 0 and p.subtype == 12:  # DEAUTH
                    if p.addr1 in self.apPresent:
                        macAP = p.addr1
                        macClient = p.addr2
                    else:
                        macAP = p.addr2
                        macClient = p.addr1
                    self.createArrayAndUpdateInfo(macAP, macClient, Message.DEAUTH)
                    self.setFrequency(p, macAP, macClient)
                    return
                elif hasattr(p, 'type') and p.type == 0 and hasattr(p, 'subtype') and p.subtype == 4:  # PROBE_REQ
                    macAP = p.addr1
                    macClient = p.addr2
                    if macAP in self.essid:
                        # Replace a wildcard SSID with the known one for this AP.
                        p.info = self.essid[macAP]
                    if (p.info, macClient) not in self.probeRequest:
                        self.probeRequest[(p.info, macClient)] = 0
                    self.probeRequest[(p.info, macClient)] += 1
                    self.createArrayAndUpdateInfo(macAP, macClient, Message.PROBE_REQ)
                    self.setFrequency(p, macAP, macClient)
                    return
                elif hasattr(p, 'type') and p.type == 0 and hasattr(p, 'subtype') and p.subtype == 5:  # PROBE_RESP
                    if p.addr2 != None:
                        self.createArrayAndUpdateInfo(p.addr2, p.addr1, Message.PROBE_RESP)
                        self.setFrequency(p, p.addr2, p.addr1)
                    return
                else:
                    # Anything not matched above is counted as OTHER, using
                    # the same DS-orientation logic as DATA frames.
                    isDifferent = False
                    if not from_DS and to_DS:
                        if p.addr1 != p.addr2:
                            isDifferent = True
                            self.createArrayAndUpdateInfo(p.addr1, p.addr2, Message.OTHER)
                            self.setFrequency(p, p.addr1, p.addr2)
                        if not isDifferent:
                            self.createArrayAndUpdateInfo(p.addr1, p.addr3, Message.OTHER)
                        else:
                            if p.addr1 != p.addr3:
                                self.createArrayAndUpdateInfo(p.addr1, p.addr3, Message.OTHER, False)
                                self.setFrequency(p, p.addr1, p.addr3)
                    elif from_DS and not to_DS:
                        if p.addr1 != p.addr2:
                            isDifferent = True
                            self.createArrayAndUpdateInfo(p.addr2, p.addr1, Message.OTHER)
                            self.setFrequency(p, p.addr2, p.addr1)
                        if not isDifferent:
                            self.createArrayAndUpdateInfo(p.addr2, p.addr3, Message.OTHER)
                        else:
                            if p.addr2 != p.addr3:
                                self.createArrayAndUpdateInfo(p.addr2, p.addr3, Message.OTHER, False)
                                self.setFrequency(p, p.addr2, p.addr3)
                    elif not from_DS and not to_DS:
                        if hasattr(p, 'addr2') and hasattr(p, 'addr3'):
                            if p.addr3 != p.addr2:
                                isDifferent = True
                                self.createArrayAndUpdateInfo(p.addr3, p.addr2, Message.OTHER)
                                self.setFrequency(p, p.addr3, p.addr2)
                            if not isDifferent:
                                self.createArrayAndUpdateInfo(p.addr3, p.addr1, Message.OTHER)
                            else:
                                if p.addr1 != p.addr3:
                                    self.createArrayAndUpdateInfo(p.addr3, p.addr1, Message.OTHER, False)
                                    self.setFrequency(p, p.addr3, p.addr1)
                    # Dead debug block kept from the original (no-op string).
                    """if hasattr(p, 'addr2') and hasattr(p, 'addr3') and hasattr(p, 'addr1') and hasattr(p, 'type') and hasattr(p, 'subtype'):
                        self.fileLog = open(self.titleLog, "w")
                        self.fileLog.write("TYPE - SUBTYPE: ")
                        self.fileLog.write(str(p.type)+ " " + str(p.subtype)+"\n")
                        self.fileLog.write("ADDRESS: ")
                        self.fileLog.write(str(p.addr1)+" - " + str(p.addr2)+" - " + str(p.addr3)+"\n")
                        self.fileLog.write("FROM_DS - TO_DS: ")
                        self.fileLog.write(str(from_DS)+ " "+ str(to_DS))
                        self.fileLog.write("\n------------------------------------------------------------------\n\n")
                        self.fileLog.close()"""
        else:
            # Warm-up phase: only collect APs (SSID-bearing frames and
            # beacons) until more than 10 have been counted.
            if p.haslayer(Dot11) and hasattr(p, 'info'):
                if p.addr3 not in self.apPresent:
                    self.apPresent.insert(0, p.addr3)
                self.essid[p.addr3] = p.info
                self.createArrayAndUpdateInfo(p.addr3, "", Message.NUM_PACK)
                self.setFrequency(p, p.addr3, p.addr2)
                self.contForAP += 1
            if hasattr(p, 'type') and p.type == 0 and hasattr(p, 'subtype') and p.subtype == 8:  # BEACON
                if p.addr2 not in self.apPresent:
                    self.apPresent.insert(0, p.addr2)
                if not from_DS and to_DS:
                    self.createArrayAndUpdateInfo(p.addr1, p.addr2, Message.BEACON)
                    self.setFrequency(p, p.addr1, p.addr2)
                    self.createArrayAndUpdateInfo(p.addr1, p.addr3, Message.BEACON, False)
                    self.setFrequency(p, p.addr1, p.addr3)
                elif from_DS and not to_DS:
                    self.createArrayAndUpdateInfo(p.addr2, p.addr1, Message.BEACON)
                    self.setFrequency(p, p.addr2, p.addr1)
                    self.createArrayAndUpdateInfo(p.addr2, p.addr3, Message.BEACON, False)
                    self.setFrequency(p, p.addr2, p.addr3)
                elif not from_DS and not to_DS:
                    isDifferent = False
                    if p.addr3 != p.addr2:
                        isDifferent = True
                        self.createArrayAndUpdateInfo(p.addr3, p.addr2, Message.BEACON)
                        self.setFrequency(p, p.addr3, p.addr2)
                    if not isDifferent:
                        self.createArrayAndUpdateInfo(p.addr3, None, Message.BEACON)
                    else:
                        self.createArrayAndUpdateInfo(p.addr3, p.addr1, Message.BEACON, False)
                        self.setFrequency(p, p.addr3, p.addr1)
                # NOTE(review): counting one AP sighting per warm-up beacon;
                # placement reconstructed from scrambled source -- confirm.
                self.contForAP += 1
            return
<|file_name|>move-fragments-9.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test moving array structures, e.g. `[T; 3]` as well as moving
// elements in and out of such arrays.
//
// Note also that the `test_move_array_then_overwrite` tests represent
// cases that we probably should make illegal.
// Droppable payload: the explicit `Drop` impl forces the compiler to
// track moves of every `D` value in the tests below.
pub struct D { d: isize }
impl Drop for D { fn drop(&mut self) { } }
// Returning the whole array moves `a` in its entirety.
#[rustc_move_fragments]
pub fn test_move_array_via_return(a: [D; 3]) -> [D; 3] {
    //~^ ERROR assigned_leaf_path: `$(local a)`
    //~| ERROR moved_leaf_path: `$(local a)`
    return a;
}
// Assigning through `*recv` moves `a` wholesale into the receiver.
#[rustc_move_fragments]
pub fn test_move_array_into_recv(a: [D; 3], recv: &mut [D; 3]) {
    //~^ ERROR parent_of_fragments: `$(local recv)`
    //~| ERROR assigned_leaf_path: `$(local a)`
    //~| ERROR moved_leaf_path: `$(local a)`
    //~| ERROR assigned_leaf_path: `$(local recv).*`
    *recv = a;
}
// Indexing out of the array moves one element (`.[]`) and leaves the
// remainder ("allbutone") unmoved.
#[rustc_move_fragments]
pub fn test_extract_array_elem(a: [D; 3], i: usize) -> D {
    //~^ ERROR parent_of_fragments: `$(local a)`
    //~| ERROR assigned_leaf_path: `$(local i)`
    //~| ERROR moved_leaf_path: `$(local a).[]`
    //~| ERROR unmoved_fragment: `$(allbutone $(local a).[])`
    a[i]
}
// Writing `d` into one slot assigns `.[]` while the other slots stay put.
#[rustc_move_fragments]
pub fn test_overwrite_array_elem(mut a: [D; 3], i: usize, d: D) {
    //~^ ERROR parent_of_fragments: `$(local mut a)`
    //~| ERROR assigned_leaf_path: `$(local i)`
    //~| ERROR assigned_leaf_path: `$(local d)`
    //~| ERROR moved_leaf_path: `$(local d)`
    //~| ERROR assigned_leaf_path: `$(local mut a).[]`
    //~| ERROR unmoved_fragment: `$(allbutone $(local mut a).[])`
    a[i] = d;
}
// FIXME (pnkfelix): Both test_move_array_then_overwrite_elem1 and
// test_move_array_then_overwrite_elem2 illustrate a behavior that
// we need to make illegal if we want to get rid of drop-flags.
// See RFC PR 320 for more discussion.
// Move the whole array away, then re-initialize a single element.
#[rustc_move_fragments]
pub fn test_move_array_then_overwrite_elem1(mut a: [D; 3], i: usize, recv: &mut [D; 3], d: D) {
    //~^ ERROR parent_of_fragments: `$(local mut a)`
    //~| ERROR parent_of_fragments: `$(local recv)`
    //~| ERROR assigned_leaf_path: `$(local recv).*`
    //~| ERROR assigned_leaf_path: `$(local i)`
    //~| ERROR assigned_leaf_path: `$(local d)`
    //~| ERROR moved_leaf_path: `$(local d)`
    //~| ERROR assigned_leaf_path: `$(local mut a).[]`
    //~| ERROR unmoved_fragment: `$(allbutone $(local mut a).[])`
    // This test covers the case where the array contents have been all moved away, but
    // we still need to deal with new initializing writes into the array.
    *recv = a;
    a[i] = d;
}
// Same as elem1, but with two initializing writes after the full move.
#[rustc_move_fragments]
pub fn test_move_array_then_overwrite_elem2(mut a: [D; 3], i: usize, j: usize,
                                            recv: &mut [D; 3], d1: D, d2: D) {
    //~^^ ERROR parent_of_fragments: `$(local mut a)`
    //~| ERROR parent_of_fragments: `$(local recv)`
    //~| ERROR assigned_leaf_path: `$(local recv).*`
    //~| ERROR assigned_leaf_path: `$(local i)`
    //~| ERROR assigned_leaf_path: `$(local j)`
    //~| ERROR assigned_leaf_path: `$(local d1)`
    //~| ERROR assigned_leaf_path: `$(local d2)`
    //~| ERROR moved_leaf_path: `$(local d1)`
    //~| ERROR moved_leaf_path: `$(local d2)`
    //~| ERROR assigned_leaf_path: `$(local mut a).[]`
    //~| ERROR unmoved_fragment: `$(allbutone $(local mut a).[])`
    // This test covers the case where the array contents have been all moved away, but
    // we still need to deal with new initializing writes into the array.
    *recv = a;
    a[i] = d1;
    a[j] = d2;
}
// Compile-test only; nothing to run.
pub fn main() { }
|
<|file_name|>test_track_job_track_line_cxx.cpp<|end_file_name|><|fim▁begin|>#include "sixtracklib/cuda/track_job.hpp"
#include <iomanip>
#include <cstddef>
#include <cstdint>
#include <cstdlib>
#include <cstring>
#include <iostream>
#include <iomanip>
#include <string>
#include <vector>
#include <gtest/gtest.h>
#include "sixtracklib/testlib.h"
#include "sixtracklib/common/definitions.h"
#include "sixtracklib/common/control/definitions.h"
#include "sixtracklib/common/track/definitions.h"
#include "sixtracklib/common/control/node_info.hpp"
#include "sixtracklib/common/buffer.hpp"
#include "sixtracklib/common/particles.hpp"
#include "sixtracklib/common/be_monitor/be_monitor.hpp"
/* Track a particle set for UNTIL_TURN turns on every available CUDA node,
 * chunking the lattice into trackLine() calls, and compare the result
 * against a CPU reference tracked with the same initial particles.
 * Runs once in debug mode and once in normal mode per node. */
TEST( CXX_CudaTrackJobTrackUntilTests, TrackUntilSingleParticleSetSimpleTest )
{
    namespace st = SIXTRL_CXX_NAMESPACE;
    using particles_t = st::Particles;
    using track_job_t = st::CudaTrackJob;
    using buffer_t = track_job_t::buffer_t;
    using buf_size_t = track_job_t::size_type;
    using track_status_t = track_job_t::track_status_t;
    using ctrl_status_t = track_job_t::status_t;
    using node_id_t = track_job_t::node_id_t;
    using real_t = particles_t::real_t;
    using pindex_t = particles_t::index_t;
    /* Per-value tolerance used when bit-exact comparison fails. */
    real_t const ABS_TOLERANCE = real_t{ 1e-14 };

    buffer_t in_particles( ::NS(PATH_TO_BEAMBEAM_PARTICLES_DUMP) );
    buffer_t beam_elem_buffer( ::NS(PATH_TO_BEAMBEAM_BEAM_ELEMENTS) );
    buffer_t cmp_track_pb;

    SIXTRL_ASSERT( in_particles.get< particles_t >(
        buf_size_t{ 0 } ) != nullptr );

    particles_t* cmp_particles = cmp_track_pb.addCopy(
        *in_particles.get< particles_t >( buf_size_t{ 0 } ) );

    SIXTRL_ASSERT( cmp_particles != nullptr );

    /* --------------------------------------------------------------------- */
    /* Perform comparison tracking over lattice: */

    buf_size_t const NUM_PSETS = buf_size_t{ 1 };
    buf_size_t track_pset_index = buf_size_t{ 0 };
    buf_size_t const UNTIL_TURN = buf_size_t{ 20 };

    track_status_t track_status =
        ::NS(TestTrackCpu_track_particles_until_turn_cpu)(
            cmp_track_pb.getCApiPtr(), NUM_PSETS, &track_pset_index,
            beam_elem_buffer.getCApiPtr(), UNTIL_TURN );

    SIXTRL_ASSERT( track_status == st::TRACK_SUCCESS );

    /* -------------------------------------------------------------------- */
    /* Retrieve list of available nodes */

    buf_size_t const num_avail_nodes = st::CudaTrackJob::NumAvailableNodes();

    if( num_avail_nodes == buf_size_t{ 0 } )
    {
        std::cerr << "[          ] [ INFO ] \r\n"
                  << "[          ] [ INFO ] "
                  << "!!!!!!!! No cuda nodes found -> skipping test !!!!!!\r\n"
                  << "[          ] [ INFO ]" << std::endl;
        return;
    }

    std::vector< node_id_t > avail_node_ids( num_avail_nodes );

    buf_size_t const num_nodes = st::CudaTrackJob::GetAvailableNodeIdsList(
        avail_node_ids.size(), avail_node_ids.data() );

    ASSERT_TRUE( num_nodes == num_avail_nodes );

    for( auto const& node_id : avail_node_ids )
    {
        /* Create a dedicated buffer for tracking */
        buffer_t track_pb;

        particles_t* particles = track_pb.addCopy(
            *in_particles.get< particles_t >( buf_size_t{ 0 } ) );

        SIXTRL_ASSERT( particles != nullptr );

        /* Create a track job on the current node */
        track_job_t track_job( node_id.toString(), track_pb, beam_elem_buffer );

        ASSERT_TRUE( track_job.ptrCudaController() != nullptr );
        ASSERT_TRUE( track_job.hasSelectedNode() );
        ASSERT_TRUE( track_job.ptrCudaController()->isSelectedNode( node_id ) );

        auto node_info = track_job.ptrCudaController()->ptrNodeInfo( node_id );
        ASSERT_TRUE( node_info != nullptr );

        std::cout << "[          ] [ INFO ] Selected Node \r\n";
        node_info->printOut();

        /* First pass runs in debug mode. */
        if( !track_job.isInDebugMode() )
        {
            track_job.enableDebugMode();
        }

        ASSERT_TRUE( track_job.requiresCollecting() );
        ASSERT_TRUE( track_job.isInDebugMode() );

        /* ***************************************************************** */
        /* Track turn-by-turn in chunks of beam elements via trackLine(). */

        buf_size_t const num_beam_elements = beam_elem_buffer.getNumObjects();
        buf_size_t const chunk_length = buf_size_t{ 10 };
        buf_size_t num_chunks = num_beam_elements / chunk_length;

        if( ( num_beam_elements % chunk_length ) != buf_size_t{ 0 } )
        {
            ++num_chunks;
        }

        pindex_t min_particle_id, max_particle_id;
        pindex_t min_at_element_id, max_at_element_id;
        pindex_t min_at_turn_id, max_at_turn_id;

        ::NS(Particles_init_min_max_attributes_for_find)(
            &min_particle_id, &max_particle_id, &min_at_element_id,
            &max_at_element_id, &min_at_turn_id, &max_at_turn_id );

        ctrl_status_t status = ::NS(Particles_find_min_max_attributes)(
            particles, &min_particle_id, &max_particle_id,
            &min_at_element_id, &max_at_element_id, &min_at_turn_id,
            &max_at_turn_id );

        SIXTRL_ASSERT( status == st::ARCH_STATUS_SUCCESS );
        SIXTRL_ASSERT( min_at_turn_id == max_at_turn_id );
        SIXTRL_ASSERT( min_at_element_id == max_at_element_id );

        for( buf_size_t kk = min_at_turn_id ; kk < UNTIL_TURN ; ++kk )
        {
            for( buf_size_t jj = buf_size_t{ 0 } ; jj < num_chunks ; ++jj )
            {
                buf_size_t const be_begin_idx = jj * chunk_length;
                buf_size_t const be_end_idx = std::min(
                    be_begin_idx + chunk_length, num_beam_elements );

                /* The last chunk of a pass finishes the turn. */
                bool const finish_turn = be_end_idx >= num_beam_elements;

                track_status = track_job.trackLine(
                    be_begin_idx, be_end_idx, finish_turn );

                ASSERT_TRUE( track_status == ::NS(TRACK_SUCCESS) );
            }
        }

        /* ***************************************************************** */

        status = track_job.collectParticles();
        ASSERT_TRUE( status == st::ARCH_STATUS_SUCCESS );
        SIXTRL_ASSERT( track_job.ptrParticlesBuffer() == &track_pb );

        particles = track_pb.get< particles_t >( buf_size_t{ 0 } );
        SIXTRL_ASSERT( particles != nullptr );

        ASSERT_TRUE( ( cmp_particles != nullptr ) &&
            ( particles != nullptr ) &&
            ( ( 0 == ::NS(Particles_compare_values)(
                cmp_particles->getCApiPtr(), particles->getCApiPtr() ) ) ||
              ( ( ABS_TOLERANCE > real_t{ 0 } ) &&
                ( 0 == ::NS(Particles_compare_values_with_treshold)(
                    cmp_particles->getCApiPtr(), particles->getCApiPtr(),
                    ABS_TOLERANCE ) ) ) ) );

        /* Restore the initial particle state for the second (non-debug) run. */
        status = particles->copy(
            in_particles.get< particles_t >( buf_size_t{ 0 } ) );
        SIXTRL_ASSERT( status == st::ARCH_STATUS_SUCCESS );

        track_job.disableDebugMode();
        ASSERT_TRUE( !track_job.isInDebugMode() );

        status = track_job.reset( track_pb, beam_elem_buffer );
        ASSERT_TRUE( status == st::ARCH_STATUS_SUCCESS );

        /* Check whether the update of the particles state has worked */
        status = track_job.collectParticles();
        SIXTRL_ASSERT( status == st::ARCH_STATUS_SUCCESS );

        particles = track_pb.get< particles_t >( buf_size_t{ 0 } );
        SIXTRL_ASSERT( particles != nullptr );
        SIXTRL_ASSERT( 0 == ::NS(Particles_compare_values)(
            particles->getCApiPtr(),
            in_particles.get< particles_t >( buf_size_t{ 0 } )->getCApiPtr() ) );

        /* ***************************************************************** */
        /* Perform tracking again, this time not in debug mode */

        ::NS(Particles_init_min_max_attributes_for_find)(
            &min_particle_id, &max_particle_id, &min_at_element_id,
            &max_at_element_id, &min_at_turn_id, &max_at_turn_id );

        status = ::NS(Particles_find_min_max_attributes)(
            particles, &min_particle_id, &max_particle_id,
            &min_at_element_id, &max_at_element_id, &min_at_turn_id,
            &max_at_turn_id );

        SIXTRL_ASSERT( status == st::ARCH_STATUS_SUCCESS );
        SIXTRL_ASSERT( min_at_turn_id == max_at_turn_id );
        SIXTRL_ASSERT( min_at_element_id == max_at_element_id );

        for( buf_size_t kk = min_at_turn_id ; kk < UNTIL_TURN ; ++kk )
        {
            for( buf_size_t jj = buf_size_t{ 0 } ; jj < num_chunks ; ++jj )
            {
                buf_size_t const be_begin_idx = jj * chunk_length;
                buf_size_t const be_end_idx = std::min(
                    be_begin_idx + chunk_length, num_beam_elements );

                bool const finish_turn = be_end_idx >= num_beam_elements;

                track_status = track_job.trackLine(
                    be_begin_idx, be_end_idx, finish_turn );

                ASSERT_TRUE( track_status == st::TRACK_SUCCESS );
            }
        }

        /* ***************************************************************** */
        /* Collect the results again and ... */

        status = track_job.collectParticles();
        ASSERT_TRUE( status == st::ARCH_STATUS_SUCCESS );
        SIXTRL_ASSERT( track_job.ptrParticlesBuffer() == &track_pb );
        particles = track_pb.get< particles_t >( buf_size_t{ 0 } );

        /* ... compare against the cpu tracking result */
        ASSERT_TRUE( ( particles != nullptr ) && ( cmp_particles != nullptr ) &&
            ( ( 0 == ::NS(Particles_compare_values)(
                cmp_particles->getCApiPtr(), particles->getCApiPtr() ) ) ||
              ( ( ABS_TOLERANCE > real_t{ 0 } ) &&
                ( 0 == ::NS(Particles_compare_values_with_treshold)(
                    cmp_particles->getCApiPtr(), particles->getCApiPtr(),
                    ABS_TOLERANCE ) ) ) ) );
    }
}

/* end: tests/sixtracklib/cuda/track/test_track_job_track_line_cxx.cpp */
<|file_name|>resolver.js<|end_file_name|><|fim▁begin|>import Resolver from 'ember/resolver';
var resolver = Resolver.create();
resolver.namespace = {<|fim▁hole|>};
export default resolver;<|fim▁end|> | modulePrefix: 'todo-app' |
<|file_name|>server_extension.py<|end_file_name|><|fim▁begin|>from functools import wraps
import json
import os
import traceback
import validators
from jinja2 import Environment, PackageLoader
from notebook.utils import url_path_join
from notebook.base.handlers import IPythonHandler
import requests
from requests.auth import HTTPBasicAuth
# Jinja environment rendering the HTML fragments shipped inside the
# `saagie` package (templates live in saagie/jinja2/).
env = Environment(
    loader=PackageLoader('saagie', 'jinja2'),
)

# Module-level connection state. SAAGIE_ROOT_URL may be pre-seeded from the
# environment; the URL patterns are filled in by define_globals() and the
# auth token by a successful login_form() POST.
SAAGIE_ROOT_URL = os.environ.get("SAAGIE_ROOT_URL", None)
SAAGIE_USERNAME = None
PLATFORMS_URL = None
SAAGIE_BASIC_AUTH_TOKEN = None
JOBS_URL_PATTERN = None
JOB_URL_PATTERN = None
JOB_UPGRADE_URL_PATTERN = None
SCRIPT_UPLOAD_URL_PATTERN = None
def get_absolute_saagie_url(saagie_url):
    """Resolve a platform-relative URL ('/...') against SAAGIE_ROOT_URL.

    URLs that do not start with '/' are assumed to already be absolute
    and are returned unchanged.
    """
    if not saagie_url.startswith('/'):
        return saagie_url
    return SAAGIE_ROOT_URL + saagie_url
class ResponseError(Exception):
    """Signals that the current request should fail with `status_code`."""

    def __init__(self, status_code):
        super(ResponseError, self).__init__(status_code)
        self.status_code = status_code
class SaagieHandler(IPythonHandler):
    """Tornado handler: dispatches GET/POST requests to a named view and
    renders the view's template with the resulting context."""

    def handle_request(self, method):
        # Tornado stores each request argument as a list of byte strings;
        # keep the first value of each and decode it to text.
        data = {k: v[0].decode() for k, v in self.request.arguments.items()}
        if 'view' not in data:
            self.send_error(404)
            return
        view_name = data.pop('view')
        notebook_path = data.pop('notebook_path', None)
        notebook_json = data.pop('notebook_json', None)
        notebook = Notebook(notebook_path, notebook_json)
        try:
            template_name, template_data = views.render(
                view_name, notebook=notebook, data=data, method=method)
        except ResponseError as e:
            self.send_error(e.status_code)
            return
        except:
            # Any other failure is rendered as a 500 page with traceback.
            template_name = 'internal_error.html'
            template_data = {'error': traceback.format_exc()}
            self.set_status(500)
        # Every template gets the notebook object injected.
        template_data.update(
            notebook=notebook,
        )
        template = env.get_template(template_name)
        self.finish(template.render(template_data))

    def get(self):
        self.handle_request('GET')

    def post(self):
        self.handle_request('POST')

    def check_xsrf_cookie(self):
        # XSRF protection intentionally disabled for this endpoint.
        return
class SaagieCheckHandler(IPythonHandler):
    """Health-check endpoint: answers with an empty 200 response so the
    front-end can detect that the extension is installed."""

    def get(self):
        self.finish()
class SaagieJobRun:
    """One execution (instance) of a Saagie job, with its status and logs."""

    def __init__(self, job, run_data):
        self.job = job
        self.id = run_data['id']
        self.status = run_data['status']
        # Logs are optional in the API payload; default to empty strings.
        self.stdout = run_data.get('logs_out', '')
        self.stderr = run_data.get('logs_err', '')
class SaagieJob:
    """A processing job deployed on one Saagie platform."""

    @classmethod
    def from_id(cls, notebook, platform_id, job_id):
        # Fetch the job description from the manager API and wrap it.
        return SaagieJob(
            notebook,
            requests.get(JOB_URL_PATTERN % (platform_id, job_id), auth=SAAGIE_BASIC_AUTH_TOKEN).json())

    def __init__(self, notebook, job_data):
        self.notebook = notebook
        self.data = job_data
        self.platform_id = job_data['platform_id']
        self.capsule_type = job_data['capsule_code']
        self.id = job_data['id']
        self.name = job_data['name']
        # Last finished run; populated lazily by fetch_logs().
        self.last_run = None

    def set_as_current(self):
        # Remember this job on the (memoised) notebook object.
        self.notebook.current_job = self

    @property
    def url(self):
        # REST API URL of this job.
        return (JOBS_URL_PATTERN + '/%s') % (self.platform_id, self.id)

    @property
    def admin_url(self):
        # Manager UI URL of this job.
        return get_absolute_saagie_url('/#/manager/%s/job/%s'
                                       % (self.platform_id, self.id))

    @property
    def logs_url(self):
        return self.admin_url + '/logs'

    @property
    def is_started(self):
        return self.last_run is not None

    def fetch_logs(self):
        """Refresh `last_run`; only SUCCESS/FAILED runs have final logs."""
        job_data = requests.get(self.url, auth=SAAGIE_BASIC_AUTH_TOKEN).json()
        run_data = job_data.get('last_instance')
        if run_data is None or run_data['status'] not in ('SUCCESS', 'FAILED'):
            return
        run_data = requests.get(
            get_absolute_saagie_url('/api/v1/jobtask/%s'
                                    % run_data['id']), auth=SAAGIE_BASIC_AUTH_TOKEN).json()
        self.last_run = SaagieJobRun(self, run_data)

    @property
    def details_template_name(self):
        return 'include/python_job_details.html'

    def __str__(self):
        return self.name

    def __eq__(self, other):
        if other is None:
            return False
        return self.platform_id == other.platform_id and self.id == other.id

    def __lt__(self, other):
        # Jobs are ordered by id (newest last); None sorts as greatest.
        if other is None:
            return False
        return self.id < other.id
class SaagiePlatform:
    """One platform of the Saagie installation and its job listing."""

    # Capsule (job runtime) types this extension can deploy to.
    SUPPORTED_CAPSULE_TYPES = {'python'}

    def __init__(self, notebook, platform_data):
        self.notebook = notebook
        self.id = platform_data['id']
        self.name = platform_data['name']
        self.capsule_types = {c['code'] for c in platform_data['capsules']}

    @property
    def is_supported(self):
        # Supported iff the platform offers at least one capsule type
        # from SUPPORTED_CAPSULE_TYPES.
        return not self.capsule_types.isdisjoint(self.SUPPORTED_CAPSULE_TYPES)

    def get_jobs(self):
        """Return the platform's 'processing' jobs of a supported capsule type."""
        if not self.is_supported:
            return []
        jobs_data = requests.get(JOBS_URL_PATTERN % self.id, auth=SAAGIE_BASIC_AUTH_TOKEN).json()
        return [SaagieJob(self.notebook, job_data) for job_data in jobs_data
                if job_data['category'] == 'processing' and
                job_data['capsule_code'] in self.SUPPORTED_CAPSULE_TYPES]

    def __eq__(self, other):
        return self.id == other.id
class Notebook:
    """Wrapper around a notebook's path and JSON content.

    Instances are memoised per path so that per-notebook state (the
    currently deployed job) survives across requests.
    """

    CACHE = {}

    def __new__(cls, path, json):
        cached = cls.CACHE.get(path)
        if cached is not None:
            return cached
        instance = super(Notebook, cls).__new__(cls)
        cls.CACHE[path] = instance
        return instance

    def __init__(self, path, json_data):
        if path is None:
            path = 'Untitled.ipynb'
        if json_data is None:
            json_data = json.dumps({
                'cells': [],
                'metadata': {'kernelspec': {'name': 'python3'}}})
        self.path = path
        self.json = json.loads(json_data)
        # Cached instances keep their previously selected job.
        if not hasattr(self, 'current_job'):
            self.current_job = None

    @property
    def name(self):
        # File name without directory or extension.
        base = os.path.basename(self.path)
        return os.path.splitext(base)[0]

    @property
    def kernel_name(self):
        return self.json['metadata']['kernelspec']['name']

    @property
    def kernel_display_name(self):
        return self.json['metadata']['kernelspec']['display_name']

    def get_code_cells(self):
        """Return the source of every code cell, in notebook order."""
        sources = []
        for cell in self.json['cells']:
            if cell['cell_type'] == 'code':
                sources.append(cell['source'])
        return sources

    def get_code(self, indices=None):
        """Join the selected code cells (all of them by default)."""
        cells = self.get_code_cells()
        if indices is None:
            indices = list(range(len(cells)))
        return '\n\n\n'.join([cells[i] for i in indices])

    def get_platforms(self):
        return [SaagiePlatform(self, platform_data)
                for platform_data in requests.get(PLATFORMS_URL, auth=SAAGIE_BASIC_AUTH_TOKEN).json()]
class ViewsCollection(dict):
    """Registry mapping view names to view callables.

    A view is a callable ``view(method, notebook, data, **kwargs)`` that
    returns either a template context dict (template name defaults to
    ``<view name>.html``) or a ``(template_name, context)`` tuple.
    """

    def add(self, func):
        """Decorator: register `func` under its own name and return it."""
        self[func.__name__] = func
        return func

    def render(self, view_name, notebook, data=None, method='GET', **kwargs):
        """Run the named view and return ``(template_name, template_data)``.

        Raises ResponseError(404) when no view is registered under
        `view_name`.
        """
        if data is None:
            data = {}
        try:
            # Fixed: look the view up on this collection rather than on the
            # module-level `views` global, which broke any other instance.
            view = self[view_name]
        except KeyError:
            raise ResponseError(404)
        template_data = view(method, notebook, data, **kwargs)
        if isinstance(template_data, tuple):
            template_name, template_data = template_data
        else:
            template_name = view.__name__ + '.html'
        return template_name, template_data
# The shared view registry; view functions below register themselves with
# the @views.add decorator and are dispatched by SaagieHandler.
views = ViewsCollection()


@views.add
def modal(method, notebook, data):
    # Empty context: the modal template only needs the notebook object,
    # which handle_request() injects into every render.
    return {}
def clear_basic_auth_token():
    # Drop the cached credentials; is_logged() will report False until a
    # new login succeeds.
    global SAAGIE_BASIC_AUTH_TOKEN
    SAAGIE_BASIC_AUTH_TOKEN = None

# Init an empty Basic Auth token on first launch
clear_basic_auth_token()
def is_logged():
if SAAGIE_ROOT_URL is None or SAAGIE_BASIC_AUTH_TOKEN is None:
return False
else:
# Check if Basic token is still valid
is_logged_in = False
try:
response = requests.get(SAAGIE_ROOT_URL + '/api/v1/user-current', auth=SAAGIE_BASIC_AUTH_TOKEN, allow_redirects=False)
is_logged_in = response.ok
except (requests.ConnectionError, requests.RequestException, requests.HTTPError, requests.Timeout) as err:
print ('Error while trying to connect to Saagie: ', err)
if is_logged_in is not True:
# Remove Basic Auth token from globals. It will force a new login phase.
clear_basic_auth_token()
return is_logged_in
def define_globals(saagie_root_url, saagie_username):
if saagie_root_url is not None:
global SAAGIE_ROOT_URL
global SAAGIE_USERNAME
global PLATFORMS_URL
global JOBS_URL_PATTERN
global JOB_URL_PATTERN
global JOB_UPGRADE_URL_PATTERN
global SCRIPT_UPLOAD_URL_PATTERN
SAAGIE_USERNAME = saagie_username
SAAGIE_ROOT_URL = saagie_root_url.strip("/")
PLATFORMS_URL = SAAGIE_ROOT_URL + '/api/v1/platform'
JOBS_URL_PATTERN = PLATFORMS_URL + '/%s/job'
JOB_URL_PATTERN = JOBS_URL_PATTERN + '/%s'
JOB_UPGRADE_URL_PATTERN = JOBS_URL_PATTERN + '/%s/version'
SCRIPT_UPLOAD_URL_PATTERN = JOBS_URL_PATTERN + '/upload'
@views.add
def login_form(method, notebook, data):
if method == 'POST':
# check if the given Saagie URL is well formed
if not validators.url(data['saagie_root_url']):
return {'error': 'Invalid URL', 'saagie_root_url': data['saagie_root_url'] or '', 'username': data['username'] or ''}
define_globals(data['saagie_root_url'], data['username'])
try:
basic_token = HTTPBasicAuth(data['username'], data['password'])
current_user_response = requests.get(SAAGIE_ROOT_URL + '/api/v1/user-current', auth=basic_token, allow_redirects=False)
if current_user_response.ok:
# Login succeeded, keep the basic token for future API calls
global SAAGIE_BASIC_AUTH_TOKEN
SAAGIE_BASIC_AUTH_TOKEN = basic_token
except (requests.ConnectionError, requests.RequestException, requests.HTTPError, requests.Timeout) as err:
print ('Error while trying to connect to Saagie: ', err)
return {'error': 'Connection error', 'saagie_root_url': SAAGIE_ROOT_URL, 'username': SAAGIE_USERNAME or ''}
if SAAGIE_BASIC_AUTH_TOKEN is not None:
return views.render('capsule_type_chooser', notebook)
return {'error': 'Invalid URL, username or password.', 'saagie_root_url': SAAGIE_ROOT_URL, 'username': SAAGIE_USERNAME or ''}
if is_logged():
return views.render('capsule_type_chooser', notebook)
return {'error': None, 'saagie_root_url': SAAGIE_ROOT_URL or '', 'username': SAAGIE_USERNAME or ''}
def login_required(view):
@wraps(view)
def inner(method, notebook, data, *args, **kwargs):
if not is_logged():
return views.render('login_form', notebook)
return view(method, notebook, data, *args, **kwargs)
return inner
@views.add
@login_required
def capsule_type_chooser(method, notebook, data):
return {'username': SAAGIE_USERNAME}
def get_job_form(method, notebook, data):
context = {'platforms': notebook.get_platforms()}
context['values'] = ({'current': {'options': {}}} if notebook.current_job is None
else notebook.current_job.data)
return context
def create_job_base_data(data):
return {
'platform_id': data['saagie-platform'],
'category': 'processing',
'name': data['job-name'],
'description': data['description'],
'current': {
'cpu': data['cpu'],
'disk': data['disk'],
'memory': data['ram'],
'isInternalSubDomain': False,
'isInternalPort': False,
'options': {}
}
}
def upload_python_script(notebook, data):
code = notebook.get_code(map(int, data.get('code-lines', '').split('|')))
files = {'file': (data['job-name'] + '.py', code)}
return requests.post(
SCRIPT_UPLOAD_URL_PATTERN % data['saagie-platform'],
files=files, auth=SAAGIE_BASIC_AUTH_TOKEN).json()['fileName']
@views.add
@login_required
def python_job_form(method, notebook, data):
if method == 'POST':
platform_id = data['saagie-platform']
job_data = create_job_base_data(data)
job_data['capsule_code'] = 'python'
job_data['always_email'] = False
job_data['manual'] = True
job_data['retry'] = ''
current = job_data['current']
current['options']['language_version'] = data['language-version']
current['releaseNote'] = data['release-note']
current['template'] = data['shell-command']
current['file'] = upload_python_script(notebook, data)
new_job_data = requests.post(JOBS_URL_PATTERN % platform_id,
json=job_data, auth=SAAGIE_BASIC_AUTH_TOKEN).json()
job = SaagieJob(notebook, new_job_data)
job.set_as_current()
return views.render('starting_job', notebook, {'job': job})
context = get_job_form(method, notebook, data)
context['action'] = '/saagie?view=python_job_form'
context['username'] = SAAGIE_USERNAME
return context
@views.add
@login_required
def update_python_job(method, notebook, data):
if method == 'POST':
job = notebook.current_job
platform_id = job.platform_id
data['saagie-platform'] = platform_id
data['job-name'] = job.name
data['description'] = ''
current = create_job_base_data(data)['current']
current['options']['language_version'] = data['language-version']
current['releaseNote'] = data['release-note']
current['template'] = data['shell-command']
current['file'] = upload_python_script(notebook, data)
requests.post(JOB_UPGRADE_URL_PATTERN % (platform_id, job.id),
json={'current': current}, auth=SAAGIE_BASIC_AUTH_TOKEN)
job.last_run = None
return views.render('starting_job', notebook, {'job': job})
context = get_job_form(method, notebook, data)
context['action'] = '/saagie?view=update_python_job'
context['username'] = SAAGIE_USERNAME
return context
@views.add
@login_required
def select_python_job(method, notebook, data):
if method == 'POST':
platform_id, job_id = data['job'].split('-')
notebook.current_job = SaagieJob.from_id(notebook, platform_id, job_id)
return views.render('update_python_job', notebook, data)
jobs_by_platform = []
for platform in notebook.get_platforms():
jobs = platform.get_jobs()
if jobs:
jobs_by_platform.append((platform,
list(sorted(jobs, reverse=True))))
return {'jobs_by_platform': jobs_by_platform,
'action': '/saagie?view=select_python_job', 'username': SAAGIE_USERNAME}
@views.add
@login_required
def unsupported_kernel(method, notebook, data):
return {'username': SAAGIE_USERNAME}
@views.add
@login_required
def starting_job(method, notebook, data):
job = notebook.current_job
job.fetch_logs()<|fim▁hole|> return {'job': job, 'username': SAAGIE_USERNAME}
@views.add
@login_required
def started_job(method, notebook, data):
return {'job': notebook.current_job, 'username': SAAGIE_USERNAME}
@views.add
def logout(method, notebook, data):
global SAAGIE_BASIC_AUTH_TOKEN
global SAAGIE_ROOT_URL
global SAAGIE_USERNAME
SAAGIE_BASIC_AUTH_TOKEN = None
SAAGIE_ROOT_URL = None
SAAGIE_USERNAME = None
return {}
def load_jupyter_server_extension(nb_app):
web_app = nb_app.web_app
base_url = web_app.settings['base_url']
route_pattern = url_path_join(base_url, '/saagie')
web_app.add_handlers('.*$', [(route_pattern, SaagieHandler)])
route_pattern = url_path_join(base_url, '/saagie/check')
web_app.add_handlers('.*$', [(route_pattern, SaagieCheckHandler)])<|fim▁end|> | if job.is_started:
return views.render('started_job', notebook, {'job': job}) |
<|file_name|>cemetery.component.spec.ts<|end_file_name|><|fim▁begin|>import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { CemeteryComponent } from './cemetery.component';
describe('CemeteryComponent', () => {
let component: CemeteryComponent;
let fixture: ComponentFixture<CemeteryComponent>;
beforeEach(async(() => {
TestBed.configureTestingModule({<|fim▁hole|> }));
beforeEach(() => {
fixture = TestBed.createComponent(CemeteryComponent);
component = fixture.componentInstance;
fixture.detectChanges();
});
it('should be created', () => {
expect(component).toBeTruthy();
});
});<|fim▁end|> | declarations: [ CemeteryComponent ]
})
.compileComponents(); |
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>/*
* @license Apache-2.0
*
* Copyright (c) 2019 The Stdlib Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// TypeScript Version: 2.0
/**
* Returns the excess kurtosis of a uniform distribution.
*
* ## Notes
*
* - If provided `a >= b`, the function returns `NaN`.
*
* @param a - minimum support
* @param b - maximum support
* @returns excess kurtosis
*
* @example
* var v = kurtosis( 0.0, 1.0 );
* // returns -1.2
*
* @example
* var v = kurtosis( 4.0, 12.0 );
* // returns -1.2
*
* @example
* var v = kurtosis( -4.0, 4.0 );
* // returns -1.2<|fim▁hole|>*
* @example
* var v = kurtosis( 1.0, -0.1 );
* // returns NaN
*
* @example
* var v = kurtosis( 2.0, NaN );
* // returns NaN
*
* @example
* var v = kurtosis( NaN, 2.0 );
* // returns NaN
*/
declare function kurtosis( a: number, b: number ): number;
// EXPORTS //
export = kurtosis;<|fim▁end|> | |
<|file_name|>debuggable_http_response.py<|end_file_name|><|fim▁begin|>try:
# Python 3
from http.client import HTTPResponse, IncompleteRead
str_cls = str
except (ImportError):
# Python 2
from httplib import HTTPResponse, IncompleteRead
str_cls = unicode
from ..console_write import console_write
class DebuggableHTTPResponse(HTTPResponse):
"""
A custom HTTPResponse that formats debugging info for Sublime Text
"""
_debug_protocol = 'HTTP'
def __init__(self, sock, debuglevel=0, method=None, **kwargs):
# We have to use a positive debuglevel to get it passed to here,
# however we don't want to use it because by default debugging prints
# to the stdout and we can't capture it, so we use a special -1 value
if debuglevel == 5:
debuglevel = -1
HTTPResponse.__init__(self, sock, debuglevel=debuglevel, method=method)
def begin(self):
return_value = HTTPResponse.begin(self)
if self.debuglevel == -1:
# Python 2
if hasattr(self.msg, 'headers'):
headers = [line.rstrip() for line in self.msg.headers]
# Python 3
else:
headers = []
for header in self.msg:
headers.append("%s: %s" % (header, self.msg[header]))
versions = {<|fim▁hole|> 10: u'HTTP/1.0',
11: u'HTTP/1.1'
}
status_line = u'%s %s %s' % (versions[self.version], str_cls(self.status), self.reason)
headers.insert(0, status_line)
indented_headers = u'\n '.join(headers)
console_write(
u'''
Urllib %s Debug Read
%s
''',
(self._debug_protocol, indented_headers)
)
return return_value
def is_keep_alive(self):
# Python 2
if hasattr(self.msg, 'headers'):
connection = self.msg.getheader('connection')
# Python 3
else:
connection = self.msg['connection']
if connection and connection.lower() == 'keep-alive':
return True
return False
def read(self, *args):
try:
return HTTPResponse.read(self, *args)
except (IncompleteRead) as e:
return e.partial<|fim▁end|> | 9: u'HTTP/0.9', |
<|file_name|>demo.js<|end_file_name|><|fim▁begin|>FusionCharts.ready(function () {
var gradientCheckBox = document.getElementById('useGradient');
//Set event listener for radio button
if (gradientCheckBox.addEventListener) {
gradientCheckBox.addEventListener("click", changeGradient);
}
function changeGradient(evt, obj) {
//Set gradient fill for chart using usePlotGradientColor attribute
(gradientCheckBox.checked) ?revenueChart.setChartAttribute('usePlotGradientColor', 1) : revenueChart.setChartAttribute('usePlotGradientColor', 0);
};
var revenueChart = new FusionCharts({
type: 'column2d',
renderAt: 'chart-container',
width: '400',
height: '300',
dataFormat: 'json',
dataSource: {
"chart": {
"caption": "Quarterly Revenue",
"subCaption": "Last year",
"xAxisName": "Quarter",
"yAxisName": "Amount (In USD)",
"theme": "fint",
"numberPrefix": "$",
//Removing default gradient fill from columns
"usePlotGradientColor": "1"
},
"data": [{
"label": "Q1",
"value": "1950000",
"color": "#008ee4"
}, {
"label": "Q2",
"value": "1450000",
"color": "#9b59b6"
}, {
"label": "Q3",
"value": "1730000",
"color": "#6baa01"
}, {
"label": "Q4",
"value": "2120000",
"color": "#e44a00"
}]
}
}).render();<|fim▁hole|><|fim▁end|> |
}); |
<|file_name|>SecureStoreManager.java<|end_file_name|><|fim▁begin|>/*
* This file is part of the QuickServer library
* Copyright (C) QuickServer.org
*
* Use, modification, copying and distribution of this software is subject to
* the terms and conditions of the GNU Lesser General Public License.
* You should have received a copy of the GNU LGP License along with this
* library; if not, you can download a copy from <http://www.quickserver.org/>.
*
* For questions, suggestions, bug-reports, enhancement-requests etc.
* visit http://www.quickserver.org
*
*/
package org.quickserver.security;
import java.io.*;
import java.util.logging.*;
import org.quickserver.util.xmlreader.*;
import org.quickserver.util.io.*;
import javax.net.ssl.*;
import java.security.*;
import org.quickserver.swing.*;
/**<|fim▁hole|> * in <secure-store> to set new manger to load your SecureStore. This
* class can be overridden to change the way QuickServer configures the
* secure mode.
* @see org.quickserver.util.xmlreader.SecureStore
* @author Akshathkumar Shetty
* @since 1.4
*/
public class SecureStoreManager {
private static Logger logger = Logger.getLogger(
SecureStoreManager.class.getName());
private SensitiveInput sensitiveInput = null;
/**
* Loads KeyManagers. KeyManagers are responsible for managing
* the key material which is used to authenticate the local
* SSLSocket to its peer. Can return null.
*/
public KeyManager[] loadKeyManagers(QuickServerConfig config)
throws GeneralSecurityException, IOException {
Secure secure = config.getSecure();
SecureStore secureStore = secure.getSecureStore();
if(secureStore==null) {
logger.fine("SecureStore configuration not set! "+
"So returning null for KeyManager");
return null;
}
KeyStoreInfo keyStoreInfo = secureStore.getKeyStoreInfo();
if(keyStoreInfo==null) {
logger.fine("KeyStoreInfo configuration not set! "+
"So returning null for KeyManager");
return null;
}
logger.finest("Loading KeyManagers");
KeyStore ks = getKeyStoreForKey(secureStore.getType(),
secureStore.getProvider());
logger.info("KeyManager Provider: "+ks.getProvider());
char storepass[] = null;
if(keyStoreInfo.getStorePassword()!=null) {
logger.finest("KeyStore: Store password was present!");
storepass = keyStoreInfo.getStorePassword().toCharArray();
} else {
logger.finest("KeyStore: Store password was not set.. so asking!");
if(sensitiveInput==null) {
sensitiveInput = new SensitiveInput(config.getName()+" - Input Prompt");
}
storepass = sensitiveInput.getInput("Store password for KeyStore");
if(storepass==null) {
logger.finest("No password entered.. will pass null");
}
}
InputStream keyStoreStream = null;
try {
if(keyStoreInfo.getStoreFile().equalsIgnoreCase("none")==false) {
logger.finest("KeyStore location: "+
ConfigReader.makeAbsoluteToConfig(keyStoreInfo.getStoreFile(),
config));
keyStoreStream = new FileInputStream(
ConfigReader.makeAbsoluteToConfig(keyStoreInfo.getStoreFile(),
config));
}
ks.load(keyStoreStream, storepass);
logger.finest("KeyStore loaded");
} finally {
if(keyStoreStream != null) {
keyStoreStream.close();
keyStoreStream = null;
}
}
char keypass[] = null;
if(keyStoreInfo.getKeyPassword()!=null) {
logger.finest("KeyStore: key password was present!");
keypass = keyStoreInfo.getKeyPassword().toCharArray();
} else {
logger.finest("KeyStore: Key password was not set.. so asking!");
if(sensitiveInput==null) {
sensitiveInput = new SensitiveInput(config.getName()+" - Input Prompt");
}
keypass = sensitiveInput.getInput("Key password for KeyStore");
if(keypass==null) {
logger.finest("No password entered.. will pass blank");
keypass = "".toCharArray();
}
}
KeyManagerFactory kmf = KeyManagerFactory.getInstance(
secureStore.getAlgorithm());
kmf.init(ks, keypass);
storepass = " ".toCharArray();
storepass = null;
keypass = " ".toCharArray();
keypass = null;
return kmf.getKeyManagers();
}
/**
* Loads TrustManagers. TrustManagers are responsible for managing the
* trust material that is used when making trust decisions, and for
* deciding whether credentials presented by a peer should be accepted.
* Can return null.
*/
public TrustManager[] loadTrustManagers(QuickServerConfig config)
throws GeneralSecurityException, IOException {
Secure secure = config.getSecure();
SecureStore secureStore = secure.getSecureStore();
TrustStoreInfo trustStoreInfo = secureStore.getTrustStoreInfo();
if(trustStoreInfo==null) {
return null;
}
logger.finest("Loading TrustManagers");
String type = null;
if(trustStoreInfo.getType()!=null && trustStoreInfo.getType().trim().length()!=0)
type = trustStoreInfo.getType();
else
type = secureStore.getType();
String provider = null;
if(trustStoreInfo.getProvider()!=null && trustStoreInfo.getProvider().trim().length()!=0)
provider = trustStoreInfo.getProvider();
else
provider = secureStore.getProvider();
KeyStore ts = getKeyStoreForTrust(type, provider);
char trustpass[] = null;
if(trustStoreInfo.getStorePassword()!=null) {
logger.finest("TrustStore: Store password was present!");
trustpass = trustStoreInfo.getStorePassword().toCharArray();
} else {
logger.finest("TrustStore: Store password was not set.. so asking!");
if(sensitiveInput==null) {
sensitiveInput = new SensitiveInput(config.getName()+" - Input Prompt");
}
trustpass = sensitiveInput.getInput("Store password for TrustStore");
if(trustpass==null) {
logger.finest("No password entered.. will pass null");
}
}
InputStream trustStoreStream = null;
try {
if(trustStoreInfo.getStoreFile().equalsIgnoreCase("none")==false) {
logger.finest("TrustStore location: "+
ConfigReader.makeAbsoluteToConfig(
trustStoreInfo.getStoreFile(), config));
trustStoreStream = new FileInputStream(
ConfigReader.makeAbsoluteToConfig(
trustStoreInfo.getStoreFile(), config));
}
ts.load(trustStoreStream, trustpass);
logger.finest("TrustStore loaded");
} finally {
if(trustStoreStream!=null) {
trustStoreStream.close();
trustStoreStream = null;
}
}
TrustManagerFactory tmf = TrustManagerFactory.getInstance(
secureStore.getAlgorithm());
tmf.init(ts);
return tmf.getTrustManagers();
}
/**
* Generates a SSLContext object that implements the specified secure
* socket protocol.
*/
public SSLContext getSSLContext(String protocol)
throws NoSuchAlgorithmException {
return SSLContext.getInstance(protocol);
}
public SSLContext getSSLContext(QuickServerConfig config)
throws NoSuchAlgorithmException, NoSuchProviderException {
if(config.getSecure().getSecureStore().getProvider()!=null) {
return SSLContext.getInstance(
config.getSecure().getProtocol(),
config.getSecure().getSecureStore().getProvider());
} else {
return SSLContext.getInstance(config.getSecure().getProtocol());
}
}
/**
* Generates a keystore object for the specified keystore type from
* the specified provider to be used for loading/storeing keys.
* @param type the type of keystore
* @param provider the name of the provider if <code>null</code> any
* provider package that implements this type of key may be given based
* on the priority.
*/
protected KeyStore getKeyStoreForKey(String type, String provider)
throws KeyStoreException, NoSuchProviderException {
if(provider==null)
return KeyStore.getInstance(type);
return KeyStore.getInstance(type, provider);
}
/**
* Generates a keystore object for the specified keystore type from
* the specified provider to be used for loading/storing trusted
* keys/certificates.
* @param type the type of keystore
* @param provider the name of the provider if <code>null</code> any
* provider package that implements this type of key may be given based
* on the priority.
*/
protected KeyStore getKeyStoreForTrust(String type, String provider)
throws KeyStoreException, NoSuchProviderException {
if(provider==null)
return KeyStore.getInstance(type);
return KeyStore.getInstance(type, provider);
}
/**
* Returns a SSLSocketFactory object to be used for creating SSLSockets.
*/
public SSLSocketFactory getSocketFactory(SSLContext context) {
return context.getSocketFactory();
}
/**
* Can be used to log details about the SSLServerSocket used to
* create a secure server [SSL/TLS]. This method can also be
* overridden to change the enabled cipher suites and/or enabled protocols.
*/
public void logSSLServerSocketInfo(SSLServerSocket sslServerSocket) {
if(logger.isLoggable(Level.FINEST)==false) {
return;
}
logger.finest("SecureServer Info: ClientAuth: "+
sslServerSocket.getNeedClientAuth());
logger.finest("SecureServer Info: ClientMode: "+
sslServerSocket.getUseClientMode());
String supportedSuites[] = sslServerSocket.getSupportedCipherSuites();
logger.finest("SecureServer Info: Supported Cipher Suites --------");
for(int i=0;i<supportedSuites.length;i++)
logger.finest(supportedSuites[i]);
logger.finest("---------------------------------------------------");
String enabledSuites[] = sslServerSocket.getEnabledCipherSuites();
logger.finest("SecureServer Info: Enabled Cipher Suites ----------");
for(int i=0;i<enabledSuites.length;i++)
logger.finest(enabledSuites[i]);
logger.finest("---------------------------------------------------");
String supportedProtocols[] = sslServerSocket.getSupportedProtocols();
logger.finest("SecureServer Info: Supported Protocols ------------");
for(int i=0;i<supportedProtocols.length;i++)
logger.finest(supportedProtocols[i]);
logger.finest("---------------------------------------------------");
String enabledProtocols[] = sslServerSocket.getEnabledProtocols();
logger.finest("SecureServer Info: Enabled Protocols --------------");
for(int i=0;i<enabledProtocols.length;i++)
logger.finest(enabledProtocols[i]);
logger.finest("---------------------------------------------------");
}
}<|fim▁end|> | * Class that loads Key Managers, Trust Managers, SSLContext and other secure
* objects from QuickServer configuration passed. See <secure-store-manager> |
<|file_name|>SpamExtract.java<|end_file_name|><|fim▁begin|>/*
* ***** BEGIN LICENSE BLOCK *****
* Zimbra Collaboration Suite Server
* Copyright (C) 2006, 2007, 2008, 2009, 2010 Zimbra, Inc.
*
* The contents of this file are subject to the Zimbra Public License
* Version 1.3 ("License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
* http://www.zimbra.com/license.
*
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied.
* ***** END LICENSE BLOCK *****
*/
package com.zimbra.cs.util;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Iterator;
import java.util.Properties;
import javax.mail.BodyPart;
import javax.mail.MessagingException;
import javax.mail.Part;
import javax.mail.Session;
import javax.mail.internet.MimeMessage;
import javax.mail.internet.MimeMultipart;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.httpclient.Cookie;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpException;
import org.apache.commons.httpclient.HttpState;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.commons.httpclient.protocol.Protocol;
import com.zimbra.common.account.Key.AccountBy;
import com.zimbra.common.httpclient.HttpClientUtil;
import com.zimbra.common.localconfig.LC;
import com.zimbra.common.service.ServiceException;
import com.zimbra.common.soap.AccountConstants;
import com.zimbra.common.soap.AdminConstants;
import com.zimbra.common.soap.Element;<|fim▁hole|>import com.zimbra.common.util.ByteUtil;
import com.zimbra.common.util.CliUtil;
import com.zimbra.common.util.Log;
import com.zimbra.common.util.LogFactory;
import com.zimbra.common.util.ZimbraCookie;
import com.zimbra.common.zmime.ZMimeMessage;
import com.zimbra.common.zmime.ZSharedFileInputStream;
import com.zimbra.cs.account.Account;
import com.zimbra.cs.account.Config;
import com.zimbra.cs.account.Provisioning;
import com.zimbra.cs.account.Server;
import com.zimbra.cs.service.mail.ItemAction;
public class SpamExtract {
private static Log mLog = LogFactory.getLog(SpamExtract.class);
private static Options mOptions = new Options();
static {
mOptions.addOption("s", "spam", false, "extract messages from configured spam mailbox");
mOptions.addOption("n", "notspam", false, "extract messages from configured notspam mailbox");
mOptions.addOption("m", "mailbox", true, "extract messages from specified mailbox");
mOptions.addOption("d", "delete", false, "delete extracted messages (default is to keep)");
mOptions.addOption("o", "outdir", true, "directory to store extracted messages");
mOptions.addOption("a", "admin", true, "admin user name for auth (default is zimbra_ldap_userdn)");
mOptions.addOption("p", "password", true, "admin password for auth (default is zimbra_ldap_password)");
mOptions.addOption("u", "url", true, "admin SOAP service url (default is target mailbox's server's admin service port)");
mOptions.addOption("q", "query", true, "search query whose results should be extracted (default is in:inbox)");
mOptions.addOption("r", "raw", false, "extract raw message (default: gets message/rfc822 attachments)");
mOptions.addOption("h", "help", false, "show this usage text");
mOptions.addOption("D", "debug", false, "enable debug level logging");
mOptions.addOption("v", "verbose", false, "be verbose while running");
}
private static void usage(String errmsg) {
if (errmsg != null) {
mLog.error(errmsg);
}
HelpFormatter formatter = new HelpFormatter();
formatter.printHelp("zmspamextract [options] ",
"where [options] are one of:", mOptions,
"SpamExtract retrieve messages that may have been marked as spam or not spam in the Zimbra Web Client.");
System.exit((errmsg == null) ? 0 : 1);
}
private static CommandLine parseArgs(String args[]) {
CommandLineParser parser = new GnuParser();
CommandLine cl = null;
try {
cl = parser.parse(mOptions, args);
} catch (ParseException pe) {
usage(pe.getMessage());
}
if (cl.hasOption("h")) {
usage(null);
}
return cl;
}
private static boolean mVerbose = false;
public static void main(String[] args) throws ServiceException, HttpException, SoapFaultException, IOException {
CommandLine cl = parseArgs(args);
if (cl.hasOption('D')) {
CliUtil.toolSetup("DEBUG");
} else {
CliUtil.toolSetup("INFO");
}
if (cl.hasOption('v')) {
mVerbose = true;
}
boolean optDelete = cl.hasOption('d');
if (!cl.hasOption('o')) {
usage("must specify directory to extract messages to");
}
String optDirectory = cl.getOptionValue('o');
File outputDirectory = new File(optDirectory);
if (!outputDirectory.exists()) {
mLog.info("Creating directory: " + optDirectory);
outputDirectory.mkdirs();
if (!outputDirectory.exists()) {
mLog.error("could not create directory " + optDirectory);
System.exit(2);
}
}
String optAdminUser;
if (cl.hasOption('a')) {
optAdminUser = cl.getOptionValue('a');
} else {
optAdminUser = LC.zimbra_ldap_user.value();
}
String optAdminPassword;
if (cl.hasOption('p')) {
optAdminPassword = cl.getOptionValue('p');
} else {
optAdminPassword = LC.zimbra_ldap_password.value();
}
String optQuery = "in:inbox";
if (cl.hasOption('q')) {
optQuery = cl.getOptionValue('q');
}
Account account = getAccount(cl);
if (account == null) {
System.exit(1);
}
boolean optRaw = cl.hasOption('r');
if (mVerbose) mLog.info("Extracting from account " + account.getName());
Server server = Provisioning.getInstance().getServer(account);
String optAdminURL;
if (cl.hasOption('u')) {
optAdminURL = cl.getOptionValue('u');
} else {
optAdminURL = getSoapURL(server, true);
}
String adminAuthToken = getAdminAuthToken(optAdminURL, optAdminUser, optAdminPassword);
String authToken = getDelegateAuthToken(optAdminURL, account, adminAuthToken);
extract(authToken, account, server, optQuery, outputDirectory, optDelete, optRaw);
}
public static final String TYPE_MESSAGE = "message";
private static void extract(String authToken, Account account, Server server, String query, File outdir, boolean delete, boolean raw) throws ServiceException, HttpException, SoapFaultException, IOException {
String soapURL = getSoapURL(server, false);
URL restURL = getServerURL(server, false);
HttpClient hc = new HttpClient(); // CLI only, don't need conn mgr
HttpState state = new HttpState();
GetMethod gm = new GetMethod();
gm.setFollowRedirects(true);
Cookie authCookie = new Cookie(restURL.getHost(), ZimbraCookie.COOKIE_ZM_AUTH_TOKEN, authToken, "/", -1, false);
state.addCookie(authCookie);
hc.setState(state);
hc.getHostConfiguration().setHost(restURL.getHost(), restURL.getPort(), Protocol.getProtocol(restURL.getProtocol()));
gm.getParams().setSoTimeout(60000);
if (mVerbose) mLog.info("Mailbox requests to: " + restURL);
SoapHttpTransport transport = new SoapHttpTransport(soapURL);
transport.setRetryCount(1);
transport.setTimeout(0);
transport.setAuthToken(authToken);
int totalProcessed = 0;
boolean haveMore = true;
int offset = 0;
while (haveMore) {
Element searchReq = new Element.XMLElement(MailConstants.SEARCH_REQUEST);
searchReq.addElement(MailConstants.A_QUERY).setText(query);
searchReq.addAttribute(MailConstants.A_SEARCH_TYPES, TYPE_MESSAGE);
searchReq.addAttribute(MailConstants.A_QUERY_OFFSET, offset);
try {
if (mLog.isDebugEnabled()) mLog.debug(searchReq.prettyPrint());
Element searchResp = transport.invoke(searchReq, false, true, account.getId());
if (mLog.isDebugEnabled()) mLog.debug(searchResp.prettyPrint());
StringBuilder deleteList = new StringBuilder();
for (Iterator<Element> iter = searchResp.elementIterator(MailConstants.E_MSG); iter.hasNext();) {
offset++;
Element e = iter.next();
String mid = e.getAttribute(MailConstants.A_ID);
if (mid == null) {
mLog.warn("null message id SOAP response");
continue;
}
String path = "/service/user/" + account.getName() + "/?id=" + mid;
if (extractMessage(hc, gm, path, outdir, raw)) {
deleteList.append(mid).append(',');
}
totalProcessed++;
}
haveMore = false;
String more = searchResp.getAttribute(MailConstants.A_QUERY_MORE);
if (more != null && more.length() > 0) {
try {
int m = Integer.parseInt(more);
if (m > 0) {
haveMore = true;
}
} catch (NumberFormatException nfe) {
mLog.warn("more flag from server not a number: " + more, nfe);
}
}
if (delete && deleteList.length() > 0) {
deleteList.deleteCharAt(deleteList.length()-1); // -1 removes trailing comma
Element msgActionReq = new Element.XMLElement(MailConstants.MSG_ACTION_REQUEST);
Element action = msgActionReq.addElement(MailConstants.E_ACTION);
action.addAttribute(MailConstants.A_ID, deleteList.toString());
action.addAttribute(MailConstants.A_OPERATION, ItemAction.OP_HARD_DELETE);
if (mLog.isDebugEnabled()) mLog.debug(msgActionReq.prettyPrint());
Element msgActionResp = transport.invoke(msgActionReq, false, true, account.getId());
if (mLog.isDebugEnabled()) mLog.debug(msgActionResp.prettyPrint());
}
} finally {
gm.releaseConnection();
}
}
mLog.info("Total messages processed: " + totalProcessed);
}
private static Session mJMSession;
private static String mOutputPrefix;
static {
Properties props = new Properties();
props.setProperty("mail.mime.address.strict", "false");
mJMSession = Session.getInstance(props);
mOutputPrefix = Long.toHexString(System.currentTimeMillis());
}
private static boolean extractMessage(HttpClient hc, GetMethod gm, String path, File outdir, boolean raw) {
try {
extractMessage0(hc, gm, path, outdir, raw);
return true;
} catch (MessagingException me) {
mLog.warn("exception occurred fetching message", me);
} catch (IOException ioe) {
mLog.warn("exception occurred fetching message", ioe);
}
return false;
}
// Monotonic counter appended to mOutputPrefix to name each extracted file.
private static int mExtractIndex;
// Response bodies above this size (10 MB) are spooled to disk by BufferStream.
private static final int MAX_BUFFER_SIZE = 10 * 1024 * 1024;
/**
 * Fetches one message over HTTP and writes it to outdir.
 * In raw mode the response body is written verbatim to a new file;
 * otherwise the body is parsed as a MIME message and its message/rfc822
 * attachments are extracted via writeAttachedMessages.
 *
 * @throws IOException on HTTP failure or write failure
 * @throws MessagingException on MIME parse failure
 */
private static void extractMessage0(HttpClient hc, GetMethod gm, String path, File outdir, boolean raw) throws IOException, MessagingException {
    gm.setPath(path);
    if (mLog.isDebugEnabled()) mLog.debug("Fetching " + path);
    HttpClientUtil.executeMethod(hc, gm);
    if (gm.getStatusCode() != HttpStatus.SC_OK) {
        throw new IOException("HTTP GET failed: " + gm.getPath() + ": " + gm.getStatusCode() + ": " + gm.getStatusText());
    }

    if (raw) {
        // Write the message as-is.
        File file = new File(outdir, mOutputPrefix + "-" + mExtractIndex++);
        OutputStream os = null;
        try {
            os = new BufferedOutputStream(new FileOutputStream(file));
            ByteUtil.copy(gm.getResponseBodyAsStream(), true, os, false);
            if (mVerbose) mLog.info("Wrote: " + file);
        } catch (java.io.IOException e) {
            // BUGFIX: log the actual target file (the old code rebuilt the
            // name with the already-incremented mExtractIndex, naming the
            // wrong file) and propagate the failure so the caller does not
            // report success -- and schedule the message for deletion --
            // after a failed write.
            mLog.error("Cannot write to " + file, e);
            throw e;
        } finally {
            if (os != null)
                os.close();
        }
        return;
    }

    // Write the attached message to the output directory.
    // Large bodies are spooled to a temp file by BufferStream.
    BufferStream buffer = new BufferStream(gm.getResponseContentLength(), MAX_BUFFER_SIZE);
    buffer.setSequenced(false);
    MimeMessage mm = null;
    InputStream fis = null;
    try {
        ByteUtil.copy(gm.getResponseBodyAsStream(), true, buffer, false);
        if (buffer.isSpooled()) {
            fis = new ZSharedFileInputStream(buffer.getFile());
            mm = new ZMimeMessage(mJMSession, fis);
        } else {
            mm = new ZMimeMessage(mJMSession, buffer.getInputStream());
        }
        writeAttachedMessages(mm, outdir, gm.getPath());
    } finally {
        ByteUtil.closeStream(fis);
    }
}
/**
 * Extracts each message/rfc822 attachment of mm into its own file in
 * outdir. Non-message parts (e.g. the spam-report body) are skipped, and
 * a warning is logged when no message attachment is found.
 *
 * @param msgUri source URI of the container message, used only for logging
 */
private static void writeAttachedMessages(MimeMessage mm, File outdir, String msgUri)
throws IOException, MessagingException {
    // Not raw - ignore the spam report and extract messages that are in attachments...
    if (!(mm.getContent() instanceof MimeMultipart)) {
        mLog.warn("Spam/notspam messages must have attachments (skipping " + msgUri + ")");
        return;
    }
    MimeMultipart mmp = (MimeMultipart)mm.getContent();
    int nAttachments = mmp.getCount();
    boolean foundAtleastOneAttachedMessage = false;
    for (int i = 0; i < nAttachments; i++) {
        BodyPart bp = mmp.getBodyPart(i);
        if (!bp.isMimeType("message/rfc822")) {
            // Let's ignore all parts that are not messages.
            continue;
        }
        foundAtleastOneAttachedMessage = true;
        Part msg = (Part) bp.getContent(); // the actual message
        File file = new File(outdir, mOutputPrefix + "-" + mExtractIndex++);
        OutputStream os = null;
        try {
            os = new BufferedOutputStream(new FileOutputStream(file));
            msg.writeTo(os);
        } finally {
            // BUGFIX: os stays null when the FileOutputStream constructor
            // throws; the previous unconditional close() replaced the real
            // exception with a NullPointerException. Mirrors the null check
            // used by the raw branch in extractMessage0.
            if (os != null)
                os.close();
        }
        if (mVerbose) mLog.info("Wrote: " + file);
    }
    if (!foundAtleastOneAttachedMessage) {
        String msgid = mm.getHeader("Message-ID", " ");
        mLog.warn("message uri=" + msgUri + " message-id=" + msgid + " had no attachments");
    }
}
/**
 * Builds the base URL for the given server.
 * Admin access is always https on the admin port; user access follows the
 * server's zimbraMailMode attribute ("https" and "redirect" both force
 * https on the SSL port, anything else uses plain http).
 *
 * @throws ServiceException when the hostname, mail mode, or port
 *         attributes are missing or invalid
 */
public static URL getServerURL(Server server, boolean admin) throws ServiceException {
    String host = server.getAttr(Provisioning.A_zimbraServiceHostname);
    if (host == null) {
        throw ServiceException.FAILURE("invalid " + Provisioning.A_zimbraServiceHostname + " in server " + server.getName(), null);
    }

    String protocol;
    String portAttr;
    if (admin) {
        protocol = "https";
        portAttr = Provisioning.A_zimbraAdminPort;
    } else {
        String mode = server.getAttr(Provisioning.A_zimbraMailMode);
        if (mode == null) {
            throw ServiceException.FAILURE("null " + Provisioning.A_zimbraMailMode + " in server " + server.getName(), null);
        }
        if (mode.equalsIgnoreCase("https") || mode.equalsIgnoreCase("redirect")) {
            protocol = "https";
            portAttr = Provisioning.A_zimbraMailSSLPort;
        } else {
            protocol = "http";
            portAttr = Provisioning.A_zimbraMailPort;
        }
    }

    int port = server.getIntAttr(portAttr, -1);
    if (port < 1) {
        throw ServiceException.FAILURE("invalid " + portAttr + " in server " + server.getName(), null);
    }

    try {
        return new URL(protocol, host, port, "");
    } catch (MalformedURLException mue) {
        throw ServiceException.FAILURE("exception creating url (protocol=" + protocol + " host=" + host + " port=" + port + ")", mue);
    }
}
/**
 * Returns the full SOAP endpoint URL for the given server, appending the
 * admin or end-user service URI to the server's base URL.
 */
public static String getSoapURL(Server server, boolean admin) throws ServiceException {
    String base = getServerURL(server, admin).toString();
    if (admin) {
        return base + AdminConstants.ADMIN_SERVICE_URI;
    }
    return base + AccountConstants.USER_SERVICE_URI;
}
/**
 * Authenticates against the admin SOAP endpoint and returns the auth
 * token from the response.
 *
 * @throws ServiceException wrapping any failure of the SOAP round trip
 */
public static String getAdminAuthToken(String adminURL, String adminUser, String adminPassword) throws ServiceException {
    SoapHttpTransport transport = new SoapHttpTransport(adminURL);
    transport.setRetryCount(1);
    transport.setTimeout(0);

    Element request = new Element.XMLElement(AdminConstants.AUTH_REQUEST);
    request.addAttribute(AdminConstants.E_NAME, adminUser, Element.Disposition.CONTENT);
    request.addAttribute(AdminConstants.E_PASSWORD, adminPassword, Element.Disposition.CONTENT);

    try {
        if (mVerbose) mLog.info("Auth request to: " + adminURL);
        if (mLog.isDebugEnabled()) mLog.debug(request.prettyPrint());
        Element response = transport.invokeWithoutSession(request);
        if (mLog.isDebugEnabled()) mLog.debug(response.prettyPrint());
        return response.getAttribute(AdminConstants.E_AUTH_TOKEN);
    } catch (Exception e) {
        throw ServiceException.FAILURE("admin auth failed url=" + adminURL, e);
    }
}
/**
 * Obtains a delegated auth token for the given account using an existing
 * admin auth token.
 *
 * @throws ServiceException wrapping any failure of the SOAP round trip
 */
public static String getDelegateAuthToken(String adminURL, Account account, String adminAuthToken) throws ServiceException {
    SoapHttpTransport transport = new SoapHttpTransport(adminURL);
    transport.setRetryCount(1);
    transport.setTimeout(0);
    transport.setAuthToken(adminAuthToken);

    Element request = new Element.XMLElement(AdminConstants.DELEGATE_AUTH_REQUEST);
    Element acctElem = request.addElement(AdminConstants.E_ACCOUNT);
    acctElem.addAttribute(AdminConstants.A_BY, AdminConstants.BY_ID);
    acctElem.setText(account.getId());

    try {
        if (mVerbose) mLog.info("Delegate auth request to: " + adminURL);
        if (mLog.isDebugEnabled()) mLog.debug(request.prettyPrint());
        Element response = transport.invokeWithoutSession(request);
        if (mLog.isDebugEnabled()) mLog.debug(response.prettyPrint());
        return response.getAttribute(AdminConstants.E_AUTH_TOKEN);
    } catch (Exception e) {
        throw ServiceException.FAILURE("Delegate auth failed url=" + adminURL, e);
    }
}
/**
 * Resolves the target account from the command line options.
 * Exactly one of the mutually exclusive options must be given:
 *   -s  use the configured spam training account (zimbraSpamIsSpamAccount)
 *   -n  use the configured ham training account (zimbraSpamIsNotSpamAccount)
 *   -m  use the account name given as the option argument
 *
 * @return the resolved Account, or null on any usage/configuration error
 *         (the error is logged rather than thrown)
 * @throws ServiceException if the directory config cannot be fetched
 */
private static Account getAccount(CommandLine cl) throws ServiceException {
    Provisioning prov = Provisioning.getInstance();
    Config conf;
    try {
        conf = prov.getConfig();
    } catch (ServiceException e) {
        throw ServiceException.FAILURE("Unable to connect to LDAP directory", e);
    }
    String name = null;
    if (cl.hasOption('s')) {
        if (cl.hasOption('n') || cl.hasOption('m')) {
            mLog.error("only one of s, n or m options can be specified");
            return null;
        }
        name = conf.getAttr(Provisioning.A_zimbraSpamIsSpamAccount);
        if (name == null || name.length() == 0) {
            mLog.error("no account configured for spam");
            return null;
        }
    } else if (cl.hasOption('n')) {
        if (cl.hasOption('m')) {
            mLog.error("only one of s, n, or m options can be specified");
            return null;
        }
        name = conf.getAttr(Provisioning.A_zimbraSpamIsNotSpamAccount);
        if (name == null || name.length() == 0) {
            mLog.error("no account configured for ham");
            return null;
        }
    } else if (cl.hasOption('m')) {
        name = cl.getOptionValue('m');
        if (name.length() == 0) {
            mLog.error("illegal argument to m option");
            return null;
        }
    } else {
        mLog.error("one of s, n or m options must be specified");
        return null;
    }
    Account account = prov.get(AccountBy.name, name);
    if (account == null) {
        mLog.error("can not find account " + name);
        return null;
    }
    return account;
}
}<|fim▁end|> | import com.zimbra.common.soap.MailConstants;
import com.zimbra.common.soap.SoapFaultException;
import com.zimbra.common.soap.SoapHttpTransport;
import com.zimbra.common.util.BufferStream; |
<|file_name|>ApplicationRoles.java<|end_file_name|><|fim▁begin|>package br.eti.arthurgregorio.fulljeearch.domain.security;
/**<|fim▁hole|>public interface ApplicationRoles {
public final String USER = "Usuario";
public final String ADMINISTRATOR = "Administrador";
}<|fim▁end|> | *
* @author Arthur
*/ |
<|file_name|>providerbootstrapper.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# providerbootstrapper.py
# Copyright (C) 2013 LEAP
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Provider bootstrapping
"""
import logging
import socket
import os
import sys
import requests
from leap.bitmask import provider
from leap.bitmask import util
from leap.bitmask.config import flags
from leap.bitmask.config.providerconfig import ProviderConfig, MissingCACert
from leap.bitmask.provider import get_provider_path
from leap.bitmask.provider.pinned import PinnedProviders
from leap.bitmask.services.abstractbootstrapper import AbstractBootstrapper
from leap.bitmask.util.constants import REQUEST_TIMEOUT
from leap.bitmask.util.request_helpers import get_content
from leap.common import ca_bundle
from leap.common.certs import get_digest
from leap.common.check import leap_assert, leap_assert_type, leap_check
from leap.common.files import check_and_fix_urw_only, get_mtime, mkdir_p
logger = logging.getLogger(__name__)
class UnsupportedProviderAPI(Exception):
    """
    Raised when attempting to use a provider with an incompatible API.

    Raised by _download_provider_info when the advertised API version is
    not among provider.SUPPORTED_APIS.
    """
    pass
class UnsupportedClientVersionError(Exception):
    """
    Raised when attempting to use a provider with an older
    client than supported.

    Raised by _download_provider_info when the provider's
    x-minimum-client-version header exceeds this client's version.
    """
    pass
class WrongFingerprint(Exception):
    """
    Raised when a fingerprint comparison does not match.

    Raised (via leap_check) by _check_ca_fingerprint, both for a malformed
    fingerprint string and for a digest mismatch.
    """
    pass
class ProviderBootstrapper(AbstractBootstrapper):
"""
Given a provider URL performs a series of checks and emits signals
after they are passed.
If a check fails, the subsequent checks are not executed
"""
MIN_CLIENT_VERSION = 'x-minimum-client-version'
def __init__(self, signaler=None, bypass_checks=False):
    """
    Constructor for provider bootstrapper object

    :param signaler: Signaler object used to receive notifications
                     from the backend
    :type signaler: Signaler
    :param bypass_checks: Set to true if the app should bypass
                          first round of checks for CA
                          certificates at bootstrap
    :type bypass_checks: bool
    """
    AbstractBootstrapper.__init__(self, signaler, bypass_checks)

    # Populated later by run_provider_select_checks /
    # run_provider_setup_checks before any check runs.
    self._domain = None
    self._provider_config = None
    self._download_if_needed = False

    if signaler is not None:
        self._cancel_signal = signaler.prov_cancelled_setup
@property
def verify(self):
    """
    Certificate-verification argument to hand to ``requests``.

    :returns: False when checks are bypassed, otherwise the path to
              the CA bundle to verify against.
    :rtype: bool or str
    """
    if self._bypass_checks:
        return False
    # Prefer an explicitly configured CA file over the bundled bundle.
    configured = flags.CA_CERT_FILE
    return configured if configured is not None else ca_bundle.where()
def _check_name_resolution(self):
    """
    Checks that the name resolution for the provider name works

    Propagates socket.gaierror (from gethostbyname) when the domain
    does not resolve.
    """
    leap_assert(self._domain, "Cannot check DNS without a domain")
    logger.debug("Checking name resolution for %r" % (self._domain))

    # We don't skip this check, since it's basic for the whole
    # system to work
    # err --- but we can do it after a failure, to diagnose what went
    # wrong. Right now we're just adding connection overhead. -- kali
    # 'idna' encoding handles internationalized domain names.
    socket.gethostbyname(self._domain.encode('idna'))
<|fim▁hole|> Checks that https is working and that the provided certificate
checks out
"""
leap_assert(self._domain, "Cannot check HTTPS without a domain")
logger.debug("Checking https for %r" % (self._domain))
# We don't skip this check, since it's basic for the whole
# system to work.
# err --- but we can do it after a failure, to diagnose what went
# wrong. Right now we're just adding connection overhead. -- kali
verify = self.verify
if verify:
verify = self.verify.encode(sys.getfilesystemencoding())
try:
uri = "https://{0}".format(self._domain.encode('idna'))
res = self._session.get(uri, verify=verify,
timeout=REQUEST_TIMEOUT)
res.raise_for_status()
except requests.exceptions.SSLError as exc:
self._err_msg = self.tr("Provider certificate could "
"not be verified")
raise
except Exception as exc:
# XXX careful!. The error might be also a SSL handshake
# timeout error, in which case we should retry a couple of times
# more, for cases where the ssl server gives high latencies.
self._err_msg = self.tr("Provider does not support HTTPS")
raise
def _download_provider_info(self, *args):
    """
    Downloads the provider.json definition and caches it under the
    local configuration path.

    Raises requests.HTTPError on HTTP failure,
    UnsupportedClientVersionError when the provider demands a newer
    client, and UnsupportedProviderAPI when the provider's API version
    is not supported.
    """
    leap_assert(self._domain,
                "Cannot download provider info without a domain")
    logger.debug("Downloading provider info for %r" % (self._domain))

    # --------------------------------------------------------------
    # TODO factor out with the download routines in services.
    # Watch out! We're handling the verify paramenter differently here.

    headers = {}
    domain = self._domain.encode(sys.getfilesystemencoding())
    provider_json = os.path.join(util.get_path_prefix(),
                                 get_provider_path(domain))

    # For pinned (hardcoded) providers, seed the local cache with the
    # bundled provider.json and CA cert before touching the network.
    if domain in PinnedProviders.domains() and \
            not os.path.exists(provider_json):
        mkdir_p(os.path.join(os.path.dirname(provider_json),
                             "keys", "ca"))
        cacert = os.path.join(os.path.dirname(provider_json),
                              "keys", "ca", "cacert.pem")
        PinnedProviders.save_hardcoded(domain, provider_json, cacert)

    mtime = get_mtime(provider_json)

    if self._download_if_needed and mtime:
        # Ask the server to skip the body if our cached copy is fresh.
        headers['if-modified-since'] = mtime

    uri = "https://%s/%s" % (self._domain, "provider.json")
    verify = self.verify

    if mtime:  # the provider.json exists
        # So, we're getting it from the api.* and checking against
        # the provider ca.
        try:
            provider_config = ProviderConfig()
            provider_config.load(provider_json)
            uri = provider_config.get_api_uri() + '/provider.json'
            verify = provider_config.get_ca_cert_path()
        except MissingCACert:
            # no ca? then download from main domain again.
            pass

    if verify:
        verify = verify.encode(sys.getfilesystemencoding())
    logger.debug("Requesting for provider.json... "
                 "uri: {0}, verify: {1}, headers: {2}".format(
                     uri, verify, headers))
    res = self._session.get(uri.encode('idna'), verify=verify,
                            headers=headers, timeout=REQUEST_TIMEOUT)
    res.raise_for_status()
    logger.debug("Request status code: {0}".format(res.status_code))

    min_client_version = res.headers.get(self.MIN_CLIENT_VERSION, '0')

    # Not modified
    if res.status_code == 304:
        logger.debug("Provider definition has not been modified")
    # --------------------------------------------------------------
    # end refactor, more or less...
    # XXX Watch out, have to check the supported api yet.
    else:
        if flags.APP_VERSION_CHECK:
            # TODO split
            if not provider.supports_client(min_client_version):
                if self._signaler is not None:
                    self._signaler.signal(
                        self._signaler.prov_unsupported_client)
                raise UnsupportedClientVersionError()

        provider_definition, mtime = get_content(res)

        provider_config = ProviderConfig()
        provider_config.load(data=provider_definition, mtime=mtime)
        provider_config.save(["leap", "providers",
                              domain, "provider.json"])

        if flags.API_VERSION_CHECK:
            # TODO split
            api_version = provider_config.get_api_version()
            if provider.supports_api(api_version):
                logger.debug("Provider definition has been modified")
            else:
                api_supported = ', '.join(provider.SUPPORTED_APIS)
                error = ('Unsupported provider API version. '
                         'Supported versions are: {0}. '
                         'Found: {1}.').format(api_supported, api_version)

                logger.error(error)
                if self._signaler is not None:
                    self._signaler.signal(
                        self._signaler.prov_unsupported_api)
                raise UnsupportedProviderAPI(error)
def run_provider_select_checks(self, domain, download_if_needed=False):
    """
    Populates the check queue.

    :param domain: domain to check
    :type domain: unicode

    :param download_if_needed: if True, makes the checks do not
                               overwrite already downloaded data
    :type download_if_needed: bool

    :returns: whatever addCallbackChain returns (presumably a deferred
              for the queued chain -- confirm against
              AbstractBootstrapper)
    """
    leap_assert(domain and len(domain) > 0, "We need a domain!")

    self._domain = ProviderConfig.sanitize_path_component(domain)
    self._download_if_needed = download_if_needed

    # Per-step signals are only resolved when a signaler was supplied.
    name_resolution = None
    https_connection = None
    down_provider_info = None
    if self._signaler is not None:
        name_resolution = self._signaler.prov_name_resolution
        https_connection = self._signaler.prov_https_connection
        down_provider_info = self._signaler.prov_download_provider_info

    cb_chain = [
        (self._check_name_resolution, name_resolution),
        (self._check_https, https_connection),
        (self._download_provider_info, down_provider_info)
    ]

    return self.addCallbackChain(cb_chain)
def _should_proceed_cert(self):
    """
    Decide whether the CA certificate must be (re)downloaded.

    :returns: False if the certificate already exists for the given
              provider. True otherwise
    :rtype: bool
    """
    leap_assert(self._provider_config, "We need a provider config!")

    # Always proceed unless we were asked to reuse downloaded data.
    if not self._download_if_needed:
        return True
    cert_path = self._provider_config.get_ca_cert_path(
        about_to_download=True)
    return not os.path.exists(cert_path)
def _download_ca_cert(self, *args):
    """
    Downloads the CA cert that is going to be used for the api URL

    Writes it to the provider's ca cert path with 0600 permissions.
    Raises requests.HTTPError on download failure.
    """
    # XXX maybe we can skip this step if
    # we have a fresh one.
    leap_assert(self._provider_config, "Cannot download the ca cert "
                "without a provider config!")
    logger.debug("Downloading ca cert for %r at %r" %
                 (self._domain, self._provider_config.get_ca_cert_uri()))

    if not self._should_proceed_cert():
        # Cert already cached: just normalize its permissions and bail.
        check_and_fix_urw_only(
            self._provider_config
            .get_ca_cert_path(about_to_download=True))
        return

    res = self._session.get(self._provider_config.get_ca_cert_uri(),
                            verify=self.verify,
                            timeout=REQUEST_TIMEOUT)
    res.raise_for_status()

    cert_path = self._provider_config.get_ca_cert_path(
        about_to_download=True)
    cert_dir = os.path.dirname(cert_path)
    mkdir_p(cert_dir)
    with open(cert_path, "w") as f:
        f.write(res.content)

    check_and_fix_urw_only(cert_path)
def _check_ca_fingerprint(self, *args):
    """
    Checks the CA cert fingerprint against the one provided in the
    json definition

    :raises WrongFingerprint: when the configured fingerprint is not of
        the form "<method>:<hex>" or when the computed digest of the
        downloaded cert does not match it.
    """
    leap_assert(self._provider_config, "Cannot check the ca cert "
                "without a provider config!")
    logger.debug("Checking ca fingerprint for %r and cert %r" %
                 (self._domain,
                  self._provider_config.get_ca_cert_path()))

    if not self._should_proceed_cert():
        return

    # Expected format: "<digest-method>:<fingerprint>".
    parts = self._provider_config.get_ca_cert_fingerprint().split(":")

    error_msg = "Wrong fingerprint format"
    leap_check(len(parts) == 2, error_msg, WrongFingerprint)

    method = parts[0].strip()
    fingerprint = parts[1].strip()
    cert_data = None
    with open(self._provider_config.get_ca_cert_path()) as f:
        cert_data = f.read()

    leap_assert(len(cert_data) > 0, "Could not read certificate data")
    digest = get_digest(cert_data, method)
    error_msg = "Downloaded certificate has a different fingerprint!"
    leap_check(digest == fingerprint, error_msg, WrongFingerprint)
def _check_api_certificate(self, *args):
    """
    Tries to make an API call with the downloaded cert and checks
    if it validates against it

    Propagates requests SSL errors / HTTPError when the API endpoint
    cannot be validated with the downloaded CA cert.
    """
    leap_assert(self._provider_config, "Cannot check the ca cert "
                "without a provider config!")
    logger.debug("Checking api certificate for %s and cert %s" %
                 (self._provider_config.get_api_uri(),
                  self._provider_config.get_ca_cert_path()))

    if not self._should_proceed_cert():
        return

    test_uri = "%s/%s/cert" % (self._provider_config.get_api_uri(),
                               self._provider_config.get_api_version())
    ca_cert_path = self._provider_config.get_ca_cert_path()
    ca_cert_path = ca_cert_path.encode(sys.getfilesystemencoding())
    res = self._session.get(test_uri, verify=ca_cert_path,
                            timeout=REQUEST_TIMEOUT)
    res.raise_for_status()
def run_provider_setup_checks(self,
                              provider_config,
                              download_if_needed=False):
    """
    Starts the checks needed for a new provider setup.

    :param provider_config: Provider configuration
    :type provider_config: ProviderConfig
    :param download_if_needed: if True, makes the checks do not
                               overwrite already downloaded data.
    :type download_if_needed: bool

    :returns: whatever addCallbackChain returns (presumably a deferred
              -- confirm against AbstractBootstrapper)
    """
    leap_assert(provider_config, "We need a provider config!")
    leap_assert_type(provider_config, ProviderConfig)

    self._provider_config = provider_config
    self._download_if_needed = download_if_needed

    # Per-step signals are only resolved when a signaler was supplied.
    download_ca_cert = None
    check_ca_fingerprint = None
    check_api_certificate = None
    if self._signaler is not None:
        download_ca_cert = self._signaler.prov_download_ca_cert
        check_ca_fingerprint = self._signaler.prov_check_ca_fingerprint
        check_api_certificate = self._signaler.prov_check_api_certificate

    cb_chain = [
        (self._download_ca_cert, download_ca_cert),
        (self._check_ca_fingerprint, check_ca_fingerprint),
        (self._check_api_certificate, check_api_certificate)
    ]

    return self.addCallbackChain(cb_chain)
""" |
<|file_name|>test_basics.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import, print_function, division
from petl.test.helpers import ieq
from petl.util import expr, empty, coalesce
from petl.transform.basics import cut, cat, addfield, rowslice, head, tail, \
cutout, skipcomments, annex, addrownumbers, addcolumn, \
addfieldusingcontext, movefield, stack
def test_cut():
table = (('foo', 'bar', 'baz'),
('A', 1, 2),
('B', '2', '3.4'),
(u'B', u'3', u'7.8', True),
('D', 'xyz', 9.0),
('E', None))
cut1 = cut(table, 'foo')
expectation = (('foo',),
('A',),
('B',),
(u'B',),
('D',),
('E',))
ieq(expectation, cut1)
cut2 = cut(table, 'foo', 'baz')
expectation = (('foo', 'baz'),
('A', 2),
('B', '3.4'),
(u'B', u'7.8'),
('D', 9.0),
('E', None))
ieq(expectation, cut2)
cut3 = cut(table, 0, 2)
expectation = (('foo', 'baz'),
('A', 2),
('B', '3.4'),
(u'B', u'7.8'),
('D', 9.0),
('E', None))
ieq(expectation, cut3)
cut4 = cut(table, 'bar', 0)
expectation = (('bar', 'foo'),
(1, 'A'),
('2', 'B'),
(u'3', u'B'),
('xyz', 'D'),
(None, 'E'))
ieq(expectation, cut4)
cut5 = cut(table, ('foo', 'baz'))
expectation = (('foo', 'baz'),
('A', 2),
('B', '3.4'),
(u'B', u'7.8'),
('D', 9.0),
('E', None))
ieq(expectation, cut5)
def test_cut_empty():
    # Cutting a header-only table keeps just the selected header field.
    header_only = (('foo', 'bar'),)
    result = cut(header_only, 'bar')
    ieq((('bar',),), result)
def test_cutout():
table = (('foo', 'bar', 'baz'),
('A', 1, 2),
('B', '2', '3.4'),
(u'B', u'3', u'7.8', True),
('D', 'xyz', 9.0),
('E', None))
cut1 = cutout(table, 'bar', 'baz')
expectation = (('foo',),
('A',),
('B',),
(u'B',),
('D',),
('E',))
ieq(expectation, cut1)
cut2 = cutout(table, 'bar')
expectation = (('foo', 'baz'),
('A', 2),
('B', '3.4'),
(u'B', u'7.8'),
('D', 9.0),
('E', None))
ieq(expectation, cut2)
cut3 = cutout(table, 1)
expectation = (('foo', 'baz'),
('A', 2),
('B', '3.4'),
(u'B', u'7.8'),
('D', 9.0),
('E', None))
ieq(expectation, cut3)
def test_cat():
table1 = (('foo', 'bar'),
(1, 'A'),
(2, 'B'))
table2 = (('bar', 'baz'),
('C', True),
('D', False))
cat1 = cat(table1, table2, missing=None)
expectation = (('foo', 'bar', 'baz'),
(1, 'A', None),
(2, 'B', None),
(None, 'C', True),
(None, 'D', False))
ieq(expectation, cat1)
# how does cat cope with uneven rows?
table3 = (('foo', 'bar', 'baz'),
('A', 1, 2),
('B', '2', '3.4'),
(u'B', u'3', u'7.8', True),
('D', 'xyz', 9.0),
('E', None))
cat3 = cat(table3, missing=None)
expectation = (('foo', 'bar', 'baz'),
('A', 1, 2),
('B', '2', '3.4'),
(u'B', u'3', u'7.8'),
('D', 'xyz', 9.0),
('E', None, None))
ieq(expectation, cat3)
# cat more than two tables?
cat4 = cat(table1, table2, table3)
expectation = (('foo', 'bar', 'baz'),
(1, 'A', None),
(2, 'B', None),
(None, 'C', True),
(None, 'D', False),
('A', 1, 2),
('B', '2', '3.4'),
(u'B', u'3', u'7.8'),<|fim▁hole|> ieq(expectation, cat4)
def test_cat_with_header():
table1 = (('bar', 'foo'),
('A', 1),
('B', 2))
table2 = (('bar', 'baz'),
('C', True),
('D', False))
actual = cat(table1, header=['A', 'foo', 'B', 'bar', 'C'])
expect = (('A', 'foo', 'B', 'bar', 'C'),
(None, 1, None, 'A', None),
(None, 2, None, 'B', None))
ieq(expect, actual)
ieq(expect, actual)
actual = cat(table1, table2, header=['A', 'foo', 'B', 'bar', 'C'])
expect = (('A', 'foo', 'B', 'bar', 'C'),
(None, 1, None, 'A', None),
(None, 2, None, 'B', None),
(None, None, None, 'C', None),
(None, None, None, 'D', None))
ieq(expect, actual)
ieq(expect, actual)
def test_cat_empty():
table1 = (('foo', 'bar'),
(1, 'A'),
(2, 'B'))
table2 = (('bar', 'baz'),)
expect = (('foo', 'bar', 'baz'),
(1, 'A', None),
(2, 'B', None))
actual = cat(table1, table2)
ieq(expect, actual)
def test_cat_dupfields():
table1 = (('foo', 'foo'),
(1, 'A'),
(2,),
(3, 'B', True))
# these cases are pathological, including to confirm expected behaviour,
# but user needs to rename fields to get something sensible
actual = cat(table1)
expect = (('foo', 'foo'),
(1, 1),
(2, 2),
(3, 3))
ieq(expect, actual)
table2 = (('foo', 'foo', 'bar'),
(4, 'C', True),
(5, 'D', False))
actual = cat(table1, table2)
expect = (('foo', 'foo', 'bar'),
(1, 1, None),
(2, 2, None),
(3, 3, None),
(4, 4, True),
(5, 5, False))
ieq(expect, actual)
def test_stack_dupfields():
table1 = (('foo', 'foo'),
(1, 'A'),
(2,),
(3, 'B', True))
actual = stack(table1)
expect = (('foo', 'foo'),
(1, 'A'),
(2, None),
(3, 'B'))
ieq(expect, actual)
table2 = (('foo', 'foo', 'bar'),
(4, 'C', True),
(5, 'D', False))
actual = stack(table1, table2)
expect = (('foo', 'foo'),
(1, 'A'),
(2, None),
(3, 'B'),
(4, 'C'),
(5, 'D'))
ieq(expect, actual)
def test_addfield():
table = (('foo', 'bar'),
('M', 12),
('F', 34),
('-', 56))
result = addfield(table, 'baz', 42)
expectation = (('foo', 'bar', 'baz'),
('M', 12, 42),
('F', 34, 42),
('-', 56, 42))
ieq(expectation, result)
ieq(expectation, result)
result = addfield(table, 'baz', lambda row: '%s,%s' % (row.foo, row.bar))
expectation = (('foo', 'bar', 'baz'),
('M', 12, 'M,12'),
('F', 34, 'F,34'),
('-', 56, '-,56'))
ieq(expectation, result)
ieq(expectation, result)
result = addfield(table, 'baz', lambda rec: rec['bar'] * 2)
expectation = (('foo', 'bar', 'baz'),
('M', 12, 24),
('F', 34, 68),
('-', 56, 112))
ieq(expectation, result)
ieq(expectation, result)
result = addfield(table, 'baz', expr('{bar} * 2'))
expectation = (('foo', 'bar', 'baz'),
('M', 12, 24),
('F', 34, 68),
('-', 56, 112))
ieq(expectation, result)
ieq(expectation, result)
result = addfield(table, 'baz', 42, index=0)
expectation = (('baz', 'foo', 'bar'),
(42, 'M', 12),
(42, 'F', 34),
(42, '-', 56))
ieq(expectation, result)
ieq(expectation, result)
def test_addfield_empty():
table = (('foo', 'bar'),)
expect = (('foo', 'bar', 'baz'),)
actual = addfield(table, 'baz', 42)
ieq(expect, actual)
ieq(expect, actual)
def test_addfield_coalesce():
table = (('foo', 'bar', 'baz', 'quux'),
('M', 12, 23, 44),
('F', None, 23, 11),
('-', None, None, 42))
result = addfield(table, 'spong', coalesce('bar', 'baz', 'quux'))
expect = (('foo', 'bar', 'baz', 'quux', 'spong'),
('M', 12, 23, 44, 12),
('F', None, 23, 11, 23),
('-', None, None, 42, 42))
ieq(expect, result)
ieq(expect, result)
result = addfield(table, 'spong', coalesce(1, 2, 3))
expect = (('foo', 'bar', 'baz', 'quux', 'spong'),
('M', 12, 23, 44, 12),
('F', None, 23, 11, 23),
('-', None, None, 42, 42))
ieq(expect, result)
ieq(expect, result)
def test_addfield_uneven_rows():
table = (('foo', 'bar'),
('M',),
('F', 34),
('-', 56, 'spong'))
result = addfield(table, 'baz', 42)
expectation = (('foo', 'bar', 'baz'),
('M', None, 42),
('F', 34, 42),
('-', 56, 42))
ieq(expectation, result)
ieq(expectation, result)
def test_addfield_dupfield():
table = (('foo', 'foo'),
('M', 12),
('F', 34),
('-', 56))
result = addfield(table, 'bar', 42)
expectation = (('foo', 'foo', 'bar'),
('M', 12, 42),
('F', 34, 42),
('-', 56, 42))
ieq(expectation, result)
ieq(expectation, result)
def test_rowslice():
table = (('foo', 'bar', 'baz'),
('A', 1, 2),
('B', '2', '3.4'),
(u'B', u'3', u'7.8', True),
('D', 'xyz', 9.0),
('E', None))
result = rowslice(table, 2)
expectation = (('foo', 'bar', 'baz'),
('A', 1, 2),
('B', '2', '3.4'))
ieq(expectation, result)
result = rowslice(table, 1, 2)
expectation = (('foo', 'bar', 'baz'),
('B', '2', '3.4'))
ieq(expectation, result)
result = rowslice(table, 1, 5, 2)
expectation = (('foo', 'bar', 'baz'),
('B', '2', '3.4'),
('D', 'xyz', 9.0))
ieq(expectation, result)
def test_rowslice_empty():
table = (('foo', 'bar'),)
expect = (('foo', 'bar'),)
actual = rowslice(table, 1, 2)
ieq(expect, actual)
def test_head():
    # head() keeps only the header plus the first n data rows.
    data = (('foo', 'bar'),
            ('a', 1),
            ('b', 2),
            ('c', 5),
            ('d', 7),
            ('f', 42),
            ('f', 3),
            ('h', 90),
            ('k', 12),
            ('l', 77),
            ('q', 2))
    expected = (('foo', 'bar'),
                ('a', 1),
                ('b', 2),
                ('c', 5),
                ('d', 7))
    ieq(expected, head(data, 4))
def test_tail():
    # tail() keeps only the header plus the last n data rows.
    data = (('foo', 'bar'),
            ('a', 1),
            ('b', 2),
            ('c', 5),
            ('d', 7),
            ('f', 42),
            ('f', 3),
            ('h', 90),
            ('k', 12),
            ('l', 77),
            ('q', 2))
    expected = (('foo', 'bar'),
                ('h', 90),
                ('k', 12),
                ('l', 77),
                ('q', 2))
    ieq(expected, tail(data, 4))
def test_tail_empty():
table = (('foo', 'bar'),)
expect = (('foo', 'bar'),)
actual = tail(table)
ieq(expect, actual)
def test_skipcomments():
table1 = (('##aaa', 'bbb', 'ccc'),
('##mmm',),
('#foo', 'bar'),
('##nnn', 1),
('a', 1),
('b', 2))
table2 = skipcomments(table1, '##')
expect2 = (('#foo', 'bar'),
('a', 1),
('b', 2))
ieq(expect2, table2)
ieq(expect2, table2) # can iterate twice?
def test_skipcomments_empty():
table1 = (('##aaa', 'bbb', 'ccc'),
('##mmm',),
('#foo', 'bar'),
('##nnn', 1))
table2 = skipcomments(table1, '##')
expect2 = (('#foo', 'bar'),)
ieq(expect2, table2)
def test_annex():
table1 = (('foo', 'bar'),
('A', 9),
('C', 2),
('F', 1))
table2 = (('foo', 'baz'),
('B', 3),
('D', 10))
expect = (('foo', 'bar', 'foo', 'baz'),
('A', 9, 'B', 3),
('C', 2, 'D', 10),
('F', 1, None, None))
actual = annex(table1, table2)
ieq(expect, actual)
ieq(expect, actual)
expect21 = (('foo', 'baz', 'foo', 'bar'),
('B', 3, 'A', 9),
('D', 10, 'C', 2),
(None, None, 'F', 1))
actual21 = annex(table2, table1)
ieq(expect21, actual21)
ieq(expect21, actual21)
def test_annex_uneven_rows():
table1 = (('foo', 'bar'),
('A', 9, True),
('C', 2),
('F',))
table2 = (('foo', 'baz'),
('B', 3),
('D', 10))
expect = (('foo', 'bar', 'foo', 'baz'),
('A', 9, 'B', 3),
('C', 2, 'D', 10),
('F', None, None, None))
actual = annex(table1, table2)
ieq(expect, actual)
ieq(expect, actual)
def test_addrownumbers():
table1 = (('foo', 'bar'),
('A', 9),
('C', 2),
('F', 1))
expect = (('row', 'foo', 'bar'),
(1, 'A', 9),
(2, 'C', 2),
(3, 'F', 1))
actual = addrownumbers(table1)
ieq(expect, actual)
ieq(expect, actual)
def test_addcolumn():
table1 = (('foo', 'bar'),
('A', 1),
('B', 2))
col = [True, False]
expect2 = (('foo', 'bar', 'baz'),
('A', 1, True),
('B', 2, False))
table2 = addcolumn(table1, 'baz', col)
ieq(expect2, table2)
ieq(expect2, table2)
# test short column
table3 = (('foo', 'bar'),
('A', 1),
('B', 2),
('C', 2))
expect4 = (('foo', 'bar', 'baz'),
('A', 1, True),
('B', 2, False),
('C', 2, None))
table4 = addcolumn(table3, 'baz', col)
ieq(expect4, table4)
# test short table
col = [True, False, False]
expect5 = (('foo', 'bar', 'baz'),
('A', 1, True),
('B', 2, False),
(None, None, False))
table5 = addcolumn(table1, 'baz', col)
ieq(expect5, table5)
def test_empty_addcolumn():
table1 = empty()
table2 = addcolumn(table1, 'foo', ['A', 'B'])
table3 = addcolumn(table2, 'bar', [1, 2])
expect = (('foo', 'bar'),
('A', 1),
('B', 2))
ieq(expect, table3)
ieq(expect, table3)
def test_addfieldusingcontext():
table1 = (('foo', 'bar'),
('A', 1),
('B', 4),
('C', 5),
('D', 9))
expect = (('foo', 'bar', 'baz', 'quux'),
('A', 1, None, 3),
('B', 4, 3, 1),
('C', 5, 1, 4),
('D', 9, 4, None))
def upstream(prv, cur, nxt):
if prv is None:
return None
else:
return cur.bar - prv.bar
def downstream(prv, cur, nxt):
if nxt is None:
return None
else:
return nxt.bar - cur.bar
table2 = addfieldusingcontext(table1, 'baz', upstream)
table3 = addfieldusingcontext(table2, 'quux', downstream)
ieq(expect, table3)
ieq(expect, table3)
def test_addfieldusingcontext_stateful():
table1 = (('foo', 'bar'),
('A', 1),
('B', 4),
('C', 5),
('D', 9))
expect = (('foo', 'bar', 'baz', 'quux'),
('A', 1, 1, 5),
('B', 4, 5, 10),
('C', 5, 10, 19),
('D', 9, 19, 19))
def upstream(prv, cur, nxt):
if prv is None:
return cur.bar
else:
return cur.bar + prv.baz
def downstream(prv, cur, nxt):
if nxt is None:
return prv.quux
elif prv is None:
return nxt.bar + cur.bar
else:
return nxt.bar + prv.quux
table2 = addfieldusingcontext(table1, 'baz', upstream)
table3 = addfieldusingcontext(table2, 'quux', downstream)
ieq(expect, table3)
ieq(expect, table3)
def test_movefield():
table1 = (('foo', 'bar', 'baz'),
(1, 'A', True),
(2, 'B', False))
expect = (('bar', 'foo', 'baz'),
('A', 1, True),
('B', 2, False))
actual = movefield(table1, 'bar', 0)
ieq(expect, actual)
ieq(expect, actual)
actual = movefield(table1, 'foo', 1)
ieq(expect, actual)
ieq(expect, actual)<|fim▁end|> | ('D', 'xyz', 9.0),
('E', None, None)) |
<|file_name|>AutoFocus.js<|end_file_name|><|fim▁begin|>import React, { PropTypes } from 'react';
class AutoFocus extends React.Component {
constructor( props ) {
super( props );
this.receiveRef = this.receiveRef.bind( this );
}
<|fim▁hole|> if ( this.ref ) {
this.ref.focus();
}
}
receiveRef( node ) {
this.ref = node;
}
render() {
return this.props.children( this.receiveRef );
}
}
AutoFocus.propTypes = {
children: PropTypes.func.isRequired,
};
export default AutoFocus;<|fim▁end|> | componentDidMount() { |
<|file_name|>privateFieldAssignabilityFromUnknown.ts<|end_file_name|><|fim▁begin|>export class Class {
#field: any
}
<|fim▁hole|><|fim▁end|> |
const task: Class = {} as unknown; |
<|file_name|>ExtraJumpPlugin.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2015-2018 Igor Maznitsa.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software<|fim▁hole|> */
package com.igormaznitsa.mindmap.plugins.processors;
import com.igormaznitsa.mindmap.model.Extra;
import com.igormaznitsa.mindmap.model.Topic;
import com.igormaznitsa.mindmap.plugins.PopUpSection;
import com.igormaznitsa.mindmap.plugins.api.AbstractFocusedTopicPlugin;
import com.igormaznitsa.mindmap.plugins.api.ExternallyExecutedPlugin;
import com.igormaznitsa.mindmap.plugins.api.PluginContext;
import com.igormaznitsa.mindmap.swing.panel.Texts;
import com.igormaznitsa.mindmap.swing.services.IconID;
import com.igormaznitsa.mindmap.swing.services.ImageIconServiceProvider;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.swing.Icon;
public class ExtraJumpPlugin extends AbstractFocusedTopicPlugin implements ExternallyExecutedPlugin {
private static final Icon ICO = ImageIconServiceProvider.findInstance().getIconForId(IconID.POPUP_EXTRAS_JUMP);
@Override
public int getOrder() {
return 4;
}
@Override
@Nullable
protected Icon getIcon(@Nonnull final PluginContext contextl, @Nullable final Topic activeTopic) {
return ICO;
}
@Override
@Nonnull
protected String getName(@Nonnull final PluginContext context, @Nullable final Topic activeTopic) {
if (activeTopic == null) {
return "...";
}
return activeTopic.getExtras().containsKey(Extra.ExtraType.TOPIC) ? Texts.getString("MMDGraphEditor.makePopUp.miEditTransition") :
Texts.getString("MMDGraphEditor.makePopUp.miAddTransition");
}
@Override
@Nonnull
public PopUpSection getSection() {
return PopUpSection.EXTRAS;
}
}<|fim▁end|> | * distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. |
<|file_name|>cpp_compile.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
from pants.base.build_environment import get_buildroot
from pants.base.workunit import WorkUnit
from pants.util.dirutil import safe_mkdir
from pants.contrib.cpp.tasks.cpp_task import CppTask
class CppCompile(CppTask):
"""Compiles object files from C++ sources."""
@classmethod
def register_options(cls, register):
super(CppCompile, cls).register_options(register)
register('--cc-options',
help='Append these options to the compiler command line.')
register('--cc-extensions',
default=['cc', 'cxx', 'cpp'],
help=('The list of extensions (without the .) to consider when '
'determining if a file is a C++ source file.'))
@classmethod
def product_types(cls):
return ['objs']
@property
def cache_target_dirs(self):
return True
def execute(self):
"""Compile all sources in a given target to object files."""
def is_cc(source):
_, ext = os.path.splitext(source)
return ext[1:] in self.get_options().cc_extensions
targets = self.context.targets(self.is_cpp)
# Compile source files to objects.
with self.invalidated(targets, invalidate_dependents=True) as invalidation_check:
obj_mapping = self.context.products.get('objs')
for vt in invalidation_check.all_vts:
for source in vt.target.sources_relative_to_buildroot():
if is_cc(source):
if not vt.valid:
with self.context.new_workunit(name='cpp-compile', labels=[WorkUnit.MULTITOOL]):
# TODO: Parallelise the compilation.
# TODO: Only recompile source files that have changed since the
# object file was last written. Also use the output from
# gcc -M to track dependencies on headers.
self._compile(vt.target, vt.results_dir, source)
objpath = self._objpath(vt.target, vt.results_dir, source)
obj_mapping.add(vt.target, vt.results_dir).append(objpath)
def _objpath(self, target, results_dir, source):
abs_source_root = os.path.join(get_buildroot(), target.target_base)
abs_source = os.path.join(get_buildroot(), source)
rel_source = os.path.relpath(abs_source, abs_source_root)
root, _ = os.path.splitext(rel_source)
obj_name = root + '.o'
return os.path.join(results_dir, obj_name)
def _compile(self, target, results_dir, source):
"""Compile given source to an object file."""
obj = self._objpath(target, results_dir, source)
abs_source = os.path.join(get_buildroot(), source)
# TODO: include dir should include dependent work dir when headers are copied there.
include_dirs = []
for dep in target.dependencies:
if self.is_library(dep):<|fim▁hole|> cmd = [self.cpp_toolchain.compiler]
cmd.extend(['-c'])
cmd.extend(('-I{0}'.format(i) for i in include_dirs))
cmd.extend(['-o' + obj, abs_source])
if self.get_options().cc_options != None:
cmd.extend([self.get_options().cc_options])
# TODO: submit_async_work with self.run_command, [(cmd)] as a Work object.
with self.context.new_workunit(name='cpp-compile', labels=[WorkUnit.COMPILER]) as workunit:
self.run_command(cmd, workunit)
self.context.log.info('Built c++ object: {0}'.format(obj))<|fim▁end|> | include_dirs.extend([os.path.join(get_buildroot(), dep.target_base)])
|
<|file_name|>email.type.d.ts<|end_file_name|><|fim▁begin|>/**
* Email object for Opening Email Composer
*/
export interface Email {
app?: string;
to?: string | Array<string>;
cc?: string | Array<string>;<|fim▁hole|> body?: string;
isHtml?: boolean;
}<|fim▁end|> | bcc?: string | Array<string>;
attachments?: Array<any>;
subject?: string; |
<|file_name|>test_variable.py<|end_file_name|><|fim▁begin|># Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
class TestVariable(unittest.TestCase):
PROJECT = 'PROJECT'
CONFIG_NAME = 'config_name'
VARIABLE_NAME = 'variable_name'
PATH = 'projects/%s/configs/%s/variables/%s' % (
PROJECT, CONFIG_NAME, VARIABLE_NAME)
@staticmethod
def _get_target_class():
from google.cloud.runtimeconfig.variable import Variable
return Variable
def _make_one(self, *args, **kw):
return self._get_target_class()(*args, **kw)
def _verifyResourceProperties(self, variable, resource):
import base64
from google.cloud._helpers import _rfc3339_to_datetime
if 'name' in resource:
self.assertEqual(variable.full_name, resource['name'])
if 'value' in resource:
self.assertEqual(
variable.value, base64.b64decode(resource['value']))
else:
self.assertIsNone(variable.value)
if 'state' in resource:
self.assertEqual(variable.state, resource['state'])
if 'updateTime' in resource:
self.assertEqual(
variable.update_time,
_rfc3339_to_datetime(resource['updateTime']))
else:
self.assertIsNone(variable.update_time)
def test_ctor(self):
from google.cloud.runtimeconfig.config import Config
client = _Client(project=self.PROJECT)
config = Config(name=self.CONFIG_NAME, client=client)
variable = self._make_one(name=self.VARIABLE_NAME, config=config)
self.assertEqual(variable.name, self.VARIABLE_NAME)
self.assertEqual(variable.full_name, self.PATH)
self.assertEqual(variable.path, '/%s' % (self.PATH,))
self.assertIs(variable.client, client)
def test_ctor_w_no_name(self):
from google.cloud.runtimeconfig.config import Config
client = _Client(project=self.PROJECT)
config = Config(name=self.CONFIG_NAME, client=client)
variable = self._make_one(name=None, config=config)
with self.assertRaises(ValueError):
getattr(variable, 'full_name')
def test_exists_miss_w_bound_client(self):
from google.cloud.runtimeconfig.config import Config
conn = _Connection()
client = _Client(project=self.PROJECT, connection=conn)
config = Config(name=self.CONFIG_NAME, client=client)
variable = self._make_one(name=self.VARIABLE_NAME, config=config)
self.assertFalse(variable.exists())
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(req['method'], 'GET')
self.assertEqual(req['path'], '/%s' % (self.PATH,))
self.assertEqual(req['query_params'], {'fields': 'name'})
def test_exists_hit_w_alternate_client(self):
from google.cloud.runtimeconfig.config import Config
conn1 = _Connection()
CLIENT1 = _Client(project=self.PROJECT, connection=conn1)
CONFIG1 = Config(name=self.CONFIG_NAME, client=CLIENT1)
conn2 = _Connection({})
CLIENT2 = _Client(project=self.PROJECT, connection=conn2)
variable = self._make_one(name=self.VARIABLE_NAME, config=CONFIG1)
self.assertTrue(variable.exists(client=CLIENT2))
self.assertEqual(len(conn1._requested), 0)
self.assertEqual(len(conn2._requested), 1)
req = conn2._requested[0]
self.assertEqual(req['method'], 'GET')
self.assertEqual(req['path'], '/%s' % (self.PATH,))
self.assertEqual(req['query_params'], {'fields': 'name'})
def test_reload_w_bound_client(self):
from google.cloud.runtimeconfig.config import Config
RESOURCE = {
'name': self.PATH,
'value': 'bXktdmFyaWFibGUtdmFsdWU=', # base64 my-variable-value
'updateTime': '2016-04-14T21:21:54.5000Z',
'state': 'VARIABLE_STATE_UNSPECIFIED',
}
conn = _Connection(RESOURCE)
client = _Client(project=self.PROJECT, connection=conn)
config = Config(name=self.CONFIG_NAME, client=client)
variable = self._make_one(name=self.VARIABLE_NAME, config=config)
variable.reload()
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(req['method'], 'GET')<|fim▁hole|> self.assertEqual(req['path'], '/%s' % (self.PATH,))
self._verifyResourceProperties(variable, RESOURCE)
def test_reload_w_empty_resource(self):
from google.cloud.runtimeconfig.config import Config
RESOURCE = {}
conn = _Connection(RESOURCE)
client = _Client(project=self.PROJECT, connection=conn)
config = Config(name=self.CONFIG_NAME, client=client)
variable = self._make_one(name=self.VARIABLE_NAME, config=config)
variable.reload()
# Name should not be overwritten.
self.assertEqual(self.VARIABLE_NAME, variable.name)
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(req['method'], 'GET')
self.assertEqual(req['path'], '/%s' % (self.PATH,))
self._verifyResourceProperties(variable, RESOURCE)
def test_reload_w_alternate_client(self):
from google.cloud.runtimeconfig.config import Config
RESOURCE = {
'name': self.PATH,
'value': 'bXktdmFyaWFibGUtdmFsdWU=', # base64 my-variable-value
'updateTime': '2016-04-14T21:21:54.5000Z',
'state': 'VARIABLE_STATE_UNSPECIFIED',
}
conn1 = _Connection()
CLIENT1 = _Client(project=self.PROJECT, connection=conn1)
CONFIG1 = Config(name=self.CONFIG_NAME, client=CLIENT1)
conn2 = _Connection(RESOURCE)
CLIENT2 = _Client(project=self.PROJECT, connection=conn2)
variable = self._make_one(name=self.VARIABLE_NAME, config=CONFIG1)
variable.reload(client=CLIENT2)
self.assertEqual(len(conn1._requested), 0)
self.assertEqual(len(conn2._requested), 1)
req = conn2._requested[0]
self.assertEqual(req['method'], 'GET')
self.assertEqual(req['path'], '/%s' % (self.PATH,))
self._verifyResourceProperties(variable, RESOURCE)
class _Client(object):
_connection = None
def __init__(self, project, connection=None):
self.project = project
self._connection = connection
class _Connection(object):
def __init__(self, *responses):
self._responses = responses
self._requested = []
def api_request(self, **kw):
from google.cloud.exceptions import NotFound
self._requested.append(kw)
try:
response, self._responses = self._responses[0], self._responses[1:]
except:
raise NotFound('miss')
else:
return response<|fim▁end|> | |
<|file_name|>raw.go<|end_file_name|><|fim▁begin|>package raw
import (
"fmt"
"../../platforms/common"
)
type FieldMacros struct {}
func (FieldMacros) DecodeDW0() {
macro := common.GetMacro()
// Do not decode, print as is.
macro.Add(fmt.Sprintf("0x%0.8x", macro.Register(common.PAD_CFG_DW0).ValueGet()))
}
func (FieldMacros) DecodeDW1() {
macro := common.GetMacro()<|fim▁hole|>}
// GenerateString - generates the entire string of bitfield macros.
func (bitfields FieldMacros) GenerateString() {
macro := common.GetMacro()
macro.Add("_PAD_CFG_STRUCT(").Id().Add(", ")
bitfields.DecodeDW0()
macro.Add(", ")
bitfields.DecodeDW1()
macro.Add("),")
}<|fim▁end|> | // Do not decode, print as is.
macro.Add(fmt.Sprintf("0x%0.8x", macro.Register(common.PAD_CFG_DW1).ValueGet())) |
<|file_name|>provider.py<|end_file_name|><|fim▁begin|>from allauth.socialaccount import providers
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth.provider import OAuthProvider
from allauth.socialaccount.models import SocialApp, SocialToken
import oauth2 as oauth
import urllib, urllib2, json
class TwitterAccount(ProviderAccount):
def get_screen_name(self):
return self.account.extra_data.get('screen_name')
def get_profile_url(self):
ret = None
screen_name = self.get_screen_name()
if screen_name:
ret = 'http://twitter.com/' + screen_name
return ret
def get_avatar_url(self):
ret = None
profile_image_url = self.account.extra_data.get('profile_image_url')
if profile_image_url:
# Hmm, hack to get our hands on the large image. Not
# really documented, but seems to work.
ret = profile_image_url.replace('_normal', '')
return ret
def has_valid_authentication(self):
account = self.account
app = SocialApp.objects.get_current(self.account.get_provider().id)
tokens = SocialToken.objects.filter(app=app, account=account).order_by('-id')
if tokens:
token = tokens[0]
consumer = oauth.Consumer(key=app.key, secret=app.secret)
access_token = oauth.Token(key=token.token, secret=token.token_secret)<|fim▁hole|> return True
except urllib2.HTTPError:
return False
return False
def request_url(self, url, args={}, callback=None):
account = self.account
app = SocialApp.objects.get_current(self.account.get_provider().id)
tokens = SocialToken.objects.filter(app=app, account=account).order_by('-id')
if tokens:
token = tokens[0]
consumer = oauth.Consumer(key=app.key, secret=app.secret)
access_token = oauth.Token(key=token.token, secret=token.token_secret)
client = oauth.Client(consumer, access_token)
full_url = '%s?%s' % (url, urllib.urlencode(args))
response, data = client.request(full_url)
if callback: callback(full_url, data)
return json.loads(data)
return None
def __unicode__(self):
screen_name = self.get_screen_name()
return screen_name or super(TwitterAccount, self).__unicode__()
class TwitterProvider(OAuthProvider):
id = 'twitter'
name = 'Twitter'
package = 'allauth.socialaccount.providers.twitter'
account_class = TwitterAccount
providers.registry.register(TwitterProvider)<|fim▁end|> | client = oauth.Client(consumer, access_token)
try:
response, data = client.request('https://api.twitter.com/1.1/account/verify_credentials.json') |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.