// deprecated.go

package nova

import (
    "fmt"
    "net/http"

    "gopkg.in/goose.v2/client"
    "gopkg.in/goose.v2/errors"
    goosehttp "gopkg.in/goose.v2/http"
)

// The following API requests found in this file are officially deprecated by
// the upstream openstack project.
// The API requests will be left as is, but marked as deprecated and will be
// removed in the v3 release of goose. Migrating API calls to the new API
// requests is recommended.
const (
    // Deprecated.
    // https://docs.openstack.org/api-ref/compute/?expanded=list-security-groups-detail#list-security-groups
    apiSecurityGroups = "os-security-groups"

    // Deprecated.
    // https://docs.openstack.org/api-ref/compute/?expanded=list-security-groups-detail#create-security-group-rule
    apiSecurityGroupRules = "os-security-group-rules"

    // Deprecated.
    // https://docs.openstack.org/api-ref/compute/?expanded=list-security-groups-detail#show-fixed-ip-details
    apiFloatingIPs = "os-floating-ips"
)

// SecurityGroupRef refers to an existing named security group
type SecurityGroupRef struct {
    TenantId string `json:"tenant_id"`
    Name     string `json:"name"`
}

// SecurityGroupRule describes a rule of a security group. There are 2
// basic rule types: ingress and group rules (see RuleInfo struct).
type SecurityGroupRule struct {
    FromPort      *int              `json:"from_port"`   // Can be nil
    IPProtocol    *string           `json:"ip_protocol"` // Can be nil
    ToPort        *int              `json:"to_port"`     // Can be nil
    ParentGroupId string            `json:"-"`
    IPRange       map[string]string `json:"ip_range"` // Can be empty
    Id            string            `json:"-"`
    Group         SecurityGroupRef
}

// SecurityGroup describes a single security group in OpenStack.
type SecurityGroup struct {
    Rules       []SecurityGroupRule
    TenantId    string `json:"tenant_id"`
    Id          string `json:"-"`
    Name        string
    Description string
}

// ListSecurityGroups lists IDs, names, and other details for all security groups.
func (c *Client) ListSecurityGroups() ([]SecurityGroup, error) {
    var resp struct {
        Groups []SecurityGroup `json:"security_groups"`
    }
    requestData := goosehttp.RequestData{RespValue: &resp}
    err := c.client.SendRequest(client.GET, "compute", "v2", apiSecurityGroups, &requestData)
    if err != nil {
        return nil, errors.Newf(err, "failed to list security groups")
    }
    return resp.Groups, nil
}

// SecurityGroupByName returns the named security group.
// Note: due to lack of filtering support when querying security groups, this
// is not an efficient implementation, but it's all we can do for now.
func (c *Client) SecurityGroupByName(name string) (*SecurityGroup, error) {
    // OpenStack does not support group filtering, so we need to load them all
    // and manually search by name.
    groups, err := c.ListSecurityGroups()
    if err != nil {
        return nil, err
    }
    for _, group := range groups {
        if group.Name == name {
            return &group, nil
        }
    }
    return nil, errors.NewNotFoundf(nil, "", "Security group %s not found.", name)
}

// GetServerSecurityGroups lists security groups for a specific server.
func (c *Client) GetServerSecurityGroups(serverId string) ([]SecurityGroup, error) {
    var resp struct {
        Groups []SecurityGroup `json:"security_groups"`
    }
    url := fmt.Sprintf("%s/%s/%s", apiServers, serverId, apiSecurityGroups)
    requestData := goosehttp.RequestData{RespValue: &resp}
    err := c.client.SendRequest(client.GET, "compute", "v2", url, &requestData)
    if err != nil {
        // Sadly HP Cloud lacks the necessary API and also doesn't provide full
        // SecurityGroup lookup. The best we can do for now is to use just the
        // Name from the group entities.
        if errors.IsNotFound(err) {
            serverDetails, err := c.GetServer(serverId)
            if err == nil && serverDetails.Groups != nil {
                result := make([]SecurityGroup, len(*serverDetails.Groups))
                for i, e := range *serverDetails.Groups {
                    result[i] = SecurityGroup{Name: e.Name}
                }
                return result, nil
            }
        }
        return nil, errors.Newf(err, "failed to list server (%s) security groups", serverId)
    }
    return resp.Groups, nil
}

// CreateSecurityGroup creates a new security group.
func (c *Client) CreateSecurityGroup(name, description string) (*SecurityGroup, error) {
    var req struct {
        SecurityGroup struct {
            Name        string `json:"name"`
            Description string `json:"description"`
        } `json:"security_group"`
    }
    req.SecurityGroup.Name = name
    req.SecurityGroup.Description = description
    var resp struct {
        SecurityGroup SecurityGroup `json:"security_group"`
    }
    requestData := goosehttp.RequestData{ReqValue: req, RespValue: &resp, ExpectedStatus: []int{http.StatusOK}}
    err := c.client.SendRequest(client.POST, "compute", "v2", apiSecurityGroups, &requestData)
    if err != nil {
        return nil, errors.Newf(err, "failed to create a security group with name: %s", name)
    }
    return &resp.SecurityGroup, nil
}

// DeleteSecurityGroup deletes the specified security group.
func (c *Client) DeleteSecurityGroup(groupId string) error {
    url := fmt.Sprintf("%s/%s", apiSecurityGroups, groupId)
    requestData := goosehttp.RequestData{ExpectedStatus: []int{http.StatusAccepted}}
    err := c.client.SendRequest(client.DELETE, "compute", "v2", url, &requestData)
    if err != nil {
        err = errors.Newf(err, "failed to delete security group with id: %s", groupId)
    }
    return err
}

// UpdateSecurityGroup updates the name and description of the given group.
func (c *Client) UpdateSecurityGroup(groupId, name, description string) (*SecurityGroup, error) {
    var req struct {
        SecurityGroup struct {
            Name        string `json:"name"`
            Description string `json:"description"`
        } `json:"security_group"`
    }
    req.SecurityGroup.Name = name
    req.SecurityGroup.Description = description
    var resp struct {
        SecurityGroup SecurityGroup `json:"security_group"`
    }
    url := fmt.Sprintf("%s/%s", apiSecurityGroups, groupId)
    requestData := goosehttp.RequestData{ReqValue: req, RespValue: &resp, ExpectedStatus: []int{http.StatusOK}}
    err := c.client.SendRequest(client.PUT, "compute", "v2", url, &requestData)
    if err != nil {
        return nil, errors.Newf(err, "failed to update security group with Id %s to name: %s", groupId, name)
    }
    return &resp.SecurityGroup, nil
}

// RuleInfo allows the callers of CreateSecurityGroupRule() to
// create 2 types of security group rules: ingress rules and group
// rules. The difference stems from how the "source" is defined.
// It can be either:
// 1. Ingress rules - specified directly with any valid subnet mask
//    in CIDR format (e.g. "192.168.0.0/16");
// 2. Group rules - specified indirectly by giving a source group,
//    which can be any user's group (different tenant ID).
//
// Every rule works as an iptables ACCEPT rule, thus a group with no
// rules does not allow ingress at all. Rules can be added and removed
// while the server(s) are running. The set of security groups that
// apply to a server is changed only when the server is
// started. Adding or removing a security group on a running server
// will not take effect until that server is restarted. However,
// changing rules of existing groups will take effect immediately.
//
// For more information:
// http://docs.openstack.org/developer/nova/nova.concepts.html#concept-security-groups
// Nova source: https://github.com/openstack/nova.git
type RuleInfo struct {
    // IPProtocol is optional, and if specified must be "tcp", "udp" or
    // "icmp" (in this case, both FromPort and ToPort can be -1).
    IPProtocol string `json:"ip_protocol"`

    // FromPort and ToPort are both optional, and if specified must be
    // integers between 1 and 65535 (valid TCP port numbers). -1 is a
    // special value, meaning "use default" (e.g. for ICMP).
    FromPort int `json:"from_port"`
    ToPort   int `json:"to_port"`

    // Cidr cannot be specified with GroupId. Ingress rules need a valid
    // subnet mask in CIDR format here, while if GroupId is specified, it
    // means you're adding a group rule, specifying the source group ID,
    // which must already exist and can be equal to ParentGroupId.
    Cidr    string  `json:"cidr"`
    GroupId *string `json:"-"`

    // ParentGroupId is always required and specifies the group to which
    // the rule is added.
    ParentGroupId string `json:"-"`
}

// CreateSecurityGroupRule creates a security group rule.
// It can either be an ingress rule or group rule (see the
// description of RuleInfo).
func (c *Client) CreateSecurityGroupRule(ruleInfo RuleInfo) (*SecurityGroupRule, error) {
    var req struct {
        SecurityGroupRule RuleInfo `json:"security_group_rule"`
    }
    req.SecurityGroupRule = ruleInfo
    var resp struct {
        SecurityGroupRule SecurityGroupRule `json:"security_group_rule"`
    }
    requestData := goosehttp.RequestData{ReqValue: req, RespValue: &resp}
    err := c.client.SendRequest(client.POST, "compute", "v2", apiSecurityGroupRules, &requestData)
    if err != nil {
        return nil, errors.Newf(err, "failed to create a rule for the security group with id: %v", ruleInfo.GroupId)
    }
    return &resp.SecurityGroupRule, nil
}

// DeleteSecurityGroupRule deletes the specified security group rule.
func (c *Client) DeleteSecurityGroupRule(ruleId string) error {
    url := fmt.Sprintf("%s/%s", apiSecurityGroupRules, ruleId)
    requestData := goosehttp.RequestData{ExpectedStatus: []int{http.StatusAccepted}}
    err := c.client.SendRequest(client.DELETE, "compute", "v2", url, &requestData)
    if err != nil {
        err = errors.Newf(err, "failed to delete security group rule with id: %s", ruleId)
    }
    return err
}

// FloatingIP describes a floating (public) IP address, which can be
// assigned to a server, thus allowing connections from outside.
type FloatingIP struct {
    // FixedIP holds the private IP address of the machine (when assigned)
    FixedIP *string `json:"fixed_ip"`
    Id      string  `json:"-"`
    // InstanceId holds the instance id of the machine, if this FIP is assigned to one
    InstanceId *string `json:"-"`
    IP         string  `json:"ip"`
    Pool       string  `json:"pool"`
}

// ListFloatingIPs lists floating IP addresses associated with the tenant or account.
func (c *Client) ListFloatingIPs() ([]FloatingIP, error) {
    var resp struct {
        FloatingIPs []FloatingIP `json:"floating_ips"`
    }
    requestData := goosehttp.RequestData{RespValue: &resp}
    err := c.client.SendRequest(client.GET, "compute", "v2", apiFloatingIPs, &requestData)
    if err != nil {
        return nil, errors.Newf(err, "failed to list floating ips")
    }
    return resp.FloatingIPs, nil
}

// GetFloatingIP lists details of the floating IP address associated with specified id.
func (c *Client) GetFloatingIP(ipId string) (*FloatingIP, error) {
    var resp struct {
        FloatingIP FloatingIP `json:"floating_ip"`
    }
    url := fmt.Sprintf("%s/%s", apiFloatingIPs, ipId)
    requestData := goosehttp.RequestData{RespValue: &resp}
    err := c.client.SendRequest(client.GET, "compute", "v2", url, &requestData)
    if err != nil {
        return nil, errors.Newf(err, "failed to get floating ip %s details", ipId)
    }
    return &resp.FloatingIP, nil
}

// AllocateFloatingIP allocates a new floating IP address to a tenant or account.
func (c *Client) AllocateFloatingIP() (*FloatingIP, error) {
    var resp struct {
        FloatingIP FloatingIP `json:"floating_ip"`
    }
    requestData := goosehttp.RequestData{RespValue: &resp}
    err := c.client.SendRequest(client.POST, "compute", "v2", apiFloatingIPs, &requestData)
    if err != nil {
        return nil, errors.Newf(err, "failed to allocate a floating ip")
    }
    return &resp.FloatingIP, nil
}

// DeleteFloatingIP deallocates the floating IP address associated with the specified id.
func (c *Client) DeleteFloatingIP(ipId string) error {
    url := fmt.Sprintf("%s/%s", apiFloatingIPs, ipId)
    requestData := goosehttp.RequestData{ExpectedStatus: []int{http.StatusAccepted}}
    err := c.client.SendRequest(client.DELETE, "compute", "v2", url, &requestData)
    if err != nil {
        err = errors.Newf(err, "failed to delete floating ip %s", ipId)
    }
    return err
}
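// Example (illustrative sketch, not part of the original source): a typical
// call sequence for the deprecated security-group API above. The client
// value `novaClient` and the CIDR are placeholders.
//
//    group, err := novaClient.CreateSecurityGroup("web", "allow inbound http")
//    if err == nil {
//        _, err = novaClient.CreateSecurityGroupRule(nova.RuleInfo{
//            IPProtocol:    "tcp",
//            FromPort:      80,
//            ToPort:        80,
//            Cidr:          "0.0.0.0/0",
//            ParentGroupId: group.Id,
//        })
//    }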
# models.py

from django.db import models

# Create your models here.


class Tipo_Vehiculo(models.Model):
    """docstring for Tipo_Vehiculo"""

    def __init__(self, *args, **kwargs):
        super(Tipo_Vehiculo, self).__init__(*args, **kwargs)

    tipo_vehiculo = models.CharField(max_length=100, unique=True)
    adicional1 = models.CharField(max_length=250, blank=True)
    adicional2 = models.CharField(max_length=250, blank=True)
    adicional3 = models.CharField(max_length=250, blank=True)
    adicional4 = models.CharField(max_length=250, blank=True)
    activo = models.BooleanField(default=True)

    def __unicode__(self):
        return self.tipo_vehiculo

    class Meta:
        verbose_name_plural = "Tipos de Vehiculos"


class Modelo_Vehiculo(models.Model):
    """docstring for Modelo_Vehiculo"""

    def __init__(self, *args, **kwargs):
        super(Modelo_Vehiculo, self).__init__(*args, **kwargs)

    modelo_vehiculo = models.CharField(max_length=100)
    capacidad_peso = models.IntegerField()
    capacidad_x = models.DecimalField(max_digits=6, decimal_places=2)
    capacidad_y = models.DecimalField(max_digits=6, decimal_places=2)
    capacidad_z = models.DecimalField(max_digits=6, decimal_places=2)
    capacidad_m3 = models.DecimalField(max_digits=6, decimal_places=2)
    adicional1 = models.CharField(max_length=250, blank=True)
    adicional2 = models.CharField(max_length=250, blank=True)
    adicional3 = models.CharField(max_length=250, blank=True)
    adicional4 = models.CharField(max_length=250, blank=True)
    activo = models.BooleanField(default=True)

    def __unicode__(self):
        return self.modelo_vehiculo

    class Meta:
        verbose_name_plural = "Modelos de Vehiculos"


class Vehiculo(models.Model):
    """docstring for Vehiculo"""

    def __init__(self, *args, **kwargs):
        super(Vehiculo, self).__init__(*args, **kwargs)

    numero_vehiculo = models.CharField(max_length=10)
    # mantenimiento_vehiculo = models.ForeignKey()
    vehiculo = models.CharField(max_length=100)
    patente = models.CharField(max_length=100)
    tipo_vehiculo = models.ForeignKey(Tipo_Vehiculo)
    modelo_vehiculo = models.ForeignKey(Modelo_Vehiculo)
    adicional1 = models.CharField(max_length=250, blank=True)
    adicional2 = models.CharField(max_length=250, blank=True)
    adicional3 = models.CharField(max_length=250, blank=True)
    adicional4 = models.CharField(max_length=250, blank=True)
    activo = models.BooleanField(default=True)

    def __unicode__(self):
        return self.vehiculo

    class Meta:
        verbose_name_plural = "Vehiculos"
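# Example (illustrative sketch, not part of the original source): creating a
# vehicle tied to its catalog rows; all field values are placeholders.
#
#     tipo = Tipo_Vehiculo.objects.create(tipo_vehiculo="Camion")
#     modelo = Modelo_Vehiculo.objects.create(
#         modelo_vehiculo="F-100", capacidad_peso=1000,
#         capacidad_x=2, capacidad_y=2, capacidad_z=3, capacidad_m3=12)
#     Vehiculo.objects.create(numero_vehiculo="V-01", vehiculo="Ford F-100",
#                             patente="AB123CD", tipo_vehiculo=tipo,
#                             modelo_vehiculo=modelo)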
<|file_name|>parser.rs<|end_file_name|><|fim▁begin|>use lexer::{Keyword, Lexer, Pos, Symbol, Token, TokenKind}; use node::{ASTKind, Bits, AST}; use node; use types::{Sign, StorageClass, Type}; use std::str; use std::boxed::Box; use std::io::{stderr, Write}; use std::collections::{hash_map, HashMap, VecDeque}; // use CODEGEN; extern crate llvm_sys as llvm; extern crate rand; use self::rand::Rng; extern crate ansi_term; use self::ansi_term::Colour; // TODO: add more error kinds pub enum Error { Something, EOF, } pub struct Qualifiers { pub q_restrict: bool, pub q_const: bool, pub q_constexpr: bool, pub q_volatile: bool, pub q_inline: bool, pub q_noreturn: bool, } impl Qualifiers { pub fn new() -> Qualifiers { Qualifiers { q_restrict: false, q_const: false, q_constexpr: false, q_volatile: false, q_inline: false, q_noreturn: false, } } } pub type ParseR<T> = Result<T, Error>; pub struct Parser<'a> { pub lexer: &'a mut Lexer, pub err_counts: usize, env: Env<AST>, tags: Env<Type>, // constexpr_func_map: HashSet<String>, } pub struct Env<T: Clone>(pub VecDeque<HashMap<String, T>>); impl<T: Clone> Env<T> { fn new() -> Env<T> { let mut env = VecDeque::new(); env.push_back(HashMap::new()); Env(env) } fn push(&mut self) { let localenv = (*self.0.back().unwrap()).clone(); self.0.push_back(localenv); } fn pop(&mut self) { self.0.pop_back(); } fn add(&mut self, name: String, val: T) { self.0.back_mut().unwrap().insert(name, val); } fn add_globally(&mut self, name: String, val: T) { self.0[0].insert(name.clone(), val.clone()); self.0.back_mut().unwrap().insert(name, val); } fn is_local(&self) -> bool { self.0.len() > 1 } fn back_mut(&mut self) -> Option<&mut HashMap<String, T>> { self.0.back_mut() } fn get(&mut self, name: &str) -> Option<&T> { self.0.back_mut().unwrap().get(name) } fn contains(&mut self, name: &str) -> bool { self.0.back_mut().unwrap().contains_key(name) } } macro_rules! matches { ($e:expr, $p:pat) => { match $e { $p => true, _ => false } } } macro_rules! ident_val { ($e:expr) => { match &$e.kind { &TokenKind::Identifier(ref ident) => ident.to_string(), _ => "".to_string() } } } macro_rules! expect_symbol_error { ($slf:expr, $sym:expr, $msg:expr) => {{ if !try!($slf.lexer.skip_symbol($sym)) { let peek = $slf.lexer.peek(); $slf.show_error_token(&try!(peek), $msg); } }} } impl<'a> Parser<'a> { pub fn new(lexer: &'a mut Lexer) -> Parser<'a> { Parser { lexer: lexer, err_counts: 0, env: Env::new(), tags: Env::new(), // constexpr_func_map: HashSet::new(), } } fn show_error(&mut self, msg: &str) { self.err_counts += 1; writeln!( &mut stderr(), "{}: {} {}: {}", self.lexer.get_filename(), Colour::Red.bold().paint("error:"), self.lexer.get_cur_line(), msg ).unwrap(); } fn show_error_token(&mut self, token: &Token, msg: &str) { self.err_counts += 1; writeln!( &mut stderr(), "{}: {} {}: {}", self.lexer.get_filename(), Colour::Red.bold().paint("error:"), token.pos.line, msg ).unwrap(); writeln!( &mut stderr(), "{}", self.lexer .get_surrounding_code_with_err_point(token.pos.pos,) ).unwrap(); } pub fn run_file(filename: String) -> Vec<AST> { let mut nodes: Vec<AST> = Vec::new(); let mut lexer = Lexer::new(filename.to_string()); // TODO: for debugging // loop { // let tok = lexer.get(); // match tok { // Some(t) => { // println!("t:{}{:?} {}", if t.space { " " } else { "" }, t.kind, t.val); // } // None => break, // } // } // // // Debug: (parsing again is big cost?) 
// lexer = Lexer::new(filename.to_string(), s.as_str()); Parser::new(&mut lexer).run(&mut nodes); nodes } pub fn run(&mut self, node: &mut Vec<AST>) { while matches!(self.read_toplevel(node), Ok(_)) {} self.show_total_errors(); } pub fn run_as_expr(&mut self) -> ParseR<AST> { self.read_expr() } pub fn show_total_errors(&mut self) { if self.err_counts > 0 { println!( "{} error{} generated.", self.err_counts, if self.err_counts > 1 { "s" } else { "" } ); ::std::process::exit(-1); } } pub fn read_toplevel(&mut self, ast: &mut Vec<AST>) -> ParseR<()> { // TODO: refine if try!(self.is_function_def()) { match self.read_func_def() { Ok(ok) => ast.push(ok), Err(Error::EOF) => self.show_error("expected a token, but reached EOF"), Err(e) => return Err(e), } } else { match self.read_decl(ast) { Err(Error::EOF) => self.show_error("expected a token, but reached EOF"), Err(e) => return Err(e), _ => {} } } Ok(()) } fn read_func_def(&mut self) -> ParseR<AST> { self.env.push(); self.tags.push(); let (ret_ty, _, _qualifiers) = try!(self.read_type_spec()); let (functy, name, param_names) = try!(self.read_declarator(ret_ty)); // if qualifiers.q_constexpr { // self.constexpr_func_map.insert(name.clone()); // } self.env.add_globally( name.clone(), AST::new( ASTKind::Variable(functy.clone(), name.clone()), Pos::new(0, 0), ), ); self.env.add( "__func__".to_string(), AST::new(ASTKind::String(name.clone()), Pos::new(0, 0)), ); expect_symbol_error!(self, Symbol::OpeningBrace, "expected '('"); let body = try!(self.read_func_body(&functy)); self.env.pop(); self.tags.pop(); Ok(AST::new( ASTKind::FuncDef( functy, if param_names.is_none() { Vec::new() } else { param_names.unwrap() }, name, Box::new(body), ), Pos::new(0, 0), )) } fn read_func_body(&mut self, _functy: &Type) -> ParseR<AST> { self.read_compound_stmt() } fn read_compound_stmt(&mut self) -> ParseR<AST> { let mut stmts: Vec<AST> = Vec::new(); loop { if try!(self.lexer.skip_symbol(Symbol::ClosingBrace).or_else(|eof| { self.show_error("expected '}'"); Err(eof) })) { break; } let peek_tok = try!(self.lexer.peek()); if self.is_type(&peek_tok) { // variable declaration try!(self.read_decl(&mut stmts)); } else { match self.read_stmt() { Ok(stmt) => stmts.push(stmt), Err(_) => {} } } } Ok(AST::new(ASTKind::Block(stmts), Pos::new(0, 0))) } fn read_stmt(&mut self) -> ParseR<AST> { let tok = try!(self.lexer.get()); if let &TokenKind::Keyword(ref keyw) = &tok.kind { match *keyw { Keyword::If => return self.read_if_stmt(), Keyword::For => return self.read_for_stmt(), Keyword::While => return self.read_while_stmt(), Keyword::Do => return self.read_do_while_stmt(), Keyword::Switch => return self.read_switch_stmt(), Keyword::Case => return self.read_case_label(), Keyword::Default => return self.read_default_label(), Keyword::Goto => return self.read_goto_stmt(), Keyword::Continue => return self.read_continue_stmt(), Keyword::Break => return self.read_break_stmt(), Keyword::Return => return self.read_return_stmt(), _ => {} } } else if let &TokenKind::Symbol(Symbol::OpeningBrace) = &tok.kind { return self.read_compound_stmt(); } if matches!(tok.kind, TokenKind::Identifier(_)) && try!(self.lexer.peek_symbol_token_is(Symbol::Colon)) { return self.read_label(tok); } self.lexer.unget(tok); let expr = self.read_opt_expr(); expect_symbol_error!(self, Symbol::Semicolon, "expected ';'"); expr } fn read_if_stmt(&mut self) -> ParseR<AST> { expect_symbol_error!(self, Symbol::OpeningParen, "expected '('"); let cond = try!(self.read_expr()); expect_symbol_error!(self, Symbol::ClosingParen, 
"expected ')'"); let then_stmt = Box::new(try!(self.read_stmt())); let else_stmt = if try!(self.lexer.skip_keyword(Keyword::Else)) { Box::new(try!(self.read_stmt())) } else { Box::new(AST::new(ASTKind::Block(Vec::new()), Pos::new(0, 0))) }; Ok(AST::new( ASTKind::If(Box::new(cond), then_stmt, else_stmt), Pos::new(0, 0), )) } fn read_for_stmt(&mut self) -> ParseR<AST> { expect_symbol_error!(self, Symbol::OpeningParen, "expected '('"); let init = try!(self.read_opt_decl_or_stmt()); // TODO: make read_expr return Option<AST>. // when cur tok is ';', returns None. let cond = try!(self.read_opt_expr()); expect_symbol_error!(self, Symbol::Semicolon, "expected ';'"); let step = if try!(self.lexer.peek_symbol_token_is(Symbol::ClosingParen)) { AST::new(ASTKind::Compound(Vec::new()), self.lexer.get_cur_pos()) } else { try!(self.read_opt_expr()) }; expect_symbol_error!(self, Symbol::ClosingParen, "expected ')'"); let body = try!(self.read_stmt()); Ok(AST::new( ASTKind::For( Box::new(init), Box::new(cond), Box::new(step), Box::new(body), ), Pos::new(0, 0), )) } fn read_while_stmt(&mut self) -> ParseR<AST> { expect_symbol_error!(self, Symbol::OpeningParen, "expected '('"); let cond = try!(self.read_expr()); expect_symbol_error!(self, Symbol::ClosingParen, "expected ')'"); let body = try!(self.read_stmt()); Ok(AST::new( ASTKind::While(Box::new(cond), Box::new(body)), Pos::new(0, 0), )) } fn read_do_while_stmt(&mut self) -> ParseR<AST> { let body = try!(self.read_stmt()); if !try!(self.lexer.skip_keyword(Keyword::While)) { let peek = self.lexer.peek(); self.show_error_token(&try!(peek), "expected 'while'"); } expect_symbol_error!(self, Symbol::OpeningParen, "expected '('"); let cond = try!(self.read_expr()); expect_symbol_error!(self, Symbol::ClosingParen, "expected ')'"); expect_symbol_error!(self, Symbol::Semicolon, "expected ';'"); Ok(AST::new( ASTKind::DoWhile(Box::new(cond), Box::new(body)), Pos::new(0, 0), )) } fn read_switch_stmt(&mut self) -> ParseR<AST> { expect_symbol_error!(self, Symbol::OpeningParen, "expected '('"); let cond = try!(self.read_expr()); expect_symbol_error!(self, Symbol::ClosingParen, "expected ')'"); let body = Box::new(try!(self.read_stmt())); Ok(AST::new( ASTKind::Switch(Box::new(cond), body), Pos::new(0, 0), )) } fn read_case_label(&mut self) -> ParseR<AST> { let expr = try!(self.read_expr()); expect_symbol_error!(self, Symbol::Colon, "expected ':'"); Ok(AST::new(ASTKind::Case(Box::new(expr)), Pos::new(0, 0))) } fn read_default_label(&mut self) -> ParseR<AST> { expect_symbol_error!(self, Symbol::Colon, "expected ':'"); Ok(AST::new(ASTKind::DefaultL, Pos::new(0, 0))) } fn read_goto_stmt(&mut self) -> ParseR<AST> { let pos = self.lexer.get_cur_pos(); let label_name = ident_val!(try!(self.lexer.get())); expect_symbol_error!(self, Symbol::Semicolon, "expected ';'"); Ok(AST::new(ASTKind::Goto(label_name), pos)) } fn read_label(&mut self, tok: Token) -> ParseR<AST> { let pos = self.lexer.get_cur_pos(); let label_name = ident_val!(tok); expect_symbol_error!(self, Symbol::Colon, "expected ':'"); Ok(AST::new(ASTKind::Label(label_name), pos)) } fn read_continue_stmt(&mut self) -> ParseR<AST> { let pos = self.lexer.get_cur_pos(); expect_symbol_error!(self, Symbol::Semicolon, "expected ';'"); Ok(AST::new(ASTKind::Continue, pos)) } fn read_break_stmt(&mut self) -> ParseR<AST> { let pos = self.lexer.get_cur_pos(); expect_symbol_error!(self, Symbol::Semicolon, "expected ';'"); Ok(AST::new(ASTKind::Break, pos)) } fn read_return_stmt(&mut self) -> ParseR<AST> { let pos = 
self.lexer.get_cur_pos(); if try!(self.lexer.skip_symbol(Symbol::Semicolon)) { Ok(AST::new(ASTKind::Return(None), pos)) } else { let retval = Some(Box::new(try!(self.read_expr()))); expect_symbol_error!(self, Symbol::Semicolon, "expected ';'"); Ok(AST::new(ASTKind::Return(retval), pos)) } } fn is_function_def(&mut self) -> ParseR<bool> { let mut buf = Vec::new(); let mut is_funcdef = false; loop { let mut tok = try!(self.lexer.get()); buf.push(tok.clone()); if tok.kind == TokenKind::Symbol(Symbol::Semicolon) { break; } if self.is_type(&tok) { continue; } if tok.kind == TokenKind::Symbol(Symbol::OpeningParen) { try!(self.skip_parens(&tok, &mut buf)); continue; } if !matches!(tok.kind, TokenKind::Identifier(_)) { continue; } if try!(self.lexer.peek()).kind != TokenKind::Symbol(Symbol::OpeningParen) { continue; } let opening_paren = try!(self.lexer.get()); buf.push(opening_paren.clone()); try!(self.skip_parens(&opening_paren, &mut buf)); tok = try!(self.lexer.peek()); is_funcdef = tok.kind == TokenKind::Symbol(Symbol::OpeningBrace); break; } self.lexer.unget_all(&buf); Ok(is_funcdef) } fn skip_parens(&mut self, opening_paren: &Token, buf: &mut Vec<Token>) -> ParseR<()> { loop { let tok = try!(self.lexer.get().or_else(|_| { self.show_error_token(&opening_paren, "expected ')', but reach EOF"); return Err(Error::Something); })); buf.push(tok.clone()); match tok.kind { TokenKind::Symbol(Symbol::OpeningParen) => try!(self.skip_parens(&tok, buf)), TokenKind::Symbol(Symbol::ClosingParen) => break, _ => {} }; } Ok(()) } fn skip_until(&mut self, sym: Symbol) { let ts = TokenKind::Symbol(sym); while match self.lexer.get() { Ok(tok) => tok.kind != ts, Err(_) => false, } {} } fn get_typedef(&mut self, name: &str) -> ParseR<Option<Type>> { match self.env.get(name) { Some(ast) => match ast.kind { ASTKind::Typedef(ref from, ref _to) => { let ty = match from { &Type::Struct(ref name, ref fields) | &Type::Union(ref name, ref fields, _) => { if fields.is_empty() { self.tags.get(name.as_str()).unwrap().clone() } else { from.clone() } } _ => from.clone(), }; return Ok(Some(ty)); } _ => {} }, None => return Ok(None), } Ok(None) } fn is_type(&mut self, token: &Token) -> bool { if let TokenKind::Keyword(ref keyw) = token.kind { match *keyw { Keyword::Typedef | Keyword::Extern | Keyword::Static | Keyword::Auto | Keyword::Register | Keyword::Const | Keyword::Volatile | Keyword::Void | Keyword::Signed | Keyword::Unsigned | Keyword::Char | Keyword::Int | Keyword::Short | Keyword::Long | Keyword::Float | Keyword::Double | Keyword::Struct | Keyword::Enum | Keyword::Union | Keyword::Noreturn | Keyword::Inline | Keyword::Restrict => true, _ => false, } } else if let TokenKind::Identifier(ref ident) = token.kind { match self.env.get(ident.as_str()) { Some(ast) => match ast.kind { ASTKind::Typedef(_, _) => true, _ => false, }, None => false, } } else { false } } fn is_string(&self, ty: &Type) -> bool { if let &Type::Array(ref elem_ty, _) = ty { if matches!(**elem_ty, Type::Char(Sign::Signed)) { return true; } } false } fn read_decl_init(&mut self, ty: &mut Type) -> ParseR<AST> { // TODO: implement for like 'int a[] = {...}, char *s="str";' if try!(self.lexer.peek_symbol_token_is(Symbol::OpeningBrace)) { return self.read_initializer_list(ty); } else if self.is_string(ty) { let tok = try!(self.lexer.get()); if let TokenKind::String(s) = tok.kind { return self.read_string_initializer(ty, s); } self.lexer.unget(tok); } self.read_assign() } fn read_initializer_elem(&mut self, ty: &mut Type) -> ParseR<AST> { if match *ty { 
Type::Array(_, _) | Type::Struct(_, _) | Type::Union(_, _, _) => true, _ => false, } { self.read_initializer_list(ty) } else if try!(self.lexer.peek_symbol_token_is(Symbol::OpeningBrace)) { let elem = self.read_initializer_elem(ty); expect_symbol_error!(self, Symbol::ClosingBrace, "expected '}'"); elem } else { self.read_assign() } } fn read_initializer_list(&mut self, ty: &mut Type) -> ParseR<AST> { if self.is_string(ty) { let tok = try!(self.lexer.get()); if let TokenKind::String(s) = tok.kind { return self.read_string_initializer(ty, s); } self.lexer.unget(tok); } match ty { &mut Type::Array(_, _) => self.read_array_initializer(ty), &mut Type::Struct(_, _) | &mut Type::Union(_, _, _) => self.read_struct_initializer(ty), _ => self.read_assign(), } } fn read_string_initializer(&mut self, ty: &mut Type, string: String) -> ParseR<AST> { let char_ary = string .chars() .map(|c| AST::new(ASTKind::Char(c as i32), Pos::new(0, 0))) .collect::<Vec<AST>>(); if let &mut Type::Array(_, ref mut len) = ty { *len = char_ary.len() as i32 + 1; } else { panic!() } Ok(AST::new( ASTKind::ConstArray(char_ary), self.lexer.get_cur_pos(), )) } fn read_array_initializer(&mut self, ty: &mut Type) -> ParseR<AST> { let has_brace = try!(self.lexer.skip_symbol(Symbol::OpeningBrace)); if let &mut Type::Array(ref elem_ty, ref mut len) = ty { let is_flexible = *len < 0; let mut elems = Vec::new(); let mut elem_ty = (**elem_ty).clone(); loop { let tok = try!(self.lexer.get()); if let TokenKind::Symbol(Symbol::ClosingBrace) = tok.kind { if !has_brace { self.lexer.unget(tok); } break; } self.lexer.unget(tok); let elem = try!(self.read_initializer_elem((&mut elem_ty))); elems.push(elem); try!(self.lexer.skip_symbol(Symbol::Comma)); } if is_flexible { *len = elems.len() as i32; } Ok(AST::new( ASTKind::ConstArray(elems), self.lexer.get_cur_pos(), )) } else { panic!() } } fn read_struct_initializer(&mut self, ty: &mut Type) -> ParseR<AST> { let tok = try!(self.lexer.get()); let has_brace = tok.kind == TokenKind::Symbol(Symbol::OpeningBrace); let mut fields_types = if let Some(fields_types) = ty.get_all_fields_types() { fields_types } else { self.show_error_token(&tok, "initializer of struct must be array"); return Err(Error::Something); }; let mut elems = Vec::new(); let mut field_type = fields_types.iter_mut(); loop { let tok = try!(self.lexer.get()); if let TokenKind::Symbol(Symbol::ClosingBrace) = tok.kind { if !has_brace { self.lexer.unget(tok); } break; } self.lexer.unget(tok); let elem = try!(self.read_initializer_elem(&mut field_type.next().unwrap().clone())); elems.push(elem); try!(self.lexer.skip_symbol(Symbol::Comma)); } Ok(AST::new( ASTKind::ConstStruct(elems), self.lexer.get_cur_pos(), )) } fn skip_type_qualifiers(&mut self) -> ParseR<()> { while try!(self.lexer.skip_keyword(Keyword::Const)) || try!(self.lexer.skip_keyword(Keyword::Volatile)) || try!(self.lexer.skip_keyword(Keyword::Restrict)) {} Ok(()) } fn read_decl(&mut self, ast: &mut Vec<AST>) -> ParseR<()> { let (basety, sclass, qualifiers) = try!(self.read_type_spec()); let is_typedef = sclass == StorageClass::Typedef; if try!(self.lexer.skip_symbol(Symbol::Semicolon)) { return Ok(()); } loop { let (mut ty, name, _) = try!(self.read_declarator(basety.clone())); // XXX if (qualifiers.q_constexpr || qualifiers.q_const) && try!(self.lexer.skip_symbol(Symbol::Assign)) { let init = try!(self.read_decl_init(&mut ty)); self.env.add(name.clone(), init); } else { if is_typedef { let typedef = AST::new( ASTKind::Typedef(ty, name.to_string()), self.lexer.get_cur_pos(), 
); self.env.add(name, typedef); return Ok(()); } let init = if try!(self.lexer.skip_symbol(Symbol::Assign)) { Some(Box::new(try!(self.read_decl_init(&mut ty)))) } else { None }; self.env.add( name.clone(), AST::new(ASTKind::Variable(ty.clone(), name.clone()), Pos::new(0, 0)), ); ast.push(AST::new( ASTKind::VariableDecl(ty, name, sclass.clone(), init), self.lexer.get_cur_pos(), )); } if try!(self.lexer.skip_symbol(Symbol::Semicolon)) { return Ok(()); } if !try!(self.lexer.skip_symbol(Symbol::Comma)) { let peek = try!(self.lexer.get()); self.show_error_token(&peek, "expected ','"); self.skip_until(Symbol::Semicolon); return Err(Error::Something); } } } fn read_opt_decl_or_stmt(&mut self) -> ParseR<AST> { if try!(self.lexer.skip_symbol(Symbol::Semicolon)) { return Ok(AST::new(ASTKind::Compound(Vec::new()), Pos::new(0, 0))); } let peek_tok = try!(self.lexer.peek()); if self.is_type(&peek_tok) { // variable declaration let mut stmts = Vec::new(); let pos = self.lexer.get_cur_pos(); try!(self.read_decl(&mut stmts)); Ok(AST::new(ASTKind::Compound(stmts), pos)) } else { self.read_stmt() } } // returns (declarator type, name, params{for function}) fn read_declarator(&mut self, basety: Type) -> ParseR<(Type, String, Option<Vec<String>>)> { if try!(self.lexer.skip_symbol(Symbol::OpeningParen)) { let peek_tok = try!(self.lexer.peek()); if self.is_type(&peek_tok) { let (ty, params) = try!(self.read_declarator_func(basety)); return Ok((ty, "".to_string(), params)); } // TODO: HUH? MAKES NO SENSE!! let mut buf: Vec<Token> = Vec::new(); while !try!(self.lexer.skip_symbol(Symbol::ClosingParen)) { buf.push(try!(self.lexer.get())); } let t = try!(self.read_declarator_tail(basety)); self.lexer.unget_all(&buf); return self.read_declarator(t.0); } if try!(self.lexer.skip_symbol(Symbol::Asterisk)) { try!(self.skip_type_qualifiers()); return self.read_declarator(Type::Ptr(Box::new(basety.clone()))); } let tok = try!(self.lexer.get()); if let &TokenKind::Identifier(ref name) = &tok.kind { let (ty, params) = try!(self.read_declarator_tail(basety)); return Ok((ty, name.to_string(), params)); } self.lexer.unget(tok); let (ty, params) = try!(self.read_declarator_tail(basety)); Ok((ty, "".to_string(), params)) } fn read_declarator_tail(&mut self, basety: Type) -> ParseR<(Type, Option<Vec<String>>)> { if try!(self.lexer.skip_symbol(Symbol::OpeningBoxBracket)) { return Ok((try!(self.read_declarator_array(basety)), None)); } if try!(self.lexer.skip_symbol(Symbol::OpeningParen)) { return self.read_declarator_func(basety); } Ok((basety, None)) } fn read_declarator_array(&mut self, basety: Type) -> ParseR<Type> { let len: i32; if try!(self.lexer.skip_symbol(Symbol::ClosingBoxBracket)) { len = -1; } else { len = match try!(self.read_expr()).eval_constexpr() { Ok(len) => len as i32, Err(Error::Something) => { let peek = try!(self.lexer.peek()); self.show_error_token(&peek, "array size must be constant"); 0 } Err(e) => return Err(e), }; expect_symbol_error!(self, Symbol::ClosingBoxBracket, "expected ']'"); } let ty = try!(self.read_declarator_tail(basety)).0; Ok(Type::Array(Box::new(ty), len)) } fn read_declarator_func(&mut self, retty: Type) -> ParseR<(Type, Option<Vec<String>>)> { if try!(self.lexer.peek_keyword_token_is(Keyword::Void)) && try!(self.lexer.next_symbol_token_is(Symbol::ClosingParen)) { try!(self.lexer.expect_skip_keyword(Keyword::Void)); try!(self.lexer.expect_skip_symbol(Symbol::ClosingParen)); return Ok((Type::Func(Box::new(retty), Vec::new(), false), None)); } if 
try!(self.lexer.skip_symbol(Symbol::ClosingParen)) { return Ok((Type::Func(Box::new(retty), Vec::new(), false), None)); } let (paramtypes, paramnames, vararg) = try!(self.read_declarator_params()); Ok(( Type::Func(Box::new(retty), paramtypes, vararg), Some(paramnames), )) } // returns (param types, param names, vararg?) fn read_declarator_params(&mut self) -> ParseR<(Vec<Type>, Vec<String>, bool)> { let mut paramtypes: Vec<Type> = Vec::new(); let mut paramnames: Vec<String> = Vec::new(); loop { if try!(self.lexer.skip_symbol(Symbol::Vararg)) { if paramtypes.len() == 0 { let peek = self.lexer.peek(); self.show_error_token( &try!(peek), "at least one param is required before '...'", ); return Err(Error::Something); } expect_symbol_error!(self, Symbol::ClosingParen, "expected ')'"); return Ok((paramtypes, paramnames, true)); } let (ty, name) = try!(self.read_func_param()); // if reading a parameter of a function to define if self.env.is_local() { self.env.add( name.clone(), AST::new(ASTKind::Variable(ty.clone(), name.clone()), Pos::new(0, 0)), ); } paramtypes.push(ty); paramnames.push(name); if try!(self.lexer.skip_symbol(Symbol::ClosingParen)) { return Ok((paramtypes, paramnames, false)); } if !try!(self.lexer.skip_symbol(Symbol::Comma)) { let peek = self.lexer.peek(); self.show_error_token(&try!(peek), "expected ','"); self.skip_until(Symbol::ClosingParen); return Err(Error::Something); } } } fn read_func_param(&mut self) -> ParseR<(Type, String)> { let basety = try!(self.read_type_spec()).0; let (ty, name, _) = try!(self.read_declarator(basety)); match ty { Type::Array(subst, _) => Ok((Type::Ptr(subst), name)), Type::Func(_, _, _) => Ok((Type::Ptr(Box::new(ty)), name)), _ => Ok((ty, name)), } } fn read_type_spec(&mut self) -> ParseR<(Type, StorageClass, Qualifiers)> { #[derive(PartialEq, Debug, Clone)] enum Size { Short, Normal, Long, LLong, }; #[derive(PartialEq, Debug, Clone)] enum PrimitiveType { Void, Char, Int, Float, Double, }; let mut kind: Option<PrimitiveType> = None; let mut sign: Option<Sign> = None; let mut size = Size::Normal; let mut sclass = StorageClass::Auto; let mut userty: Option<Type> = None; let mut qualifiers = Qualifiers::new(); loop { let tok = try!(self.lexer.get()); if kind.is_none() { if let &TokenKind::Identifier(ref maybe_userty_name) = &tok.kind { let maybe_userty = try!(self.get_typedef(maybe_userty_name)); if maybe_userty.is_some() { return Ok((maybe_userty.unwrap(), sclass, qualifiers)); } } } if !matches!(tok.kind, TokenKind::Keyword(_)) { self.lexer.unget(tok); break; } if let TokenKind::Keyword(keyw) = tok.kind { match &keyw { &Keyword::Typedef => sclass = StorageClass::Typedef, &Keyword::Extern => sclass = StorageClass::Extern, &Keyword::Static => sclass = StorageClass::Static, &Keyword::Auto => sclass = StorageClass::Auto, &Keyword::Register => sclass = StorageClass::Register, &Keyword::Const => qualifiers.q_const = true, &Keyword::ConstExpr => qualifiers.q_constexpr = true, &Keyword::Volatile => qualifiers.q_volatile = true, &Keyword::Inline => qualifiers.q_inline = true, &Keyword::Restrict => qualifiers.q_restrict = true, &Keyword::Noreturn => qualifiers.q_noreturn = true, &Keyword::Void => { if kind.is_some() { let peek = self.lexer.peek(); self.show_error_token(&try!(peek), "type mismatch"); } kind = Some(PrimitiveType::Void); } &Keyword::Char => { if kind.is_some() { let peek = self.lexer.peek(); self.show_error_token(&try!(peek), "type mismatch"); } kind = Some(PrimitiveType::Char); } &Keyword::Int => { if kind.is_some() { let peek = 
self.lexer.peek(); self.show_error_token(&try!(peek), "type mismatch"); } kind = Some(PrimitiveType::Int); } &Keyword::Float => { if kind.is_some() { let peek = self.lexer.peek(); self.show_error_token(&try!(peek), "type mismatch"); } kind = Some(PrimitiveType::Float); } &Keyword::Double => { if kind.is_some() { let peek = self.lexer.peek(); self.show_error_token(&try!(peek), "type mismatch"); } kind = Some(PrimitiveType::Double); } &Keyword::Signed => { if sign.is_some() { let peek = self.lexer.peek(); self.show_error_token(&try!(peek), "type mismatch"); }; sign = Some(Sign::Signed); } &Keyword::Unsigned => { if sign.is_some() { let peek = self.lexer.peek(); self.show_error_token(&try!(peek), "type mismatch"); }; sign = Some(Sign::Unsigned); } &Keyword::Short => size = Size::Short, &Keyword::Long => { if size == Size::Normal { size = Size::Long; } else if size == Size::Long { size = Size::LLong; } } &Keyword::Struct => userty = Some(try!(self.read_struct_def())), &Keyword::Union => userty = Some(try!(self.read_union_def())), &Keyword::Enum => userty = Some(try!(self.read_enum_def())), _ => {} } } else { self.lexer.unget(tok); break; } } // if sign is not expected, // default is Signed if sign.is_none() { sign = Some(Sign::Signed); } // TODO: add err handler if userty.is_some() { return Ok((userty.unwrap(), sclass, qualifiers)); } if kind.is_some() { match kind.unwrap() { PrimitiveType::Void => return Ok((Type::Void, sclass, qualifiers)), PrimitiveType::Char => return Ok((Type::Char(sign.unwrap()), sclass, qualifiers)), PrimitiveType::Float => return Ok((Type::Float, sclass, qualifiers)), PrimitiveType::Double => return Ok((Type::Double, sclass, qualifiers)), _ => {} } } let ty = match size { Size::Short => Type::Short(sign.unwrap()), Size::Normal => Type::Int(sign.unwrap()), Size::Long => Type::Long(sign.unwrap()), Size::LLong => Type::LLong(sign.unwrap()), }; Ok((ty, sclass, qualifiers)) } fn read_struct_def(&mut self) -> ParseR<Type> { self.read_rectype_def(true) } fn read_union_def(&mut self) -> ParseR<Type> { self.read_rectype_def(false) } // rectype is abbreviation of 'record type' fn read_rectype_tag(&mut self) -> ParseR<Option<String>> { let maybe_tag = try!(self.lexer.get()); if let TokenKind::Identifier(maybe_tag_name) = maybe_tag.kind { Ok(Some(maybe_tag_name)) } else { self.lexer.unget(maybe_tag); Ok(None) } } fn read_rectype_def(&mut self, is_struct: bool) -> ParseR<Type> { let tag = { let opt_tag = try!(self.read_rectype_tag()); if opt_tag.is_some() { opt_tag.unwrap() } else { // if the rectype(struct|union) has no name(e.g. 
typedef struct { int a; } A), // generate a random name rand::thread_rng().gen_ascii_chars().take(8).collect() } }; let fields = try!(self.read_rectype_fields()); let cur_tags = self.tags.back_mut().unwrap(); if fields.is_empty() { Ok(match cur_tags.entry(tag) { hash_map::Entry::Occupied(o) => o.get().clone(), hash_map::Entry::Vacant(v) => { let new_struct = if is_struct { Type::Struct(v.key().to_string(), Vec::new()) } else { Type::Union(v.key().to_string(), Vec::new(), 0) }; v.insert(new_struct).clone() } }) } else { let new_rectype = if is_struct { Type::Struct(tag.to_string(), fields) } else { // if union let mut max_sz_ty_nth = 0; let mut max_sz = 0; for (i, field_decl) in (&fields).iter().enumerate() { if let ASTKind::VariableDecl(ref ty, _, _, _) = field_decl.kind { if ty.calc_size() > max_sz { max_sz = ty.calc_size(); max_sz_ty_nth = i; } } } Type::Union(tag.to_string(), fields, max_sz_ty_nth) }; Ok(match cur_tags.entry(tag) { hash_map::Entry::Occupied(o) => { *o.into_mut() = new_rectype.clone(); new_rectype } hash_map::Entry::Vacant(v) => v.insert(new_rectype).clone(), }) } } fn read_rectype_fields(&mut self) -> ParseR<Vec<AST>> { if !try!(self.lexer.skip_symbol(Symbol::OpeningBrace)) { return Ok(Vec::new()); } let mut decls: Vec<AST> = Vec::new(); loop { let peek = try!(self.lexer.peek()); if !self.is_type(&peek) { break; } let (basety, _, _) = try!(self.read_type_spec()); loop { let (ty, name, _) = try!(self.read_declarator(basety.clone())); if try!(self.lexer.skip_symbol(Symbol::Colon)) { // TODO: for now, designated bitwidth ignore try!(self.read_expr()); } decls.push(AST::new( ASTKind::VariableDecl(ty, name, StorageClass::Auto, None), self.lexer.get_cur_pos(), )); if try!(self.lexer.skip_symbol(Symbol::Comma)) { continue; } else { expect_symbol_error!(self, Symbol::Semicolon, "expected ';'"); } break; } } expect_symbol_error!(self, Symbol::ClosingBrace, "expected '}'"); Ok(decls) } fn read_enum_def(&mut self) -> ParseR<Type> { let (tag, exist_tag) = { let opt_tag = try!(self.read_rectype_tag()); if opt_tag.is_some() { (opt_tag.unwrap(), true) } else { ("".to_string(), false) } }; if exist_tag { match self.tags.get(tag.as_str()) { Some(&Type::Enum) => {} None => {} _ => { let peek = self.lexer.peek(); self.show_error_token(&try!(peek), "undefined enum"); return Err(Error::Something); } } } if !try!(self.lexer.skip_symbol(Symbol::OpeningBrace)) { if !exist_tag || !self.tags.contains(tag.as_str()) { let peek = self.lexer.peek(); self.show_error_token(&try!(peek), "do not redefine enum"); return Err(Error::Something); } return Ok(Type::Int(Sign::Signed)); } if exist_tag { self.tags.add(tag, Type::Enum); } let mut val = 0; loop { if try!(self.lexer.skip_symbol(Symbol::ClosingBrace)) { break; } let name = ident_val!(try!(self.lexer.get())); if try!(self.lexer.skip_symbol(Symbol::Assign)) { val = match try!(self.read_assign()).eval_constexpr() { Ok(val) => val, Err(Error::Something) => { let peek = try!(self.lexer.peek()); self.show_error_token(&peek, "enum initialize value must be constant"); 0 } Err(e) => return Err(e), }; } let constval = AST::new(ASTKind::Int(val, Bits::Bits32), self.lexer.get_cur_pos()); val += 1; self.env.add(name, constval); if try!(self.lexer.skip_symbol(Symbol::Comma)) { continue; } if try!(self.lexer.skip_symbol(Symbol::OpeningBrace)) { break; } } Ok(Type::Int(Sign::Signed)) } pub fn read_expr(&mut self) -> ParseR<AST> { self.read_comma()<|fim▁hole|> } pub fn read_opt_expr(&mut self) -> ParseR<AST> { if try!(self.lexer.peek()).kind == 
TokenKind::Symbol(Symbol::Semicolon) { Ok(AST::new( ASTKind::Compound(Vec::new()), self.lexer.get_cur_pos(), )) } else { self.read_expr() } } fn read_comma(&mut self) -> ParseR<AST> { let mut lhs = try!(self.read_assign()); while try!(self.lexer.skip_symbol(Symbol::Comma)) { let rhs = try!(self.read_assign()); lhs = AST::new( ASTKind::BinaryOp(Box::new(lhs), Box::new(rhs), node::CBinOps::Comma), self.lexer.get_cur_pos(), ) } Ok(lhs) } fn read_assign(&mut self) -> ParseR<AST> { let mut lhs = try!(self.read_logor()); if try!(self.lexer.skip_symbol(Symbol::Question)) { return self.read_ternary(lhs); } macro_rules! assign { ($lhs:expr, $rhs:expr, $pos:expr) => ( AST::new( ASTKind::BinaryOp(Box::new($lhs), Box::new($rhs), node::CBinOps::Assign), $pos ) ) } macro_rules! f { ($op:ident) => ( lhs = assign!( lhs.clone(), AST::new( ASTKind::BinaryOp( Box::new(lhs), Box::new(try!(self.read_assign())), node::CBinOps::$op, ), self.lexer.get_cur_pos(), ), self.lexer.get_cur_pos() ); ) } loop { let tok = try!(self.lexer.get()); match tok.kind { TokenKind::Symbol(Symbol::Assign) => { lhs = assign!(lhs, try!(self.read_assign()), self.lexer.get_cur_pos()); } TokenKind::Symbol(Symbol::AssignAdd) => f!(Add), TokenKind::Symbol(Symbol::AssignSub) => f!(Sub), TokenKind::Symbol(Symbol::AssignMul) => f!(Mul), TokenKind::Symbol(Symbol::AssignDiv) => f!(Div), TokenKind::Symbol(Symbol::AssignMod) => f!(Rem), TokenKind::Symbol(Symbol::AssignShl) => f!(Shl), TokenKind::Symbol(Symbol::AssignShr) => f!(Shr), TokenKind::Symbol(Symbol::AssignAnd) => f!(And), TokenKind::Symbol(Symbol::AssignOr) => f!(Or), TokenKind::Symbol(Symbol::AssignXor) => f!(Xor), // TODO: implement more op _ => { self.lexer.unget(tok); break; } } } Ok(lhs) } fn read_ternary(&mut self, cond: AST) -> ParseR<AST> { let mut then_expr = try!(self.read_expr()); expect_symbol_error!(self, Symbol::Colon, "expected ':'"); let mut else_expr = try!(self.read_assign()); let then_ty = try!(self.get_expr_returning_ty(&then_expr)); let else_ty = try!(self.get_expr_returning_ty(&else_expr)); if then_ty.is_arith_ty() && else_ty.is_arith_ty() { let ty = self.usual_binary_ty_cov(then_ty, else_ty); then_expr = self.cast_ast(&then_expr, &ty); else_expr = self.cast_ast(&else_expr, &ty); } Ok(AST::new( ASTKind::TernaryOp(Box::new(cond), Box::new(then_expr), Box::new(else_expr)), self.lexer.get_cur_pos(), )) } fn read_logor(&mut self) -> ParseR<AST> { let mut lhs = try!(self.read_logand()); while try!(self.lexer.skip_symbol(Symbol::LOr)) { let rhs = try!(self.read_logand()); lhs = AST::new( ASTKind::BinaryOp(Box::new(lhs), Box::new(rhs), node::CBinOps::LOr), self.lexer.get_cur_pos(), ); } Ok(lhs) } fn read_logand(&mut self) -> ParseR<AST> { let mut lhs = try!(self.read_or()); while try!(self.lexer.skip_symbol(Symbol::LAnd)) { let rhs = try!(self.read_or()); lhs = AST::new( ASTKind::BinaryOp(Box::new(lhs), Box::new(rhs), node::CBinOps::LAnd), self.lexer.get_cur_pos(), ); } Ok(lhs) } fn read_or(&mut self) -> ParseR<AST> { let mut lhs = try!(self.read_xor()); while try!(self.lexer.skip_symbol(Symbol::Or)) { let rhs = try!(self.read_xor()); lhs = AST::new( ASTKind::BinaryOp(Box::new(lhs), Box::new(rhs), node::CBinOps::Or), self.lexer.get_cur_pos(), ); } Ok(lhs) } fn read_xor(&mut self) -> ParseR<AST> { let mut lhs = try!(self.read_and()); while try!(self.lexer.skip_symbol(Symbol::Xor)) { let rhs = try!(self.read_and()); lhs = AST::new( ASTKind::BinaryOp(Box::new(lhs), Box::new(rhs), node::CBinOps::Xor), self.lexer.get_cur_pos(), ); } Ok(lhs) } fn read_and(&mut self) -> 
ParseR<AST> { let mut lhs = try!(self.read_eq_ne()); while try!(self.lexer.skip_symbol(Symbol::Ampersand)) { let rhs = try!(self.read_eq_ne()); lhs = AST::new( ASTKind::BinaryOp(Box::new(lhs), Box::new(rhs), node::CBinOps::And), self.lexer.get_cur_pos(), ); } Ok(lhs) } fn read_eq_ne(&mut self) -> ParseR<AST> { let mut lhs = try!(self.read_relation()); loop { if try!(self.lexer.skip_symbol(Symbol::Eq)) { let rhs = try!(self.read_relation()); lhs = AST::new( ASTKind::BinaryOp(Box::new(lhs), Box::new(rhs), node::CBinOps::Eq), self.lexer.get_cur_pos(), ); } else if try!(self.lexer.skip_symbol(Symbol::Ne)) { let rhs = try!(self.read_relation()); lhs = AST::new( ASTKind::BinaryOp(Box::new(lhs), Box::new(rhs), node::CBinOps::Ne), self.lexer.get_cur_pos(), ); } else { break; } } Ok(lhs) } fn read_relation(&mut self) -> ParseR<AST> { let mut lhs = try!(self.read_shl_shr()); loop { if try!(self.lexer.skip_symbol(Symbol::Lt)) { let rhs = try!(self.read_shl_shr()); lhs = AST::new( ASTKind::BinaryOp(Box::new(lhs), Box::new(rhs), node::CBinOps::Lt), self.lexer.get_cur_pos(), ); } else if try!(self.lexer.skip_symbol(Symbol::Le)) { let rhs = try!(self.read_shl_shr()); lhs = AST::new( ASTKind::BinaryOp(Box::new(lhs), Box::new(rhs), node::CBinOps::Le), self.lexer.get_cur_pos(), ); } else if try!(self.lexer.skip_symbol(Symbol::Gt)) { let rhs = try!(self.read_shl_shr()); lhs = AST::new( ASTKind::BinaryOp(Box::new(lhs), Box::new(rhs), node::CBinOps::Gt), self.lexer.get_cur_pos(), ); } else if try!(self.lexer.skip_symbol(Symbol::Ge)) { let rhs = try!(self.read_shl_shr()); lhs = AST::new( ASTKind::BinaryOp(Box::new(lhs), Box::new(rhs), node::CBinOps::Ge), self.lexer.get_cur_pos(), ); } else { break; } } Ok(lhs) } fn read_shl_shr(&mut self) -> ParseR<AST> { let mut lhs = try!(self.read_add_sub()); loop { if try!(self.lexer.skip_symbol(Symbol::Shl)) { let rhs = try!(self.read_add_sub()); lhs = AST::new( ASTKind::BinaryOp(Box::new(lhs), Box::new(rhs), node::CBinOps::Shl), self.lexer.get_cur_pos(), ); } else if try!(self.lexer.skip_symbol(Symbol::Shr)) { let rhs = try!(self.read_add_sub()); lhs = AST::new( ASTKind::BinaryOp(Box::new(lhs), Box::new(rhs), node::CBinOps::Shr), self.lexer.get_cur_pos(), ); } else { break; } } Ok(lhs) } fn read_add_sub(&mut self) -> ParseR<AST> { let mut lhs = try!(self.read_mul_div_rem()); loop { if try!(self.lexer.skip_symbol(Symbol::Add)) { let rhs = try!(self.read_mul_div_rem()); lhs = AST::new( ASTKind::BinaryOp(Box::new(lhs), Box::new(rhs), node::CBinOps::Add), self.lexer.get_cur_pos(), ); } else if try!(self.lexer.skip_symbol(Symbol::Sub)) { let rhs = try!(self.read_mul_div_rem()); lhs = AST::new( ASTKind::BinaryOp(Box::new(lhs), Box::new(rhs), node::CBinOps::Sub), self.lexer.get_cur_pos(), ); } else { break; } } Ok(lhs) } fn read_mul_div_rem(&mut self) -> ParseR<AST> { let mut lhs = try!(self.read_cast()); loop { if try!(self.lexer.skip_symbol(Symbol::Asterisk)) { let rhs = try!(self.read_cast()); lhs = AST::new( ASTKind::BinaryOp(Box::new(lhs), Box::new(rhs), node::CBinOps::Mul), self.lexer.get_cur_pos(), ); } else if try!(self.lexer.skip_symbol(Symbol::Div)) { let rhs = try!(self.read_cast()); lhs = AST::new( ASTKind::BinaryOp(Box::new(lhs), Box::new(rhs), node::CBinOps::Div), self.lexer.get_cur_pos(), ); } else if try!(self.lexer.skip_symbol(Symbol::Mod)) { let rhs = try!(self.read_cast()); lhs = AST::new( ASTKind::BinaryOp(Box::new(lhs), Box::new(rhs), node::CBinOps::Rem), self.lexer.get_cur_pos(), ); } else { break; } } Ok(lhs) } fn read_cast(&mut self) -> ParseR<AST> { let 
tok = try!(self.lexer.get()); let peek = try!(self.lexer.peek()); if tok.kind == TokenKind::Symbol(Symbol::OpeningParen) && self.is_type(&peek) { let basety = try!(self.read_type_spec()).0; let ty = try!(self.read_declarator(basety)).0; expect_symbol_error!(self, Symbol::ClosingParen, "expected ')'"); return Ok(AST::new( ASTKind::TypeCast(Box::new(try!(self.read_cast())), ty), self.lexer.get_cur_pos(), )); } else { self.lexer.unget(tok); } self.read_unary() } fn read_unary(&mut self) -> ParseR<AST> { let tok = try!(self.lexer.get()); match tok.kind { TokenKind::Symbol(Symbol::Not) => { return Ok(AST::new( ASTKind::UnaryOp(Box::new(try!(self.read_cast())), node::CUnaryOps::LNot), self.lexer.get_cur_pos(), )) } TokenKind::Symbol(Symbol::BitwiseNot) => { return Ok(AST::new( ASTKind::UnaryOp(Box::new(try!(self.read_cast())), node::CUnaryOps::BNot), self.lexer.get_cur_pos(), )) } TokenKind::Symbol(Symbol::Add) => return self.read_cast(), TokenKind::Symbol(Symbol::Sub) => { return Ok(AST::new( ASTKind::UnaryOp(Box::new(try!(self.read_cast())), node::CUnaryOps::Minus), self.lexer.get_cur_pos(), )) } TokenKind::Symbol(Symbol::Inc) => { let pos = self.lexer.get_cur_pos(); let var = try!(self.read_cast()); return Ok(AST::new( ASTKind::BinaryOp( Box::new(var.clone()), Box::new(AST::new( ASTKind::BinaryOp( Box::new(var), Box::new(AST::new(ASTKind::Int(1, Bits::Bits32), pos.clone())), node::CBinOps::Add, ), pos.clone(), )), node::CBinOps::Assign, ), pos, )); } TokenKind::Symbol(Symbol::Dec) => { let pos = self.lexer.get_cur_pos(); let var = try!(self.read_cast()); return Ok(AST::new( ASTKind::BinaryOp( Box::new(var.clone()), Box::new(AST::new( ASTKind::BinaryOp( Box::new(var), Box::new(AST::new(ASTKind::Int(1, Bits::Bits32), pos.clone())), node::CBinOps::Sub, ), pos.clone(), )), node::CBinOps::Assign, ), pos, )); } TokenKind::Symbol(Symbol::Asterisk) => { return Ok(AST::new( ASTKind::UnaryOp(Box::new(try!(self.read_cast())), node::CUnaryOps::Deref), self.lexer.get_cur_pos(), )) } TokenKind::Symbol(Symbol::Ampersand) => { return Ok(AST::new( ASTKind::UnaryOp(Box::new(try!(self.read_cast())), node::CUnaryOps::Addr), self.lexer.get_cur_pos(), )) } TokenKind::Symbol(Symbol::Sizeof) => { // TODO: must fix this sloppy implementation return self.read_sizeof(); } _ => {} } self.lexer.unget(tok); self.read_postfix() } fn read_sizeof(&mut self) -> ParseR<AST> { let tok = try!(self.lexer.get()); let peek = try!(self.lexer.peek()); if matches!(tok.kind, TokenKind::Symbol(Symbol::OpeningParen)) && self.is_type(&peek) { let (basety, _, _) = try!(self.read_type_spec()); let (ty, _, _) = try!(self.read_declarator(basety)); try!(self.lexer.skip_symbol(Symbol::ClosingParen)); return Ok(AST::new( ASTKind::Int(ty.calc_size() as i64, Bits::Bits32), self.lexer.get_cur_pos(), )); } self.lexer.unget(tok); let expr = try!(self.read_unary()); Ok(AST::new( ASTKind::Int(try!(self.calc_sizeof(&expr)) as i64, Bits::Bits32), self.lexer.get_cur_pos(), )) } fn read_postfix(&mut self) -> ParseR<AST> { let mut ast = try!(self.read_primary()); loop { if try!(self.lexer.skip_symbol(Symbol::OpeningParen)) { ast = try!(self.read_func_call(ast)); continue; } if try!(self.lexer.skip_symbol(Symbol::OpeningBoxBracket)) { ast = AST::new( ASTKind::Load(Box::new(try!(self.read_index(ast)))), self.lexer.get_cur_pos(), ); continue; } if try!(self.lexer.skip_symbol(Symbol::Point)) { ast = AST::new( ASTKind::Load(Box::new(try!(self.read_field(ast)))), self.lexer.get_cur_pos(), ); continue; } if try!(self.lexer.skip_symbol(Symbol::Arrow)) { let pos = 
self.lexer.get_cur_pos(); let field = try!(self.read_field(AST::new( ASTKind::UnaryOp(Box::new(ast), node::CUnaryOps::Deref), pos.clone() ))); ast = AST::new(ASTKind::Load(Box::new(field)), pos); continue; } if try!(self.lexer.skip_symbol(Symbol::Inc)) { return Ok(AST::new( ASTKind::UnaryOp(Box::new(ast), node::CUnaryOps::Inc), self.lexer.get_cur_pos(), )); } if try!(self.lexer.skip_symbol(Symbol::Dec)) { return Ok(AST::new( ASTKind::UnaryOp(Box::new(ast), node::CUnaryOps::Dec), self.lexer.get_cur_pos(), )); } break; } Ok(ast) } fn read_func_call(&mut self, f: AST) -> ParseR<AST> { let pos = self.lexer.get_cur_pos(); let mut args = Vec::new(); if !try!(self.lexer.skip_symbol(Symbol::ClosingParen)) { loop { match self.read_assign() { Ok(arg) => args.push(arg), Err(_) => {} } if try!(self.lexer.skip_symbol(Symbol::ClosingParen)) { break; } if !try!(self.lexer.skip_symbol(Symbol::Comma)) { let peek = self.lexer.peek(); self.show_error_token(&try!(peek), "expected ','"); self.skip_until(Symbol::ClosingParen); return Err(Error::Something); } } } Ok(AST::new(ASTKind::FuncCall(Box::new(f), args), pos)) } fn read_index(&mut self, ast: AST) -> ParseR<AST> { let idx = try!(self.read_expr()); expect_symbol_error!(self, Symbol::ClosingBoxBracket, "expected ']'"); Ok(AST::new( ASTKind::BinaryOp(Box::new(ast), Box::new(idx), node::CBinOps::Add), self.lexer.get_cur_pos(), )) } fn read_field(&mut self, ast: AST) -> ParseR<AST> { let field = try!(self.lexer.get()); if !matches!(field.kind, TokenKind::Identifier(_)) { let peek = self.lexer.peek(); self.show_error_token(&try!(peek), "expected field name"); return Err(Error::Something); } let field_name = ident_val!(field); Ok(AST::new( ASTKind::StructRef(Box::new(ast), field_name), self.lexer.get_cur_pos(), )) } fn read_primary(&mut self) -> ParseR<AST> { let tok = match self.lexer.get() { Ok(tok) => tok, Err(_) => { let peek = self.lexer.peek(); self.show_error_token( &try!(peek), "expected primary(number, string...), but reach EOF", ); return Err(Error::Something); } }; match tok.kind.clone() { TokenKind::IntNumber(n, bits) => { Ok(AST::new(ASTKind::Int(n, bits), self.lexer.get_cur_pos())) } TokenKind::FloatNumber(f) => Ok(AST::new(ASTKind::Float(f), self.lexer.get_cur_pos())), TokenKind::Identifier(ident) => { if let Some(ast) = self.env.get(ident.as_str()) { return match ast.kind { ASTKind::Variable(_, _) => Ok(AST::new( ASTKind::Load(Box::new((*ast).clone())), self.lexer.get_cur_pos(), )), _ => Ok((*ast).clone()), }; } self.show_error_token( &tok, format!("not found the variable or function '{}'", ident).as_str(), ); Err(Error::Something) } TokenKind::String(s) => Ok(AST::new(ASTKind::String(s), self.lexer.get_cur_pos())), TokenKind::Char(ch) => Ok(AST::new(ASTKind::Char(ch as i32), self.lexer.get_cur_pos())), TokenKind::Symbol(sym) => match sym { Symbol::OpeningParen => { let expr = self.read_expr(); if !try!(self.lexer.skip_symbol(Symbol::ClosingParen)) { self.show_error_token(&tok, "expected ')'"); } expr } _ => { self.show_error_token( &tok, format!("expected primary section, but got {:?}", tok.kind).as_str(), ); Err(Error::Something) } }, _ => { self.show_error_token( &tok, format!("read_primary unknown token {:?}", tok.kind).as_str(), ); Err(Error::Something) } } } fn usual_binary_ty_cov(&mut self, lhs: Type, rhs: Type) -> Type { if lhs.priority() < rhs.priority() { rhs } else { lhs } } fn get_binary_expr_ty(&mut self, lhs: &AST, rhs: &AST, op: &node::CBinOps) -> ParseR<Type> { fn cast(ty: Type) -> Type { match ty { Type::Array(elem_ty, _) => 
Type::Ptr(elem_ty), Type::Func(_, _, _) => Type::Ptr(Box::new(ty)), _ => ty, } } let lhs_ty = cast(try!(self.get_expr_returning_ty(lhs))); let rhs_ty = cast(try!(self.get_expr_returning_ty(rhs))); if matches!(lhs_ty, Type::Ptr(_)) && matches!(rhs_ty, Type::Ptr(_)) { if matches!(op, &node::CBinOps::Sub) { return Ok(Type::Long(Sign::Signed)); } return Ok(Type::Int(Sign::Signed)); } if matches!(lhs_ty, Type::Ptr(_)) { return Ok(lhs_ty); } if matches!(rhs_ty, Type::Ptr(_)) { return Ok(rhs_ty); } return Ok(self.usual_binary_ty_cov(lhs_ty, rhs_ty)); } fn get_expr_returning_ty(&mut self, ast: &AST) -> ParseR<Type> { let size = match ast.kind { ASTKind::Int(_, Bits::Bits32) => Type::Int(Sign::Signed), ASTKind::Int(_, Bits::Bits64) => Type::Long(Sign::Signed), ASTKind::Float(_) => Type::Double, ASTKind::Char(_) => Type::Char(Sign::Signed), ASTKind::String(ref s) => { Type::Array(Box::new(Type::Char(Sign::Signed)), s.len() as i32 + 1) } ASTKind::Load(ref v) => { (*try!(self.get_expr_returning_ty(&*v)).get_elem_ty().unwrap()).clone() } ASTKind::Variable(ref ty, _) => Type::Ptr(Box::new((*ty).clone())), ASTKind::UnaryOp(_, node::CUnaryOps::LNot) => Type::Int(Sign::Signed), ASTKind::UnaryOp(ref expr, node::CUnaryOps::Minus) | ASTKind::UnaryOp(ref expr, node::CUnaryOps::Inc) | ASTKind::UnaryOp(ref expr, node::CUnaryOps::Dec) | ASTKind::UnaryOp(ref expr, node::CUnaryOps::BNot) => { try!(self.get_expr_returning_ty(&*expr)) } ASTKind::UnaryOp(ref expr, node::CUnaryOps::Deref) => (*try!( self.get_expr_returning_ty(&*expr) ).get_elem_ty() .unwrap()) .clone(), ASTKind::UnaryOp(ref expr, node::CUnaryOps::Addr) => { Type::Ptr(Box::new(try!(self.get_expr_returning_ty(&*expr)))) } ASTKind::StructRef(ref expr, ref name) => { let ty = try!(self.get_expr_returning_ty(expr)); Type::Ptr(Box::new((*ty.get_field_ty(name.as_str()).unwrap()).clone())) } ASTKind::TypeCast(_, ref ty) => ty.clone(), ASTKind::BinaryOp(ref lhs, ref rhs, ref op) => { try!(self.get_binary_expr_ty(&*lhs, &*rhs, &*op)) } ASTKind::TernaryOp(_, ref then, _) => try!(self.get_expr_returning_ty(&*then)), ASTKind::FuncCall(ref func, _) => { let func_ty = try!(self.get_expr_returning_ty(func)); (*func_ty.get_return_ty().unwrap()).clone() } _ => panic!(format!("unsupported: {:?}", ast.kind)), }; Ok(size) } fn calc_sizeof(&mut self, ast: &AST) -> ParseR<usize> { let ty = try!(self.get_expr_returning_ty(ast)); Ok(ty.calc_size()) } fn cast_ast(&mut self, expr: &AST, ty: &Type) -> AST { AST::new( ASTKind::TypeCast(Box::new(expr.clone()), ty.clone()), expr.pos.clone(), ) } }<|fim▁end|>
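The Rust record above ends with the parser's prefix-increment handling: ++x is desugared at parse time into the assignment x = x + 1 instead of getting a dedicated AST node. A toy sketch of the same desugaring idea; all names here are hypothetical and not part of the original compiler:

```python
# Illustrative only: mirrors how the parser builds BinaryOp(Assign, BinaryOp(Add, x, 1)).
from dataclasses import dataclass

@dataclass
class Var:
    name: str

@dataclass
class Int:
    value: int

@dataclass
class BinOp:
    lhs: object
    rhs: object
    op: str  # 'add' or 'assign'

def desugar_pre_increment(var: Var) -> BinOp:
    # ++x becomes x = (x + 1), the same shape the Rust code builds
    # with CBinOps::Assign wrapping CBinOps::Add
    return BinOp(var, BinOp(var, Int(1), 'add'), 'assign')

print(desugar_pre_increment(Var('x')))
```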
<|file_name|>PostCategory.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>import {ManyToMany} from "../../../src/decorator/relations/ManyToMany"; import {AfterRemove} from "../../../src/decorator/listeners/AfterRemove"; import {BeforeRemove} from "../../../src/decorator/listeners/BeforeRemove"; import {AfterUpdate} from "../../../src/decorator/listeners/AfterUpdate"; import {BeforeUpdate} from "../../../src/decorator/listeners/BeforeUpdate"; import {AfterInsert} from "../../../src/decorator/listeners/AfterInsert"; import {BeforeInsert} from "../../../src/decorator/listeners/BeforeInsert"; @Entity("sample9_post_category") export class PostCategory { @PrimaryGeneratedColumn() id: number; @Column() name: string; @ManyToMany(type => Post, post => post.categories, { cascadeInsert: true, cascadeUpdate: true }) posts: Post[] = []; @BeforeInsert() doSomethingBeforeInsertion() { console.log(`event: PostCategory "${this.name}" will be inserted so soon...`); } @AfterInsert() doSomethingAfterInsertion() { console.log(`event: PostCategory "${this.name}" has been inserted and callback executed`); } @BeforeUpdate() doSomethingBeforeUpdate() { console.log(`event: PostCategory "${this.name}" will be updated so soon...`); } @AfterUpdate() doSomethingAfterUpdate() { console.log(`event: PostCategory "${this.name}" has been updated and callback executed`); } @BeforeRemove() doSomethingBeforeRemove() { console.log(`event: PostCategory "${this.name}" will be removed so soon...`); } @AfterRemove() doSomethingAfterRemove() { console.log(`event: PostCategory "${this.name}" has been removed and callback executed`); } }<|fim▁end|>
import {PrimaryGeneratedColumn, Column, Entity} from "../../../src/index"; import {Post} from "./Post";
<|file_name|>get_test.go<|end_file_name|><|fim▁begin|>package zfs_test import ( "fmt"<|fim▁hole|> "github.com/cerana/cerana/acomm" zfsp "github.com/cerana/cerana/providers/zfs" ) func (s *zfs) TestGet() { tests := []struct { args *zfsp.CommonArgs err string }{ {&zfsp.CommonArgs{Name: ""}, "missing arg: name"}, {&zfsp.CommonArgs{Name: "ds_no_exist"}, enoent}, {&zfsp.CommonArgs{Name: "fs"}, ""}, {&zfsp.CommonArgs{Name: "fs/1snap@snap"}, ""}, {&zfsp.CommonArgs{Name: "vol/1snap"}, ""}, } for _, test := range tests { if test.args.Name != "" { test.args.Name = filepath.Join(s.pool, test.args.Name) } argsS := fmt.Sprintf("%+v", test.args) req, err := acomm.NewRequest(acomm.RequestOptions{ Task: "zfs-get", ResponseHook: s.responseHook, Args: test.args, }) s.Require().NoError(err, argsS) res, streamURL, err := s.zfs.Get(req) s.Empty(streamURL, argsS) if test.err == "" { s.NoError(err, argsS) if !s.NotNil(res, argsS) { continue } result, ok := res.(*zfsp.DatasetResult) if !s.True(ok) { continue } if !s.NotNil(result.Dataset) { continue } s.Equal(test.args.Name, result.Dataset.Name, argsS) } else { s.Nil(res, argsS) s.EqualError(err, test.err, argsS) } } }<|fim▁end|>
"path/filepath"
<|file_name|>middlewares.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # (c) 2018 Alberto Planas <[email protected]> # # This file is part of KManga. # # KManga is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # KManga is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with KManga. If not, see <http://www.gnu.org/licenses/>. import logging import os.path import re import time import urllib.parse import scrapy from spidermonkey import Spidermonkey import django django.setup() from proxy.models import Proxy from proxy.utils import needs_proxy logger = logging.getLogger(__name__) class RetryPartial(object): """Middleware to consider partial results as errors.""" def __init__(self, settings): self.error_codes = { int(x) for x in settings.getlist('SMART_PROXY_ERROR_CODES') } @classmethod def from_crawler(cls, crawler): return cls(crawler.settings) def process_response(self, request, response, spider): logger.debug('Process respose - url: %s, status: %s, ' 'flags: %s' % (request.url, response.status, response.flags)) is_partial = 'partial' in response.flags if is_partial and response.status not in self.error_codes: # Partial results, not considered as errors, are marked as # incorrect. logger.debug('Partial result - url: %s' % request.url) response.status = 500 return response class SmartProxy(object): """Middleware to add a proxy to certain requests.""" def __init__(self, settings): self.error_codes = { int(x) for x in settings.getlist('SMART_PROXY_ERROR_CODES') } self.retry_error_codes = { int(x) for x in settings.getlist('RETRY_HTTP_CODES') } @classmethod def from_crawler(cls, crawler): return cls(crawler.settings) def process_request(self, request, spider): # The proxy only works if the request comes from a spider that # have an operation associated (`catalog`, `collection`, etc) has_operation = hasattr(spider, '_operation') operations = ('catalog', 'collection', 'latest', 'manga') if not has_operation or spider._operation not in operations: return logger.debug('Process request - proxy: %s, url: %s' % ( request.meta['proxy'] if 'proxy' in request.meta else 'no', request.url)) # If the proxy is already set, we are done if 'proxy' in request.meta: return if needs_proxy(spider.name): proxy = Proxy.objects.get_one(spider.name) if proxy: logger.info('Using proxy <%s> for request' % proxy) request.meta['proxy'] = 'http://%s' % proxy.proxy # Disable redirection when a proxy is in use request.meta['dont_redirect'] = True else: logger.error('No proxy found for %s' % spider.name) def process_response(self, request, response, spider): if 'proxy' in request.meta: logger.debug('Process respose - proxy: %s, url: %s, ' 'status: %s, flags: %s' % ( request.meta['proxy'], request.url, response.status, response.flags)) if response.status in self.retry_error_codes: self._delete_proxy_from_request(request, spider) elif response.status in self.error_codes: # Some of the error codes are redirects, we need to # check if this a valid redirect, to maintain the # proxy and enable the redirect. 
redirect = response.headers.get('Location', None) valid = self._valid_redirect(response.status, request.url, redirect) if valid: logger.debug('Valid redirect - proxy: %s, from: %s, ' 'to: %s, status: %s' % ( request.meta['proxy'], request.url, redirect, response.status)) # If valid, re-enable redirection if 'dont_redirect' in request.meta: del request.meta['dont_redirect'] else: # If the status is one of the error codes that is # not in the retry error code, we need to map as # one of them, like HTTP 500. logger.debug('Invalid redirect - proxy: %s, from: %s, ' 'to: %s, status: %s' % ( request.meta['proxy'], request.url, redirect, response.status)) self._map_status_error(response) self._delete_proxy_from_request(request, spider) return response def process_exception(self, request, exception, spider): if 'proxy' in request.meta: logger.debug('Process exception - proxy: %s, url: %s, ' 'exception: %s' % (request.meta['proxy'], request.url, exception)) self._delete_proxy_from_request(request, spider) def _map_status_error(self, response): """Set status code as 500 and remove the Content-Encoding.""" # Some proxies set the Content-Encoding section for partial # results, or redirects (that do not containt data). This can # cause problems in the httpcompression middleware. response.status = 500 if 'Content-Encoding' in response.headers: del response.headers['Content-Encoding'] def _delete_proxy_from_request(self, request, spider): proxy = request.meta['proxy'].lstrip('htp:/') del request.meta['proxy'] Proxy.objects.discard(proxy, spider.name) logger.warning('Removing failed proxy <%s>, %d proxies left' % ( proxy, Proxy.objects.remainings(spider=spider.name)))<|fim▁hole|> # Check that status code is a redirection if not 300 <= status < 400: return False # Same domain check bn_from = os.path.basename(urllib.parse.urlparse(url_from).path) bn_to = os.path.basename(urllib.parse.urlparse(url_to).path) if bn_from != bn_to: return False # Ends in .html check if not url_to.endswith('.html'): return False return True class VHost(object): """Middleware to replace the host name with the IP.""" def process_request(self, request, spider): """Replace the host name with the IP.""" if hasattr(spider, 'vhost_ip'): for domain in spider.allowed_domains: ip = spider.vhost_ip url = re.sub(r'(www.)?%s' % domain, ip, request.url) # During the second pass, both URL are the same (there # is not replacement) if request.url != url: request = request.replace(url=url, headers={'Host': domain}) return request def process_response(self, request, response, spider): """Replace back the IP with the host name.""" if hasattr(spider, 'vhost_ip'): headers = request.headers.to_unicode_dict() domain = headers.get('Host', spider.allowed_domains[0]) ip = spider.vhost_ip url = re.sub(ip, domain, response.url) response = response.replace(url=url) return response class CloudFlare(object): """Middleware to bypass the CloudFlare protection.""" def process_response(self, request, response, spider): """Resolve the CloudFlare challenge.""" request_response = response if hasattr(spider, 'cloudflare') and spider.cloudflare: if response.status == 503 and response.headers['Server']: logger.debug('CloudFlare challenge detected') request_response = self._cloudflare(request, response, spider) # We resolve it once per request spider.cloudflare = False return request_response def _cloudflare(self, request, response, spider): """Resolve the CloudFlare challenge.""" # Extract the URL from the form xp = '//form/@action' url = 
response.xpath(xp).extract_first() url = response.urljoin(url) domain = spider.allowed_domains[0] # Extract the parameters from the form xp = '//form/input[@name="jschl_vc"]/@value' jschl_vc = response.xpath(xp).extract_first() xp = '//form/input[@name="pass"]/@value' pass_ = response.xpath(xp).extract_first() if jschl_vc and pass_: # Extract the JavaScript snippets that can be evaluated xp = '//script/text()' init = response.xpath(xp).re_first(r'var s,t,o,p.*') challenge = response.xpath(xp).re_first(r'(.*;)a.value') variable = response.xpath(xp).re_first(r'\s+;(\w+\.\w+).=') result = 'print((%s+%s).toFixed(10))' % (variable, len(domain)) code = (init, challenge) proc = Spidermonkey(early_script_file='-', code=code) stdout, stderr = proc.communicate(result) jschl_answer = stdout.strip() logger.debug('Challenge response: %s', jschl_answer) # Generate the new request formdata = { 'jschl_vc': jschl_vc, 'pass': pass_, 'jschl_answer': jschl_answer, } original_url = request.url request = scrapy.FormRequest.from_response( response, formdata=formdata) request.headers['Referer'] = original_url # XXX TODO - Is there a way to delay this single request? time.sleep(4) return request else: # The challenge changed and the code is outdated logger.error('CloudFlare challenge changed. Please update') return response<|fim▁end|>
def _valid_redirect(self, status, url_from, url_to): """Heuristically decide whether a redirect is legitimate: it must carry a 3xx status, keep the same final path component, and land on a .html page."""
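The completion restores _valid_redirect, whose three checks decide whether a proxied redirect should be trusted. A standalone restatement of those heuristics, runnable without Scrapy or Django:

```python
# Same decision logic as _valid_redirect above, as a free function.
import os.path
import urllib.parse

def valid_redirect(status: int, url_from: str, url_to: str) -> bool:
    if not 300 <= status < 400:      # must be an HTTP redirect
        return False
    bn_from = os.path.basename(urllib.parse.urlparse(url_from).path)
    bn_to = os.path.basename(urllib.parse.urlparse(url_to).path)
    if bn_from != bn_to:             # final path component must be unchanged
        return False
    return url_to.endswith('.html')  # only .html targets are accepted

assert valid_redirect(302, 'http://a/x/p.html', 'http://b/y/p.html')
assert not valid_redirect(200, 'http://a/p.html', 'http://a/p.html')
```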
<|file_name|>pasta.ts<|end_file_name|><|fim▁begin|>import { Recipe, RecipeContainer } from '../class/recipe'; import { Items as i } from '../constants/items'; import { Equipment as e } from '../class/equipment'; import { Categories as c } from '../constants/categories'; import { Units as u } from '../constants/units'; import { Text as text } from '../class/text'; import { Timer } from '../class/timer'; export class MealRecipe extends RecipeContainer { constructor() { super(); this.recipeGroup = c.meal;<|fim▁hole|> ] } } export class LentilPenne extends Recipe { constructor() { super(); this.steps = [ e.pot().add([ i.water(18, u.second), ]), Timer.set(10, 'm', 'Wait for water to boil'), i.Groups.mushroom(4, u.unit).cutIntoStrips(), Timer.end(), e.pot().add([ i.lentilSpaghetti(8, u.ounce), ]), Timer.set(10, 'm', 'let lentil penne cook'), e.pan().add([ i.Groups.mushroom(4, u.unit), ]), e.pan().cook(8, 'm'), e.pan().add([ i.spaghettiSauce(25, u.ounce), ]), e.pan().cook(2, 'm'), Timer.end(), Timer.end(['pan']), Timer.end(['pot']), text.set(['Top with', i.parmesanCheese(8, u.ounce)]), ]; } } export class LentilSpaghetti extends Recipe { constructor() { super(); this.steps = [ e.pot().add([ i.water(25, u.second), ]), Timer.set(15, 'm', 'Wait for water to boil'), i.Groups.mushroom(4, u.unit).cutIntoStrips(), Timer.end(), e.pot().add([ i.lentilSpaghetti(8, u.ounce), ]), Timer.set(18, 'm', 'let lentil spaghetti cook'), e.pan().add([ i.Groups.mushroom(4, u.unit), ]), e.pan().cook(8, 'm'), e.pan().add([ i.spaghettiSauce(25, u.ounce), ]), e.pan().cook(8, 'm'), Timer.end(), Timer.end(), Timer.end(), text.set(['Top with', i.parmesanCheese(8, u.ounce)]), ]; } } export class LentilSpaghettiInstantPot extends Recipe { constructor() { super(); this.steps = [ e.instantPot().add([ i.spaghettiSauce(25, u.ounce), i.water(1, u.cup), i.lentilSpaghetti(8, u.ounce), i.oliveOil(1, u.ounce) ]), text.set(['Stir instant pot and break up pasta']), e.instantPot().pressureCook(15, 13, 'm'), text.set(['Top with', i.parmesanCheese(8, u.ounce)]), ]; } } export class DontUseModernBrandLentilPenne extends Recipe { constructor() { super(); this.steps = [ e.instantPot().add([ i.spaghettiSauce(25, u.ounce), i.water(1, u.cup), i.penneLentil(8, u.ounce), i.oliveOil(1, u.ounce) ]), text.set(['Stir instant pot and break up pasta']), e.instantPot().pressureCook(15, 7, 'm'), Timer.end(), ]; } }<|fim▁end|>
this.recipeName = 'Spaghetti'; this.variations = [ LentilPenne, LentilSpaghetti, LentilSpaghettiInstantPot, DontUseModernBrandLentilPenne
<|file_name|>layer_fields.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3<|fim▁hole|># # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # 3. Neither the name of the copyright holder nor the # names of its contributors may be used to endorse or promote products # derived from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # import datetime import sys import time from typing import Any, Union from pyshark.packet.fields import LayerFieldsContainer, LayerField from pyshark.packet.packet import Packet as RawPacket from pktverify.addrs import EthAddr, ExtAddr, Ipv6Addr from pktverify.bytes import Bytes from pktverify.consts import VALID_LAYER_NAMES from pktverify.null_field import nullField def _auto(v: Union[LayerFieldsContainer, LayerField]): """parse the layer field automatically according to its format""" assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 or v.get_default_value() is not None, v.fields dv = v.get_default_value() rv = v.raw_value if dv.startswith('0x'): return int(dv, 16) try: if dv == rv: return int(dv) elif int(dv) == int(rv, 16): return int(dv) except (ValueError, TypeError): pass if rv is None: try: return int(dv) except (ValueError, TypeError): pass if ':' in dv and '::' not in dv and dv.replace(':', '') == rv: # '88:00', '8800' return int(rv, 16) # timestamp: 'Jan 1, 1970 08:00:00.000000000 CST', '0000000000000000' # convert to seconds from 1970, ignore the nanosecond for now since # there are integer seconds applied in the test cases try: time_str = datetime.datetime.strptime(dv, "%b %d, %Y %H:%M:%S.%f000 %Z") time_in_sec = time.mktime(time_str.utctimetuple()) return int(time_in_sec) except (ValueError, TypeError): pass try: int(rv, 16) return int(dv) except Exception: pass raise ValueError((v, v.get_default_value(), v.raw_value)) def _payload(v: Union[LayerFieldsContainer, LayerField]) -> bytearray: """parse the layer field as a bytearray""" assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 hex_value = v.raw_value assert len(hex_value) % 2 == 0 s = bytearray() for i in range(0, len(hex_value), 2): s.append(int(hex_value[i:i + 2], 16)) return s def _hex(v: Union[LayerFieldsContainer, LayerField]) -> int: """parse the layer field as a hex string""" # split v into octets and reverse the order assert not isinstance(v, 
LayerFieldsContainer) or len(v.fields) == 1 return int(v.get_default_value(), 16) def _raw_hex(v: Union[LayerFieldsContainer, LayerField]) -> int: """parse the layer field as a raw hex string""" # split v into octets and reverse the order assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 iv = v.hex_value try: int(v.get_default_value()) assert int(v.get_default_value()) == iv, (v.get_default_value(), v.raw_value) except ValueError: pass try: int(v.get_default_value(), 16) assert int(v.get_default_value(), 16) == iv, (v.get_default_value(), v.raw_value) except ValueError: pass return iv def _raw_hex_rev(v: Union[LayerFieldsContainer, LayerField]) -> int: """parse the layer field as a reversed raw hex string""" # split v into octets and reverse the order assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 rv = v.raw_value octets = [rv[i:i + 2] for i in range(0, len(rv), 2)] iv = int(''.join(reversed(octets)), 16) try: int(v.get_default_value()) assert int(v.get_default_value()) == iv, (v.get_default_value(), v.raw_value) except ValueError: pass try: int(v.get_default_value(), 16) assert int(v.get_default_value(), 16) == iv, (v.get_default_value(), v.raw_value) except ValueError: pass return iv def _dec(v: Union[LayerFieldsContainer, LayerField]) -> int: """parse the layer field as a decimal""" assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 return int(v.get_default_value()) def _float(v: Union[LayerFieldsContainer, LayerField]) -> float: """parse the layer field as a float""" assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 return float(v.get_default_value()) def _str(v: Union[LayerFieldsContainer, LayerField]) -> str: """parse the layer field as a string""" assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 return str(v.get_default_value()) def _bytes(v: Union[LayerFieldsContainer, LayerField]) -> Bytes: """parse the layer field as raw bytes""" assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 return Bytes(v.raw_value) def _ext_addr(v: Union[LayerFieldsContainer, LayerField]) -> ExtAddr: """parse the layer field as an extended address""" assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 return ExtAddr(v.get_default_value()) def _ipv6_addr(v: Union[LayerFieldsContainer, LayerField]) -> Ipv6Addr: """parse the layer field as an IPv6 address""" assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 return Ipv6Addr(v.get_default_value()) def _eth_addr(v: Union[LayerFieldsContainer, LayerField]) -> EthAddr: """parse the layer field as an Ethernet MAC address""" assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1, v.fields return EthAddr(v.get_default_value()) def _routerid_set(v: Union[LayerFieldsContainer, LayerField]) -> set: """parse the layer field as a set of router ids Notes: the router ID mask in wireshark is a hexadecimal string separated by ':' """ assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 try: ridmask = str(v.get_default_value()) assert isinstance(ridmask, str), ridmask ridmask_int = int(ridmask.replace(':', ''), base=16) rid_set = set() count = 0 while ridmask_int: count += 1 if ridmask_int & 1: rid_set.add(64 - count) ridmask_int = ridmask_int >> 1 except ValueError: pass return rid_set class _first(object): """parse the first layer field""" def __init__(self, sub_parse): self._sub_parse = sub_parse def __call__(self, v: Union[LayerFieldsContainer, LayerField]): return self._sub_parse(v.fields[0]) 
class _list(object): """parse all layer fields into a list""" def __init__(self, sub_parse): self._sub_parse = sub_parse def __call__(self, v: Union[LayerFieldsContainer, LayerField]): return [self._sub_parse(f) for f in v.fields] _LAYER_FIELDS = { # WPAN 'wpan.fcf': _raw_hex_rev, 'wpan.cmd': _auto, 'wpan.security': _auto, 'wpan.frame_type': _auto, 'wpan.pending': _auto, 'wpan.ack_request': _auto, 'wpan.pan_id_compression': _auto, 'wpan.seqno_suppression': _auto, 'wpan.ie_present': _auto, 'wpan.dst_addr_mode': _auto, 'wpan.version': _auto, 'wpan.src_addr_mode': _auto, 'wpan.dst_pan': _auto, 'wpan.seq_no': _auto, 'wpan.src16': _auto, 'wpan.dst16': _auto, 'wpan.src64': _ext_addr, 'wpan.dst64': _ext_addr, 'wpan.fcs': _raw_hex_rev, 'wpan.fcs_ok': _auto, 'wpan.frame_length': _dec, 'wpan.key_number': _auto, 'wpan.aux_sec.sec_suite': _auto, 'wpan.aux_sec.security_control_field': _auto, 'wpan.aux_sec.sec_level': _auto, 'wpan.aux_sec.key_id_mode': _auto, 'wpan.aux_sec.frame_counter_suppression': _auto, 'wpan.aux_sec.asn_in_nonce': _auto, 'wpan.aux_sec.reserved': _auto, 'wpan.aux_sec.frame_counter': _auto, 'wpan.aux_sec.key_source': _auto, 'wpan.aux_sec.key_index': _auto, 'wpan.aux_sec.hdr': _str, 'wpan.mic': _auto, 'wpan.channel': _auto, 'wpan.header_ie.id': _list(_auto), 'wpan.header_ie.csl.period': _auto, 'wpan.payload_ie.vendor.oui': _auto, # MLE 'mle.cmd': _auto, 'mle.sec_suite': _hex, 'mle.tlv.type': _list(_dec), 'mle.tlv.len': _list(_dec), 'mle.tlv.mode.receiver_on_idle': _auto, 'mle.tlv.mode.reserved1': _auto, 'mle.tlv.mode.reserved2': _auto, 'mle.tlv.mode.device_type_bit': _auto, 'mle.tlv.mode.network_data': _auto, 'mle.tlv.challenge': _bytes, 'mle.tlv.scan_mask.r': _auto, 'mle.tlv.scan_mask.e': _auto, 'mle.tlv.version': _auto, 'mle.tlv.source_addr': _auto, 'mle.tlv.active_tstamp': _auto, 'mle.tlv.pending_tstamp': _auto, 'mle.tlv.leader_data.partition_id': _auto, 'mle.tlv.leader_data.weighting': _auto, 'mle.tlv.leader_data.data_version': _auto, 'mle.tlv.leader_data.stable_data_version': _auto, 'mle.tlv.leader_data.router_id': _auto, 'mle.tlv.route64.nbr_out': _list(_auto), 'mle.tlv.route64.nbr_in': _list(_auto), 'mle.tlv.route64.id_seq': _auto, 'mle.tlv.route64.id_mask': _routerid_set, 'mle.tlv.route64.cost': _list(_auto), 'mle.tlv.response': _bytes, 'mle.tlv.mle_frm_cntr': _auto, 'mle.tlv.ll_frm_cntr': _auto, 'mle.tlv.link_margin': _auto, 'mle.tlv.conn.sed_dgram_cnt': _auto, 'mle.tlv.conn.sed_buf_size': _auto, 'mle.tlv.conn.lq3': _auto, 'mle.tlv.conn.lq2': _auto, 'mle.tlv.conn.lq1': _auto, 'mle.tlv.conn.leader_cost': _auto, 'mle.tlv.conn.id_seq': _auto, 'mle.tlv.conn.flags.pp': _auto, 'mle.tlv.conn.active_rtrs': _auto, 'mle.tlv.timeout': _auto, 'mle.tlv.addr16': _auto, 'mle.tlv.channel': _auto, 'mle.tlv.addr_reg_iid': _list(_auto), 'mle.tlv.link_enh_ack_flags': _auto, 'mle.tlv.link_forward_series': _list(_auto), 'mle.tlv.link_requested_type_id_flags': _list(_hex), 'mle.tlv.link_sub_tlv': _auto, 'mle.tlv.link_status_sub_tlv': _auto, 'mle.tlv.query_id': _auto, 'mle.tlv.metric_type_id_flags.type': _list(_hex), 'mle.tlv.metric_type_id_flags.metric': _list(_hex), 'mle.tlv.metric_type_id_flags.l': _list(_hex), 'mle.tlv.link_requested_type_id_flags': _bytes, # IP 'ip.version': _auto, 'ip.src': _str, 'ip.src_host': _str, 'ip.dst': _str, 'ip.dst_host': _str, 'ip.ttl': _auto, 'ip.proto': _auto, 'ip.len': _auto, 'ip.id': _auto, 'ip.host': _list(_str), 'ip.hdr_len': _dec, 'ip.frag_offset': _auto, 'ip.flags.rb': _auto, 'ip.flags.mf': _auto, 'ip.flags.df': _auto, 'ip.dsfield.ecn': _auto, 
'ip.dsfield.dscp': _auto, 'ip.checksum.status': _auto, 'ip.addr': _list(_str), 'ip.options.routeralert': _bytes, 'ip.opt.type.number': _auto, 'ip.opt.type.copy': _auto, 'ip.opt.type.class': _auto, 'ip.opt.ra': _auto, 'ip.opt.len': _auto, # UDP 'udp.stream': _auto, 'udp.srcport': _auto, 'udp.dstport': _auto, 'udp.length': _auto, 'udp.port': _list(_dec), 'udp.checksum.status': _auto, # IPv6 'ipv6.version': _auto, 'ipv6.src': _ipv6_addr, 'ipv6.src_host': _ipv6_addr, 'ipv6.dst': _ipv6_addr, 'ipv6.dst_host': _ipv6_addr, 'ipv6.addr': _list(_ipv6_addr), 'ipv6.tclass.dscp': _auto, 'ipv6.tclass.ecn': _auto, 'ipv6.flow': _auto, 'ipv6.hlim': _auto, 'ipv6.nxt': _auto, 'ipv6.hopopts.len': _auto, 'ipv6.hopopts.nxt': _auto, 'ipv6.hopopts.len_oct': _dec, 'ipv6.host': _list(_ipv6_addr), 'ipv6.plen': _auto, 'ipv6.opt.type.rest': _list(_auto), 'ipv6.opt.type.change': _list(_auto), 'ipv6.opt.type.action': _list(_auto), 'ipv6.opt.router_alert': _auto, 'ipv6.opt.padn': _str, 'ipv6.opt.length': _list(_auto), 'ipv6.opt.mpl.seed_id': _bytes, 'ipv6.opt.mpl.sequence': _auto, 'ipv6.opt.mpl.flag.v': _auto, 'ipv6.opt.mpl.flag.s': _auto, 'ipv6.opt.mpl.flag.rsv': _auto, 'ipv6.opt.mpl.flag.m': _auto, # Eth 'eth.src': _eth_addr, 'eth.src_resolved': _eth_addr, 'eth.dst': _eth_addr, 'eth.dst_resolved': _eth_addr, 'eth.type': _auto, 'eth.addr': _list(_eth_addr), 'eth.addr_resolved': _list(_eth_addr), 'eth.ig': _list(_auto), 'eth.lg': _list(_auto), # 6LOWPAN '6lowpan.src': _ipv6_addr, '6lowpan.dst': _ipv6_addr, '6lowpan.udp.src': _auto, '6lowpan.udp.dst': _auto, '6lowpan.udp.checksum': _auto, '6lowpan.frag.offset': _auto, '6lowpan.frag.tag': _auto, '6lowpan.frag.size': _auto, '6lowpan.pattern': _list(_auto), '6lowpan.hops': _auto, '6lowpan.padding': _auto, '6lowpan.next': _auto, '6lowpan.flow': _auto, '6lowpan.ecn': _auto, '6lowpan.iphc.tf': _auto, '6lowpan.iphc.m': _auto, '6lowpan.iphc.nh': _auto, '6lowpan.iphc.hlim': _auto, '6lowpan.iphc.cid': _auto, '6lowpan.iphc.sac': _auto, '6lowpan.iphc.sam': _auto, '6lowpan.iphc.dac': _auto, '6lowpan.iphc.dam': _auto, '6lowpan.iphc.sci': _auto, '6lowpan.iphc.dci': _auto, '6lowpan.iphc.sctx.prefix': _ipv6_addr, '6lowpan.iphc.dctx.prefix': _ipv6_addr, '6lowpan.mesh.v': _auto, '6lowpan.nhc.pattern': _list(_auto), '6lowpan.nhc.udp.checksum': _auto, '6lowpan.nhc.udp.ports': _auto, '6lowpan.nhc.ext.nh': _auto, '6lowpan.nhc.ext.length': _auto, '6lowpan.nhc.ext.eid': _auto, '6lowpan.reassembled.length': _auto, '6lowpan.fragments': _str, '6lowpan.fragment.count': _auto, '6lowpan.mesh.orig16': _auto, '6lowpan.mesh.hops8': _auto, '6lowpan.mesh.hops': _auto, '6lowpan.mesh.f': _auto, '6lowpan.mesh.dest16': _auto, # ICMPv6 'icmpv6.type': _first(_auto), 'icmpv6.code': _first(_auto), 'icmpv6.checksum': _first(_auto), 'icmpv6.reserved': _raw_hex, 'icmpv6.resptime': _float, 'icmpv6.resp_to': _auto, 'icmpv6.mldr.nb_mcast_records': _auto, 'icmpv6.nd.ra.cur_hop_limit': _auto, 'icmpv6.nd.ns.target_address': _ipv6_addr, 'icmpv6.nd.na.target_address': _ipv6_addr, 'icmpv6.nd.na.flag.s': _auto, 'icmpv6.nd.na.flag.o': _auto, 'icmpv6.nd.na.flag.r': _auto, 'icmpv6.nd.na.flag.rsv': _auto, 'icmpv6.mldr.mar.record_type': _list(_auto), 'icmpv6.mldr.mar.aux_data_len': _list(_auto), 'icmpv6.mldr.mar.nb_sources': _list(_auto), 'icmpv6.mldr.mar.multicast_address': _list(_ipv6_addr), 'icmpv6.opt.type': _list(_auto), 'icmpv6.opt.nonce': _bytes, 'icmpv6.opt.linkaddr': _eth_addr, 'icmpv6.opt.src_linkaddr': _eth_addr, 'icmpv6.opt.target_linkaddr': _eth_addr, 'icmpv6.opt.route_lifetime': _auto, 
'icmpv6.opt.route_info.flag.route_preference': _auto, 'icmpv6.opt.route_info.flag.reserved': _auto, 'icmpv6.opt.prefix.valid_lifetime': _auto, 'icmpv6.opt.prefix.preferred_lifetime': _auto, 'icmpv6.opt.prefix.length': _list(_auto), 'icmpv6.opt.prefix.flag.reserved': _auto, 'icmpv6.opt.prefix.flag.r': _auto, 'icmpv6.opt.prefix.flag.l': _auto, 'icmpv6.opt.prefix.flag.a': _auto, 'icmpv6.opt.length': _list(_auto), 'icmpv6.opt.reserved': _str, 'icmpv6.nd.ra.router_lifetime': _auto, 'icmpv6.nd.ra.retrans_timer': _auto, 'icmpv6.nd.ra.reachable_time': _auto, 'icmpv6.nd.ra.flag.rsv': _auto, 'icmpv6.nd.ra.flag.prf': _auto, 'icmpv6.nd.ra.flag.p': _auto, 'icmpv6.nd.ra.flag.o': _auto, 'icmpv6.nd.ra.flag.m': _auto, 'icmpv6.nd.ra.flag.h': _auto, 'icmpv6.echo.sequence_number': _auto, 'icmpv6.echo.identifier': _auto, 'icmpv6.data.len': _auto, # COAP 'coap.code': _auto, 'coap.version': _auto, 'coap.type': _auto, 'coap.mid': _auto, 'coap.token_len': _auto, 'coap.token': _auto, 'coap.opt.uri_path': _list(_str), 'coap.opt.name': _list(_str), 'coap.opt.length': _list(_auto), 'coap.opt.uri_path_recon': _str, 'coap.payload': _payload, 'coap.payload_length': _auto, 'coap.payload_desc': _str, 'coap.opt.end_marker': _auto, 'coap.opt.desc': _list(_str), 'coap.opt.delta': _list(_auto), 'coap.response_to': _auto, 'coap.response_time': _float, # COAP TLVS 'coap.tlv.type': _list(_auto), 'coap.tlv.status': _auto, 'coap.tlv.target_eid': _ipv6_addr, 'coap.tlv.ml_eid': _ext_addr, 'coap.tlv.last_transaction_time': _auto, 'coap.tlv.rloc16': _auto, 'coap.tlv.net_name': _str, 'coap.tlv.ext_mac_addr': _ext_addr, 'coap.tlv.router_mask_assigned': _auto, 'coap.tlv.router_mask_id_seq': _auto, # dtls 'dtls.handshake.type': _list(_auto), 'dtls.handshake.cookie': _auto, 'dtls.record.content_type': _list(_auto), 'dtls.alert_message.desc': _auto, # thread beacon 'thread_bcn.protocol': _auto, 'thread_bcn.version': _auto, 'thread_bcn.network_name': _str, 'thread_bcn.epid': _ext_addr, # thread_address 'thread_address.tlv.len': _list(_auto), 'thread_address.tlv.type': _list(_auto), 'thread_address.tlv.status': _auto, 'thread_address.tlv.target_eid': _ipv6_addr, 'thread_address.tlv.ext_mac_addr': _ext_addr, 'thread_address.tlv.router_mask_id_seq': _auto, 'thread_address.tlv.router_mask_assigned': _bytes, 'thread_address.tlv.rloc16': _hex, 'thread_address.tlv.target_eid': _ipv6_addr, 'thread_address.tlv.ml_eid': _ext_addr, # thread bl 'thread_bl.tlv.type': _list(_auto), 'thread_bl.tlv.len': _list(_auto), 'thread_bl.tlv.target_eid': _ipv6_addr, 'thread_bl.tlv.ml_eid': _ext_addr, 'thread_bl.tlv.last_transaction_time': _auto, 'thread_bl.tlv.timeout': _auto, # THEAD NM 'thread_nm.tlv.type': _list(_auto), 'thread_nm.tlv.ml_eid': _ext_addr, 'thread_nm.tlv.target_eid': _ipv6_addr, 'thread_nm.tlv.status': _auto, 'thread_nm.tlv.timeout': _auto, # thread_meshcop is not a real layer 'thread_meshcop.len_size_mismatch': _str, 'thread_meshcop.tlv.type': _list(_auto), 'thread_meshcop.tlv.len8': _list(_auto), 'thread_meshcop.tlv.net_name': _list(_str), # from thread_bl 'thread_meshcop.tlv.commissioner_id': _str, 'thread_meshcop.tlv.commissioner_sess_id': _auto, # from mle "thread_meshcop.tlv.channel_page": _auto, # from ble "thread_meshcop.tlv.channel": _list(_auto), # from ble "thread_meshcop.tlv.chan_mask": _str, # from ble 'thread_meshcop.tlv.chan_mask_page': _auto, 'thread_meshcop.tlv.chan_mask_len': _auto, 'thread_meshcop.tlv.chan_mask_mask': _bytes, 'thread_meshcop.tlv.discovery_req_ver': _auto, 'thread_meshcop.tlv.discovery_rsp_ver': _auto, 
'thread_meshcop.tlv.discovery_rsp_n': _auto, 'thread_meshcop.tlv.energy_list': _list(_auto), 'thread_meshcop.tlv.pan_id': _list(_auto), 'thread_meshcop.tlv.xpan_id': _bytes, 'thread_meshcop.tlv.ml_prefix': _bytes, 'thread_meshcop.tlv.master_key': _bytes, 'thread_meshcop.tlv.pskc': _bytes, 'thread_meshcop.tlv.sec_policy_rot': _auto, 'thread_meshcop.tlv.sec_policy_o': _auto, 'thread_meshcop.tlv.sec_policy_n': _auto, 'thread_meshcop.tlv.sec_policy_r': _auto, 'thread_meshcop.tlv.sec_policy_c': _auto, 'thread_meshcop.tlv.sec_policy_b': _auto, 'thread_meshcop.tlv.state': _auto, 'thread_meshcop.tlv.steering_data': _bytes, 'thread_meshcop.tlv.unknown': _bytes, 'thread_meshcop.tlv.udp_port': _list(_auto), 'thread_meshcop.tlv.ba_locator': _auto, 'thread_meshcop.tlv.jr_locator': _auto, 'thread_meshcop.tlv.active_tstamp': _auto, 'thread_meshcop.tlv.pending_tstamp': _auto, 'thread_meshcop.tlv.delay_timer': _auto, 'thread_meshcop.tlv.ipv6_addr': _list(_ipv6_addr), # THREAD NWD 'thread_nwd.tlv.type': _list(_auto), 'thread_nwd.tlv.len': _list(_auto), 'thread_nwd.tlv.stable': _list(_auto), 'thread_nwd.tlv.service.t': _auto, 'thread_nwd.tlv.service.s_id': _auto, 'thread_nwd.tlv.service.s_data_len': _auto, 'thread_nwd.tlv.service.s_data.seqno': _auto, 'thread_nwd.tlv.service.s_data.rrdelay': _auto, 'thread_nwd.tlv.service.s_data.mlrtimeout': _auto, 'thread_nwd.tlv.server_16': _list(_auto), 'thread_nwd.tlv.border_router_16': _list(_auto), 'thread_nwd.tlv.sub_tlvs': _list(_str), # TODO: support thread_nwd.tlv.prefix.length and thread_nwd.tlv.prefix.domain_id 'thread_nwd.tlv.prefix': _list(_ipv6_addr), 'thread_nwd.tlv.border_router.pref': _auto, 'thread_nwd.tlv.border_router.flag.s': _list(_auto), 'thread_nwd.tlv.border_router.flag.r': _list(_auto), 'thread_nwd.tlv.border_router.flag.p': _list(_auto), 'thread_nwd.tlv.border_router.flag.o': _list(_auto), 'thread_nwd.tlv.border_router.flag.n': _list(_auto), 'thread_nwd.tlv.border_router.flag.dp': _list(_auto), 'thread_nwd.tlv.border_router.flag.d': _list(_auto), 'thread_nwd.tlv.border_router.flag.c': _list(_auto), 'thread_nwd.tlv.6co.flag.reserved': _auto, 'thread_nwd.tlv.6co.flag.cid': _auto, 'thread_nwd.tlv.6co.flag.c': _list(_auto), 'thread_nwd.tlv.6co.context_length': _auto, # Thread Diagnostic 'thread_diagnostic.tlv.type': _list(_auto), 'thread_diagnostic.tlv.len8': _list(_auto), 'thread_diagnostic.tlv.general': _list(_str), # DNS 'dns.resp.ttl': _auto, 'dns.flags.response': _auto, } _layer_containers = set() for key in _LAYER_FIELDS: assert key.strip() == key and ' ' not in key, key secs = key.split('.') assert len(secs) >= 2 assert secs[0] in VALID_LAYER_NAMES, secs[0] for i in range(len(secs) - 2): path = secs[0] + '.' + '.'.join(secs[1:i + 2]) assert path not in _LAYER_FIELDS, '%s can not be both field and path' % path _layer_containers.add(path) def is_layer_field(uri: str) -> bool: """ Returns if the URI is a valid layer field. :param uri: The layer field URI. """ return uri in _LAYER_FIELDS def is_layer_field_container(uri: str) -> bool: """ Returns if the URI is a valid layer field container. :param uri: The layer field container URI. """ return uri in _layer_containers def get_layer_field(packet: RawPacket, field_uri: str) -> Any: """ Get a given layer field from the packet. :param packet: The packet. :param field_uri: The layer field URI. :return: The specified layer field. 
""" assert isinstance(packet, RawPacket) secs = field_uri.split('.') layer_depth = 0 layer_name = secs[0] if layer_name.endswith('inner'): layer_name = layer_name[:-len('inner')] field_uri = '.'.join([layer_name] + secs[1:]) layer_depth = 1 if is_layer_field(field_uri): candidate_layers = _get_candidate_layers(packet, layer_name) for layers in candidate_layers: if layer_depth >= len(layers): continue layer = layers[layer_depth] v = layer.get_field(field_uri) if v is not None: try: v = _LAYER_FIELDS[field_uri](v) print("[%s = %r] " % (field_uri, v), file=sys.stderr) return v except Exception as ex: raise ValueError('can not parse field %s = %r' % (field_uri, (v.get_default_value(), v.raw_value))) from ex print("[%s = %s] " % (field_uri, "null"), file=sys.stderr) return nullField elif is_layer_field_container(field_uri): from pktverify.layer_fields_container import LayerFieldsContainer return LayerFieldsContainer(packet, field_uri) else: raise NotImplementedError('Field %s is not valid, please add it to `_LAYER_FIELDS`' % field_uri) def check_layer_field_exists(packet, field_uri): """ Check if a given layer field URI exists in the packet. :param packet: The packet to check. :param field_uri: The layer field URI. :return: Whether the layer field URI exists in the packet. """ assert isinstance(packet, RawPacket) secs = field_uri.split('.') layer_name = secs[0] if not is_layer_field(field_uri) and not is_layer_field_container(field_uri): raise NotImplementedError('%s is neither a field or field container' % field_uri) candidate_layers = _get_candidate_layers(packet, layer_name) for layers in candidate_layers: for layer in layers: for k, v in layer._all_fields.items(): if k == field_uri or k.startswith(field_uri + '.'): return True return False def _get_candidate_layers(packet, layer_name): if layer_name == 'thread_meshcop': candidate_layer_names = ['thread_meshcop', 'mle', 'coap', 'thread_bl', 'thread_nm'] elif layer_name == 'thread_nwd': candidate_layer_names = ['mle', 'thread_address', 'thread_diagnostic'] elif layer_name == 'wpan': candidate_layer_names = ['wpan', 'mle'] elif layer_name == 'ip': candidate_layer_names = ['ip', 'ipv6'] elif layer_name == 'thread_bcn': candidate_layer_names = ['thread_bcn'] else: candidate_layer_names = [layer_name] layers = [] for ln in candidate_layer_names: if hasattr(packet, ln): layers.append(packet.get_multiple_layers(ln)) return layers<|fim▁end|>
# # Copyright (c) 2019, The OpenThread Authors. # All rights reserved.
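Among the parsers this record defines, _routerid_set is the least obvious: it turns a colon-separated hex mask into the set of assigned router IDs, with the most significant bit of the mask standing for router ID 0. A minimal standalone re-implementation of that decoding:

```python
# Re-implements the bit-walk from _routerid_set above.
def routerid_set(ridmask: str) -> set:
    mask = int(ridmask.replace(':', ''), base=16)
    rids, count = set(), 0
    while mask:
        count += 1
        if mask & 1:
            rids.add(64 - count)  # LSB corresponds to router ID 63
        mask >>= 1
    return rids

# only the top bit set -> router ID 0 is assigned
assert routerid_set('80:00:00:00:00:00:00:00') == {0}
```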
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin from .models import Post, Publisher class PostInline(admin.StackedInline): model = Post <|fim▁hole|> inlines = [PostInline,] admin.site.register(Publisher, PublisherAdmin) admin.site.register(Post)<|fim▁end|>
class PublisherAdmin(admin.ModelAdmin):
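The completion supplies the PublisherAdmin class that wires PostInline into the admin site. A hedged variant, assuming the same models module: Django's TabularInline is the drop-in alternative to the StackedInline used above when a more compact edit form is wanted:

```python
# Sketch only; assumes the same Post/Publisher models as the record above.
from django.contrib import admin
from .models import Post, Publisher

class PostTabularInline(admin.TabularInline):
    model = Post
    extra = 1  # number of blank rows offered in the edit form

class PublisherAdmin(admin.ModelAdmin):
    inlines = [PostTabularInline]

admin.site.register(Publisher, PublisherAdmin)
```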
<|file_name|>FavoritesSharedPrefsUtilsImpl.java<|end_file_name|><|fim▁begin|>package org.septa.android.app.services.apiinterfaces; import android.content.Context; import android.content.SharedPreferences; import android.util.Log; import com.google.gson.Gson; import com.google.gson.JsonSyntaxException; import com.google.gson.reflect.TypeToken; import org.septa.android.app.favorites.FavoriteState; import org.septa.android.app.services.apiinterfaces.model.Favorite; import org.septa.android.app.services.apiinterfaces.model.NextArrivalFavorite; import org.septa.android.app.services.apiinterfaces.model.TransitViewFavorite; import org.septa.android.app.transitview.TransitViewUtils; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; public class FavoritesSharedPrefsUtilsImpl implements FavoritesSharedPrefsUtils { public static final String TAG = FavoritesSharedPrefsUtilsImpl.class.getSimpleName(); private static final String KEY_FAVORITES_NTA = "favorite_json"; private static final String KEY_FAVORITES_TRANSITVIEW = "favorite_transitview_json"; private static final String KEY_FAVORITES_STATE = "favorite_state_json"; // using commit() instead of apply() so that the values are immediately written to memory /** * fixing some corrupt favorites * * @param context * @return list of valid favorites */ @Override public Map<String, NextArrivalFavorite> getNTAFavorites(Context context) { SharedPreferences sharedPreferences = getSharedPreferences(context); Map<String, NextArrivalFavorite> favorites = getNTAFavorites(sharedPreferences); for (Map.Entry<String, NextArrivalFavorite> entry : favorites.entrySet()) { // delete any invalid NTA favorites if (entry.getValue().getStart() == null) { deleteAllFavorites(context); return new HashMap<>(); } } return favorites; } /** * fixing some corrupt favorites * * @param context * @return list of valid favorites */ @Override public Map<String, TransitViewFavorite> getTransitViewFavorites(Context context) { SharedPreferences sharedPreferences = getSharedPreferences(context); Map<String, TransitViewFavorite> favorites = getTransitViewFavorites(sharedPreferences); for (Map.Entry<String, TransitViewFavorite> entry : favorites.entrySet()) { // TODO: delete / fix any invalid TransitView favorites if (entry.getValue().getSecondRoute() == null && entry.getValue().getThirdRoute() != null) { deleteAllFavorites(context); return new HashMap<>(); } } return favorites; } @Override public List<FavoriteState> getFavoriteStates(Context context) { SharedPreferences sharedPreferences = getSharedPreferences(context); String preferencesJson = sharedPreferences.getString(KEY_FAVORITES_STATE, null); if (preferencesJson == null) { return new ArrayList<>(); } Gson gson = new Gson(); try { return gson.fromJson(preferencesJson, new TypeToken<List<FavoriteState>>() { }.getType()); } catch (JsonSyntaxException e) { Log.e(TAG, e.toString()); sharedPreferences.edit().remove(KEY_FAVORITES_STATE).commit(); return new ArrayList<>(); } } @Override public void addFavorites(Context context, Favorite favorite) { SharedPreferences sharedPreferences = getSharedPreferences(context); if (favorite instanceof NextArrivalFavorite) { Map<String, NextArrivalFavorite> favorites = getNTAFavorites(sharedPreferences); favorites.put(favorite.getKey(), (NextArrivalFavorite) favorite); addFavoriteState(context, new FavoriteState(favorite.getKey())); storeNTAFavorites(sharedPreferences, favorites); } else if (favorite instanceof TransitViewFavorite) { Map<String, 
TransitViewFavorite> favorites = getTransitViewFavorites(sharedPreferences); favorites.put(favorite.getKey(), (TransitViewFavorite) favorite); addFavoriteState(context, new FavoriteState(favorite.getKey())); storeTransitViewFavorites(sharedPreferences, favorites); } else { Log.e(TAG, "Invalid class type -- could not create a new Favorite for " + favorite.getKey()); } } @Override public void addFavoriteState(Context context, FavoriteState favoriteState) { SharedPreferences sharedPreferences = getSharedPreferences(context); List<FavoriteState> favoritesState = getFavoriteStates(context); if (!favoritesState.contains(favoriteState)) { favoritesState.add(favoriteState); storeFavoritesState(sharedPreferences, favoritesState); } else { Log.d(TAG, "Already have a favorite state for " + favoriteState.getFavoriteKey()); } } @Override public void setFavoriteStates(Context context, List<FavoriteState> favoriteStateList) { SharedPreferences sharedPreferences = getSharedPreferences(context); storeFavoritesState(sharedPreferences, favoriteStateList); } @Override public void modifyFavoriteState(Context context, int index, boolean expanded) { SharedPreferences sharedPreferences = getSharedPreferences(context); List<FavoriteState> favoriteStateList = getFavoriteStates(context); favoriteStateList.get(index).setExpanded(expanded); storeFavoritesState(sharedPreferences, favoriteStateList); } @Override public void renameFavorite(Context context, Favorite favorite) { SharedPreferences sharedPreferences = getSharedPreferences(context); if (favorite instanceof NextArrivalFavorite) { Map<String, NextArrivalFavorite> favorites = getNTAFavorites(sharedPreferences); if (favorites.containsKey(favorite.getKey())) { favorites.put(favorite.getKey(), (NextArrivalFavorite) favorite); storeNTAFavorites(sharedPreferences, favorites); } else { Log.d(TAG, "NTA Favorite could not be renamed because it did not exist!"); addFavorites(context, favorite); } } else if (favorite instanceof TransitViewFavorite) { Map<String, TransitViewFavorite> favorites = getTransitViewFavorites(sharedPreferences); if (favorites.containsKey(favorite.getKey())) { favorites.put(favorite.getKey(), (TransitViewFavorite) favorite); storeTransitViewFavorites(sharedPreferences, favorites); } else { Log.d(TAG, "TransitView Favorite could not be renamed because it did not exist!"); addFavorites(context, favorite); } } else { Log.e(TAG, "Invalid class type -- could not rename Favorite " + favorite.getKey()); } } @Override public void deleteFavorite(Context context, String favoriteKey) { SharedPreferences sharedPreferences = getSharedPreferences(context); // attempt to delete NTA favorite Map<String, NextArrivalFavorite> ntaFavorites = getNTAFavorites(sharedPreferences); if (ntaFavorites.remove(favoriteKey) != null) { storeNTAFavorites(sharedPreferences, ntaFavorites); } else { // attempt to delete TransitView favorite Map<String, TransitViewFavorite> transitViewFavorites = getTransitViewFavorites(sharedPreferences); if (transitViewFavorites.remove(favoriteKey) != null) { storeTransitViewFavorites(sharedPreferences, transitViewFavorites); } else { Log.e(TAG, "Could not delete Favorite with key " + favoriteKey); } } deleteFavoriteState(context, favoriteKey); } private void deleteFavoriteState(Context context, String favoriteKey) { SharedPreferences sharedPreferences = getSharedPreferences(context); List<FavoriteState> favoriteStates = getFavoriteStates(context); int indexToRemove = -1; for (int i = 0; i < favoriteStates.size(); i++) { if 
(favoriteKey.equals(favoriteStates.get(i).getFavoriteKey())) { indexToRemove = i; break; } } if (indexToRemove != -1) { favoriteStates.remove(indexToRemove); } else { Log.e(TAG, "Could not delete favorite state with key " + favoriteKey); } storeFavoritesState(sharedPreferences, favoriteStates); } @Override public void deleteAllFavorites(Context context) { SharedPreferences sharedPreferences = getSharedPreferences(context); sharedPreferences.edit().remove(KEY_FAVORITES_NTA).commit(); sharedPreferences.edit().remove(KEY_FAVORITES_TRANSITVIEW).commit(); deleteAllFavoriteStates(context); } @Override public void deleteAllFavoriteStates(Context context) { SharedPreferences sharedPreferences = getSharedPreferences(context); sharedPreferences.edit().remove(KEY_FAVORITES_STATE).commit(); } @Override public Favorite getFavoriteByKey(Context context, String key) { SharedPreferences sharedPreferences = getSharedPreferences(context); Favorite favorite = getNTAFavorites(sharedPreferences).get(key); if (favorite == null) { favorite = getTransitViewFavorites(sharedPreferences).get(key); } return favorite; } @Override public void moveFavoriteStateToIndex(Context context, int fromPosition, int toPosition) { SharedPreferences sharedPreferences = getSharedPreferences(context); List<FavoriteState> favoriteStateList = getFavoriteStates(context); FavoriteState favoriteStateToMove = favoriteStateList.get(fromPosition); // remove favorite state favoriteStateList.remove(fromPosition); // re-add at index which shifts everything else back one favoriteStateList.add(toPosition, favoriteStateToMove); storeFavoritesState(sharedPreferences, favoriteStateList); } @Override public void resyncFavoritesMap(Context context) { SharedPreferences sharedPreferences = getSharedPreferences(context); List<FavoriteState> favoriteStateList = getFavoriteStates(context); Map<String, NextArrivalFavorite> ntaFavorites = getNTAFavorites(context); Map<String, TransitViewFavorite> transitViewFavorites = getTransitViewFavorites(context); if (favoriteStateList.isEmpty() && (!ntaFavorites.isEmpty() || !transitViewFavorites.isEmpty())) { // initialize favorite state list Log.d(TAG, "Initializing favorite states now..."); for (NextArrivalFavorite entry : ntaFavorites.values()) { FavoriteState favoriteState = new FavoriteState(entry.getKey()); favoriteStateList.add(favoriteState); } for (TransitViewFavorite entry : transitViewFavorites.values()) { FavoriteState favoriteState = new FavoriteState(entry.getKey()); favoriteStateList.add(favoriteState); } setFavoriteStates(context, favoriteStateList); } else if (favoriteStateList.size() != (ntaFavorites.size() + transitViewFavorites.size())) { // resync because state list does not map 1-to-1 Log.d(TAG, "Resyncing favorite states now..."); deleteAllFavorites(context); Map<String, NextArrivalFavorite> newNTAFavorites = new HashMap<>(); Map<String, TransitViewFavorite> newTransitViewFavorites = new HashMap<>(); for (FavoriteState favoriteState : favoriteStateList) { String favoriteKey = favoriteState.getFavoriteKey(); // copy over favorite if (TransitViewUtils.isATransitViewFavorite(favoriteKey)) { newTransitViewFavorites.put(favoriteKey, transitViewFavorites.get(favoriteKey)); } else { newNTAFavorites.put(favoriteKey, ntaFavorites.get(favoriteKey)); } // create new favorite state for it if (getFavoriteByKey(context, favoriteKey) == null) { addFavoriteState(context, new FavoriteState(favoriteKey)); } else { Log.d(TAG, "Favorite state already exists for favorite with key: " + favoriteKey); } } 
storeNTAFavorites(sharedPreferences, newNTAFavorites); storeTransitViewFavorites(sharedPreferences, newTransitViewFavorites); } else {<|fim▁hole|> Log.d(TAG, "Resync of favorites map did not occur. State list size: " + favoriteStateList.size() + " NTA Map size: " + ntaFavorites.size() + " TransitView map size: " + transitViewFavorites.size()); } } private SharedPreferences getSharedPreferences(Context context) { return context.getSharedPreferences("PREFERENCE_NAME", Context.MODE_PRIVATE); } private Map<String, NextArrivalFavorite> getNTAFavorites(SharedPreferences sharedPreferences) { String preferencesJson = sharedPreferences.getString(KEY_FAVORITES_NTA, null); if (preferencesJson == null) { return new HashMap<>(); } Gson gson = new Gson(); try { return gson.fromJson(preferencesJson, new TypeToken<Map<String, NextArrivalFavorite>>() { }.getType()); } catch (JsonSyntaxException e) { Log.e(TAG, e.toString()); sharedPreferences.edit().remove(KEY_FAVORITES_NTA).commit(); return new HashMap<>(); } } private Map<String, TransitViewFavorite> getTransitViewFavorites(SharedPreferences sharedPreferences) { String preferencesJson = sharedPreferences.getString(KEY_FAVORITES_TRANSITVIEW, null); if (preferencesJson == null) { return new HashMap<>(); } Gson gson = new Gson(); try { return gson.fromJson(preferencesJson, new TypeToken<Map<String, TransitViewFavorite>>() { }.getType()); } catch (JsonSyntaxException e) { Log.e(TAG, e.toString()); sharedPreferences.edit().remove(KEY_FAVORITES_TRANSITVIEW).commit(); return new HashMap<>(); } } private void storeNTAFavorites(SharedPreferences sharedPreferences, Map<String, NextArrivalFavorite> favorites) { Gson gson = new Gson(); String favoritesJson = gson.toJson(favorites); sharedPreferences.edit().putString(KEY_FAVORITES_NTA, favoritesJson).commit(); } private void storeTransitViewFavorites(SharedPreferences sharedPreferences, Map<String, TransitViewFavorite> favorites) { Gson gson = new Gson(); String favoritesJson = gson.toJson(favorites); sharedPreferences.edit().putString(KEY_FAVORITES_TRANSITVIEW, favoritesJson).commit(); } private void storeFavoritesState(SharedPreferences sharedPreferences, List<FavoriteState> favoriteStateList) { Gson gson = new Gson(); String favoritesStatesJson = gson.toJson(favoriteStateList); sharedPreferences.edit().putString(KEY_FAVORITES_STATE, favoritesStatesJson).commit(); } }<|fim▁end|>
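The Java record's resyncFavoritesMap enforces a one-to-one mapping between the ordered favorite-state list and the two favorites maps, initializing the states when they are missing and rebuilding the maps when the sizes disagree. A language-agnostic sketch of that reconciliation; the names are illustrative, not the Java API:

```python
# Restores the invariant: state_keys references exactly the stored favorites.
def resync(state_keys, nta, transitview):
    all_keys = set(nta) | set(transitview)
    if not state_keys and all_keys:
        # no states yet: create one state entry per stored favorite
        return sorted(all_keys), nta, transitview
    if len(state_keys) != len(all_keys):
        # sizes disagree: keep only the favorites the state list knows about
        new_nta = {k: nta[k] for k in state_keys if k in nta}
        new_tv = {k: transitview[k] for k in state_keys if k in transitview}
        return state_keys, new_nta, new_tv
    return state_keys, nta, transitview

states, nta, tv = resync(['a'], {'a': 1, 'b': 2}, {})
assert states == ['a'] and nta == {'a': 1} and tv == {}
```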
<|file_name|>minit.py<|end_file_name|><|fim▁begin|># Copyright 2017 The Meson development team # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Code that creates simple startup projects.""" from pathlib import Path from enum import Enum import subprocess import shutil import sys import os import re from glob import glob from mesonbuild import mesonlib from mesonbuild.environment import detect_ninja from mesonbuild.templates.samplefactory import sameple_generator import typing as T if T.TYPE_CHECKING: import argparse ''' we currently have one meson template at this time. ''' from mesonbuild.templates.mesontemplates import create_meson_build FORTRAN_SUFFIXES = {'.f', '.for', '.F', '.f90', '.F90'} LANG_SUFFIXES = {'.c', '.cc', '.cpp', '.cs', '.cu', '.d', '.m', '.mm', '.rs', '.java', '.vala'} | FORTRAN_SUFFIXES LANG_SUPPORTED = {'c', 'cpp', 'cs', 'cuda', 'd', 'fortran', 'java', 'rust', 'objc', 'objcpp', 'vala'} DEFAULT_PROJECT = 'executable' DEFAULT_VERSION = '0.1' class DEFAULT_TYPES(Enum): EXE = 'executable' LIB = 'library' INFO_MESSAGE = '''Sample project created. To build it run the following commands: meson setup builddir meson compile -C builddir ''' def create_sample(options: 'argparse.Namespace') -> None: ''' Based on what arguments are passed we check for a match in language then check for project type and create new Meson samples project. ''' sample_gen = sameple_generator(options) if options.type == DEFAULT_TYPES['EXE'].value: sample_gen.create_executable() elif options.type == DEFAULT_TYPES['LIB'].value: sample_gen.create_library() else: raise RuntimeError('Unreachable code') print(INFO_MESSAGE) def autodetect_options(options: 'argparse.Namespace', sample: bool = False) -> None: ''' Here we autodetect options for args not passed in so don't have to think about it. ''' if not options.name: options.name = Path().resolve().stem if not re.match('[a-zA-Z_][a-zA-Z0-9]*', options.name) and sample: raise SystemExit(f'Name of current directory "{options.name}" is not usable as a sample project name.\n' 'Specify a project name with --name.') print(f'Using "{options.name}" (name of current directory) as project name.') if not options.executable: options.executable = options.name print(f'Using "{options.executable}" (project name) as name of executable to build.') if sample: # The rest of the autodetection is not applicable to generating sample projects. 
return if not options.srcfiles: srcfiles = [] for f in (f for f in Path().iterdir() if f.is_file()): if f.suffix in LANG_SUFFIXES: srcfiles.append(f) if not srcfiles: raise SystemExit('No recognizable source files found.\n' 'Run meson init in an empty directory to create a sample project.') options.srcfiles = srcfiles print("Detected source files: " + ' '.join(map(str, srcfiles))) options.srcfiles = [Path(f) for f in options.srcfiles] if not options.language: for f in options.srcfiles: if f.suffix == '.c': options.language = 'c' break if f.suffix in ('.cc', '.cpp'): options.language = 'cpp' break if f.suffix == '.cs': options.language = 'cs' break if f.suffix == '.cu': options.language = 'cuda' break if f.suffix == '.d': options.language = 'd' break if f.suffix in FORTRAN_SUFFIXES: options.language = 'fortran' break if f.suffix == '.rs': options.language = 'rust' break if f.suffix == '.m': options.language = 'objc' break if f.suffix == '.mm': options.language = 'objcpp' break if f.suffix == '.java': options.language = 'java' break if f.suffix == '.vala': options.language = 'vala' break if not options.language: raise SystemExit("Can't autodetect language, please specify it with -l.") print("Detected language: " + options.language) def add_arguments(parser: 'argparse.ArgumentParser') -> None: ''' Here we add args for that the user can passed when making a new Meson project. ''' parser.add_argument("srcfiles", metavar="sourcefile", nargs="*", help="source files. default: all recognized files in current directory") parser.add_argument('-C', dest='wd', action=mesonlib.RealPathAction, help='directory to cd into before running') parser.add_argument("-n", "--name", help="project name. default: name of current directory") parser.add_argument("-e", "--executable", help="executable name. default: project name") parser.add_argument("-d", "--deps", help="dependencies, comma-separated") parser.add_argument("-l", "--language", choices=sorted(LANG_SUPPORTED), help="project language. default: autodetected based on source files") parser.add_argument("-b", "--build", action='store_true', help="build after generation") parser.add_argument("--builddir", default='build', help="directory for build") parser.add_argument("-f", "--force", action="store_true", help="force overwrite of existing files and directories.") parser.add_argument('--type', default=DEFAULT_PROJECT, choices=('executable', 'library'), help=f"project type. default: {DEFAULT_PROJECT} based project") parser.add_argument('--version', default=DEFAULT_VERSION, help=f"project version. default: {DEFAULT_VERSION}") def run(options: 'argparse.Namespace') -> int: ''' Here we generate the new Meson sample project. ''' if not Path(options.wd).exists(): sys.exit('Project source root directory not found. Run this command in source directory root.') os.chdir(options.wd) if not glob('*'): autodetect_options(options, sample=True) if not options.language: print('Defaulting to generating a C language project.') options.language = 'c' create_sample(options) else: autodetect_options(options) if Path('meson.build').is_file() and not options.force: raise SystemExit('meson.build already exists. 
Use --force to overwrite.') create_meson_build(options) if options.build: if Path(options.builddir).is_dir() and options.force: print('Build directory already exists, deleting it.') shutil.rmtree(options.builddir) print('Building...') cmd = mesonlib.get_meson_command() + [options.builddir] ret = subprocess.run(cmd) if ret.returncode: raise SystemExit cmd = detect_ninja() + ['-C', options.builddir] ret = subprocess.run(cmd)<|fim▁hole|> if ret.returncode: raise SystemExit return 0<|fim▁end|>
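The run() entry point above expects an argparse.Namespace shaped by add_arguments(). A minimal, illustrative driver — not part of Meson itself — showing the fields that flow through autodetect_options() and create_sample():

# Illustrative harness only; field names mirror add_arguments() above,
# but this snippet is an assumption-labeled sketch, not Meson's real CLI.
import argparse

opts = argparse.Namespace(
    srcfiles=[], wd='.', name=None, executable=None, deps=None,
    language=None, build=False, builddir='build', force=False,
    type='executable', version='0.1')
# run(opts) would then chdir into wd, autodetect name/language from the
# directory contents, and emit either a sample project or a meson.build.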
<|file_name|>singleOrDefault.ts<|end_file_name|><|fim▁begin|>export default function singleOrDefault<TSource>(this: Iterable<TSource>): TSource | null; export default function singleOrDefault<TSource>(this: Iterable<TSource>, predicate: (element: TSource) => boolean): TSource | null; export default function singleOrDefault<TSource>(this: Iterable<TSource>, predicate: ((element: TSource) => boolean) | null, defaultValue: TSource): TSource; export default function singleOrDefault<TSource>(this: Iterable<TSource>, predicate?: ((element: TSource) => boolean) | null, defaultValue: TSource | null = null): TSource | null { if (predicate) { let value: TSource | undefined; let hasValue = false; for (const element of this) { if (predicate(element)) { if (hasValue) { return defaultValue; } value = element; hasValue = true; } } if (hasValue) { return value as TSource; } } else { if (Array.isArray(this)) { switch ((this as any).length) { case 1: return (this as any)[0]; default: return defaultValue; } } else { let value: TSource | undefined; let hasValue = false; for (const element of this) { if (hasValue) { return defaultValue; } value = element;<|fim▁hole|> if (hasValue) { return value as TSource; } } } return defaultValue; }<|fim▁end|>
hasValue = true; }
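For comparison, a rough Python rendering of the singleOrDefault semantics above — a second match short-circuits to the default, mirroring the TypeScript early return. This is a sketch, not part of the original library:

# Python analogue of singleOrDefault; a second matching element returns
# the default, exactly like the TS early return above.
from typing import Callable, Iterable, Optional, TypeVar

T = TypeVar("T")

def single_or_default(items: Iterable[T],
                      predicate: Optional[Callable[[T], bool]] = None,
                      default: Optional[T] = None) -> Optional[T]:
    found = None
    has_value = False
    for item in items:
        if predicate is not None and not predicate(item):
            continue
        if has_value:
            return default  # more than one match
        found, has_value = item, True
    return found if has_value else default

# e.g. single_or_default([1, 2, 3], lambda x: x > 2) == 3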
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # envoy documentation build configuration file, created by # sphinx-quickstart on Sat May 28 10:51:27 2016. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. from datetime import datetime import os from sphinx.directives.code import CodeBlock import sphinx_rtd_theme import sys # https://stackoverflow.com/questions/44761197/how-to-use-substitution-definitions-with-code-blocks class SubstitutionCodeBlock(CodeBlock): """ Similar to CodeBlock but replaces placeholders with variables. See "substitutions" below. """ def run(self): """ Replace placeholders with given variables. """ app = self.state.document.settings.env.app new_content = [] existing_content = self.content for item in existing_content: for pair in app.config.substitutions: original, replacement = pair item = item.replace(original, replacement) new_content.append(item) self.content = new_content return list(CodeBlock.run(self)) def setup(app): app.add_config_value('release_level', '', 'env') app.add_config_value('substitutions', [], 'html') app.add_directive('substitution-code-block', SubstitutionCodeBlock) if not os.environ.get('ENVOY_DOCS_RELEASE_LEVEL'): raise Exception("ENVOY_DOCS_RELEASE_LEVEL env var must be defined") release_level = os.environ['ENVOY_DOCS_RELEASE_LEVEL'] blob_sha = os.environ['ENVOY_BLOB_SHA'] # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = ['sphinxcontrib.httpdomain', 'sphinx.ext.extlinks', 'sphinx.ext.ifconfig'] extlinks = { 'repo': ('https://github.com/envoyproxy/envoy/blob/{}/%s'.format(blob_sha), ''), 'api': ('https://github.com/envoyproxy/envoy/blob/{}/api/%s'.format(blob_sha), ''), } # Setup global substitutions if 'pre-release' in release_level: substitutions = [('|envoy_docker_image|', 'envoy-dev:{}'.format(blob_sha))] else: substitutions = [('|envoy_docker_image|', 'envoy:{}'.format(blob_sha))] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'envoy' copyright = u'2016-{}, Envoy Project Authors'.format(datetime.now().year) author = u'Envoy Project Authors' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. if not os.environ.get('ENVOY_DOCS_VERSION_STRING'): raise Exception("ENVOY_DOCS_VERSION_STRING env var must be defined") # The short X.Y version. 
version = os.environ['ENVOY_DOCS_VERSION_STRING'] # The full version, including alpha/beta/rc tags. release = os.environ['ENVOY_DOCS_VERSION_STRING'] # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This patterns also effect to html_static_path and html_extra_path exclude_patterns = [ '_build', '_venv', 'Thumbs.db', '.DS_Store', 'api-v2/api/v2/endpoint/load_report.proto.rst', 'api-v2/service/discovery/v2/hds.proto.rst', ] # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. #pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. #keep_warnings = False # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'sphinx_rtd_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] # The name for this set of Sphinx documents. # "<project> v<release> documentation" by default. #html_title = u'envoy v1.0.0' # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (relative to this directory) to use as a favicon of<|fim▁hole|>html_favicon = 'favicon.ico' # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] html_style = 'css/envoy.css' # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. #html_extra_path = [] # If not None, a 'Last updated on:' timestamp is inserted at every page # bottom, using the given strftime format. # The empty string is equivalent to '%b %d, %Y'. 
#html_last_updated_fmt = None # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh' #html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # 'ja' uses this config value. # 'zh' user can custom change `jieba` dictionary path. #html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. #html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. htmlhelp_basename = 'envoydoc'<|fim▁end|>
# the docs. This file should be a Windows icon file (.ico) that is 16x16 or 32x32 # pixels in size.
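The substitution pass inside SubstitutionCodeBlock.run() above is plain per-line string replacement; a stand-alone sketch, with the (placeholder, value) pairs standing in for app.config.substitutions (the sample SHA is illustrative):

# Stand-alone sketch of the per-line replacement done by the directive above.
def substitute_lines(lines, substitutions):
    out = []
    for line in lines:
        for placeholder, value in substitutions:
            line = line.replace(placeholder, value)
        out.append(line)
    return out

subs = [('|envoy_docker_image|', 'envoy-dev:abc123')]  # illustrative SHA
print(substitute_lines(['docker pull |envoy_docker_image|'], subs))
# -> ['docker pull envoy-dev:abc123']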
<|file_name|>runonsave.py<|end_file_name|><|fim▁begin|>import sublime, sublime_plugin class RunOnSave(sublime_plugin.EventListener): def on_post_save(self, view): # Check if project has run-on-save enabled. settings = view.settings() if settings.get('run_on_save') == 1: command = settings.get('command') if command is not None: option_dict = {'cmd': command} folders = view.window().folders() if folders is not None and len(folders) > 0: option_dict['working_dir'] = folders[0] path = settings.get('path') if path is not None: option_dict['path'] = path environment_dict = settings.get('environment_variables')<|fim▁hole|> if environment_dict is not None and len(environment_dict) > 0: option_dict['env'] = environment_dict; view.window().run_command('exec', option_dict)<|fim▁end|>
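The listener above reads everything from the view's settings; an illustrative settings payload matching the keys it queries (values are examples, not defaults shipped with the plugin):

# Keys taken from the settings.get() calls above; values are illustrative.
example_settings = {
    "run_on_save": 1,                      # gate checked first
    "command": ["make", "test"],           # becomes the exec "cmd"
    "path": "/usr/local/bin",              # optional, forwarded as "path"
    "environment_variables": {"CI": "1"},  # optional, forwarded as "env"
}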
<|file_name|>mockApplications.service.ts<|end_file_name|><|fim▁begin|>import * as _ from 'lodash'; interface IApplicationsService { getApplications(context: any): Promise<any>; } /** Backend service communications. */ export class ApplicationsService implements IApplicationsService { public static $inject = ['$filter', '$q', 'DataService']; private $filter: any; private $q: any; private DataService: any; constructor ($filter: any, $q: any, DataService: any) { this.$filter = $filter; this.$q = $q; this.DataService = DataService; } public getApplications(context: any): Promise<any> { var deferred: any = this.$q.defer(); var promises: any = []; // Load all the "application" types promises.push(this.DataService.list('deploymentconfigs', context)); promises.push(this.DataService.list('replicationcontrollers', context)); promises.push(this.DataService.list({group: 'apps', resource: 'deployments'}, context)); promises.push(this.DataService.list({group: 'extensions', resource: 'replicasets'}, context)); promises.push(this.DataService.list({group: 'apps', resource: 'statefulsets'}, context)); this.$q.all(promises).then(_.spread((deploymentConfigData: any, replicationControllerData: any, deploymentData: any, replicaSetData: any, statefulSetData: any) => { var deploymentConfigs: any = _.toArray(deploymentConfigData.by('metadata.name')); var replicationControllers: any = _.reject(replicationControllerData.by('metadata.name'), this.$filter('hasDeploymentConfig')); var deployments: any = _.toArray(deploymentData.by('metadata.name')); var replicaSets: any = _.reject(replicaSetData.by('metadata.name'), this.$filter('hasDeployment')); var statefulSets: any = _.toArray(statefulSetData.by('metadata.name')); var apiObjects: any = deploymentConfigs.concat(deployments) .concat(replicationControllers) .concat(replicaSets) .concat(statefulSets); deferred.resolve(_.sortBy(apiObjects, ['metadata.name', 'kind']));<|fim▁hole|> }); return deferred.promise; } }<|fim▁end|>
}), function(e: any) { deferred.reject(e);
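The service above fans out five list calls, rejects controllers owned by a config or deployment, and sorts the merged result. A loose asyncio analogue of that shape — fetch() and the ownership flags are assumptions for illustration:

# Loose asyncio analogue of getApplications() above; fetch(kind) is an
# assumed coroutine returning a list of dicts, and the "ownedBy*" flags
# stand in for the hasDeploymentConfig/hasDeployment filters.
import asyncio

async def get_applications(fetch):
    kinds = ["deploymentconfigs", "replicationcontrollers", "deployments",
             "replicasets", "statefulsets"]
    dcs, rcs, deps, rss, stss = await asyncio.gather(*(fetch(k) for k in kinds))
    rcs = [r for r in rcs if not r.get("ownedByConfig")]
    rss = [r for r in rss if not r.get("ownedByDeployment")]
    objs = dcs + deps + rcs + rss + stss
    return sorted(objs, key=lambda o: (o["metadata"]["name"], o["kind"]))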
<|file_name|>namespace_range.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # # Copyright 2010 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Represents a lexographic range of namespaces.""" # pylint: disable=g-bad-name __all__ = [ 'NAMESPACE_CHARACTERS', 'MAX_NAMESPACE_LENGTH', 'MAX_NAMESPACE', 'MIN_NAMESPACE', 'NAMESPACE_BATCH_SIZE', 'NamespaceRange', 'get_namespace_keys', ] import itertools import string from google.appengine.api import datastore from google.appengine.ext import db from google.appengine.ext.db import metadata NAMESPACE_CHARACTERS = ''.join(sorted(string.digits + string.lowercase + string.uppercase + '._-')) MAX_NAMESPACE_LENGTH = 100 MIN_NAMESPACE = '' NAMESPACE_BATCH_SIZE = 50 def _setup_constants(alphabet=NAMESPACE_CHARACTERS, max_length=MAX_NAMESPACE_LENGTH, batch_size=NAMESPACE_BATCH_SIZE): """Calculate derived constant values. Only useful for testing.""" global NAMESPACE_CHARACTERS global MAX_NAMESPACE_LENGTH global MAX_NAMESPACE global _LEX_DISTANCE global NAMESPACE_BATCH_SIZE NAMESPACE_CHARACTERS = alphabet MAX_NAMESPACE_LENGTH = max_length MAX_NAMESPACE = NAMESPACE_CHARACTERS[-1] * MAX_NAMESPACE_LENGTH NAMESPACE_BATCH_SIZE = batch_size # _LEX_DISTANCE will contain the lexical distance between two adjacent # characters in NAMESPACE_CHARACTERS at each character index. This is used # to calculate the ordinal for each string. Example: # NAMESPACE_CHARACTERS = 'ab' # MAX_NAMESPACE_LENGTH = 3 # _LEX_DISTANCE = [1, 3, 7] # '' => 0 # 'a' => 1 # 'aa' => 2 # 'aaa' => 3 # 'aab' => 4 - Distance between 'aaa' and 'aab' is 1. # 'ab' => 5 - Distance between 'aa' and 'ab' is 3. # 'aba' => 6 # 'abb' => 7 # 'b' => 8 - Distance between 'a' and 'b' is 7. # 'ba' => 9 # 'baa' => 10 # 'bab' => 11 # ... # _namespace_to_ord('bab') = (1 * 7 + 1) + (0 * 3 + 1) + (1 * 1 + 1) = 11 _LEX_DISTANCE = [1] for i in range(1, MAX_NAMESPACE_LENGTH): _LEX_DISTANCE.append( _LEX_DISTANCE[i-1] * len(NAMESPACE_CHARACTERS) + 1) del i _setup_constants() def _ord_to_namespace(n, _max_length=None): """Convert a namespace ordinal to a namespace string. Converts an int, representing the sequence number of a namespace ordered lexographically, into a namespace string. >>> _ord_to_namespace(0) '' >>> _ord_to_namespace(1) '-' >>> _ord_to_namespace(2) '--' >>> _ord_to_namespace(3) '---' Args: n: A number representing the lexographical ordering of a namespace. Returns: A string representing the nth namespace in lexographical order. """ if _max_length is None: _max_length = MAX_NAMESPACE_LENGTH length = _LEX_DISTANCE[_max_length - 1] if n == 0: return '' n -= 1 return (NAMESPACE_CHARACTERS[n / length] + _ord_to_namespace(n % length, _max_length - 1)) def _namespace_to_ord(namespace): """Converts a namespace string into an int representing its lexographic order. >>> _namespace_to_ord('') '' >>> _namespace_to_ord('_') 1 >>> _namespace_to_ord('__') 2 Args: namespace: A namespace string. Returns: An int representing the lexographical order of the given namespace string. 
""" n = 0 for i, c in enumerate(namespace): n += (_LEX_DISTANCE[MAX_NAMESPACE_LENGTH - i- 1] * NAMESPACE_CHARACTERS.index(c) + 1) return n def _key_for_namespace(namespace, app): """Return the __namespace__ key for a namespace. Args: namespace: The namespace whose key is requested. app: The id of the application that the key belongs to. Returns: A db.Key representing the namespace. """ if namespace: return db.Key.from_path(metadata.Namespace.KIND_NAME, namespace, _app=app) else: return db.Key.from_path(metadata.Namespace.KIND_NAME, metadata.Namespace.EMPTY_NAMESPACE_ID, _app=app) class NamespaceRange(object): """An inclusive lexographical range of namespaces. This class is immutable. """ def __init__(self, namespace_start=None, namespace_end=None, _app=None): """Initializes a NamespaceRange instance. Args: namespace_start: A string representing the start of the namespace range. namespace_start is included in the range. If namespace_start is None then the lexographically first namespace is used. namespace_end: A string representing the end of the namespace range. namespace_end is included in the range and must be >= namespace_start. If namespace_end is None then the lexographically last namespace is used. Raises: ValueError: if namespace_start > namespace_end. """ if namespace_start is None: namespace_start = MIN_NAMESPACE if namespace_end is None: namespace_end = MAX_NAMESPACE if namespace_start > namespace_end: raise ValueError('namespace_start (%r) > namespace_end (%r)' % ( namespace_start, namespace_end)) self.__namespace_start = namespace_start self.__namespace_end = namespace_end self.__app = _app @property def app(self): return self.__app @property def namespace_start(self): return self.__namespace_start @property def namespace_end(self): return self.__namespace_end @property def is_single_namespace(self): """True if the namespace range only includes a single namespace.""" return self.namespace_start == self.namespace_end def split_range(self): """Splits the NamespaceRange into two nearly equal-sized ranges. Returns: If this NamespaceRange contains a single namespace then a list containing this NamespaceRange is returned. Otherwise a two-element list containing two NamespaceRanges whose total range is identical to this NamespaceRange's is returned. """ if self.is_single_namespace: return [self] mid_point = (_namespace_to_ord(self.namespace_start) + _namespace_to_ord(self.namespace_end)) // 2 return [NamespaceRange(self.namespace_start, _ord_to_namespace(mid_point), _app=self.app), NamespaceRange(_ord_to_namespace(mid_point+1), self.namespace_end, _app=self.app)] def __copy__(self): return self.__class__(self.__namespace_start, self.__namespace_end, self.__app) def __eq__(self, o): return (self.namespace_start == o.namespace_start and self.namespace_end == o.namespace_end) def __hash__(self): return hash((self.namespace_start, self.namespace_end, self.app)) def __repr__(self): if self.app is None: return 'NamespaceRange(namespace_start=%r, namespace_end=%r)' % ( self.namespace_start, self.namespace_end) else: return 'NamespaceRange(namespace_start=%r, namespace_end=%r, _app=%r)' % ( self.namespace_start, self.namespace_end, self.app) def with_start_after(self, after_namespace): """Returns a copy of this NamespaceName with a new namespace_start. Args:<|fim▁hole|> Returns: A NamespaceRange object whose namespace_start is the lexographically next namespace after the given namespace string. Raises: ValueError: if the NamespaceRange includes only a single namespace. 
""" namespace_start = _ord_to_namespace(_namespace_to_ord(after_namespace) + 1) return NamespaceRange(namespace_start, self.namespace_end, _app=self.app) def make_datastore_query(self, cursor=None): """Returns a datastore.Query that generates all namespaces in the range. Args: cursor: start cursor for the query. Returns: A datastore.Query instance that generates db.Keys for each namespace in the NamespaceRange. """ filters = {} filters['__key__ >= '] = _key_for_namespace( self.namespace_start, self.app) filters['__key__ <= '] = _key_for_namespace( self.namespace_end, self.app) return datastore.Query('__namespace__', filters=filters, keys_only=True, cursor=cursor, _app=self.app) def normalized_start(self): """Returns a NamespaceRange with leading non-existant namespaces removed. Returns: A copy of this NamespaceRange whose namespace_start is adjusted to exclude the portion of the range that contains no actual namespaces in the datastore. None is returned if the NamespaceRange contains no actual namespaces in the datastore. """ namespaces_after_key = list(self.make_datastore_query().Run(limit=1)) if not namespaces_after_key: return None namespace_after_key = namespaces_after_key[0].name() or '' return NamespaceRange(namespace_after_key, self.namespace_end, _app=self.app) def to_json_object(self): """Returns a dict representation that can be serialized to JSON.""" obj_dict = dict(namespace_start=self.namespace_start, namespace_end=self.namespace_end) if self.app is not None: obj_dict['app'] = self.app return obj_dict @classmethod def from_json_object(cls, json): """Returns a NamespaceRange from an object deserialized from JSON.""" return cls(json['namespace_start'], json['namespace_end'], _app=json.get('app')) # TODO(user): Implement an option where the returned namespace range is # not normalized using with_start_after to support consistent namespace # queries. @classmethod def split(cls, n, contiguous, can_query=itertools.chain(itertools.repeat(True, 50), itertools.repeat(False)).next, _app=None): """Splits the complete NamespaceRange into n equally-sized NamespaceRanges. Args: n: The maximum number of NamespaceRanges to return. Fewer than n namespaces may be returned. contiguous: If True then the returned NamespaceRanges will cover the entire space of possible namespaces (i.e. from MIN_NAMESPACE to MAX_NAMESPACE) without gaps. If False then the returned NamespaceRanges may exclude namespaces that don't appear in the datastore. can_query: A function that returns True if split() can query the datastore to generate more fair namespace range splits, and False otherwise. If not set then split() is allowed to make 50 datastore queries. Returns: A list of at most n NamespaceRanges representing a near-equal distribution of actual existant datastore namespaces. The returned list will be sorted lexographically. Raises: ValueError: if n is < 1. 
""" if n < 1: raise ValueError('n must be >= 1') ns_range = NamespaceRange(_app=_app) if can_query(): ns_range = ns_range.normalized_start() if ns_range is None: if contiguous: return [NamespaceRange(_app=_app)] else: return [] ranges = [ns_range] singles = [] while ranges and (len(ranges) + len(singles)) < n: namespace_range = ranges.pop(0) if namespace_range.is_single_namespace: singles.append(namespace_range) else: left, right = namespace_range.split_range() if can_query(): right = right.normalized_start() if right is not None: ranges.append(right) ranges.append(left) ns_ranges = sorted(singles + ranges, key=lambda ns_range: ns_range.namespace_start) if contiguous: if not ns_ranges: # This condition is possible if every namespace was deleted after the # first call to ns_range.normalized_start(). return [NamespaceRange(_app=_app)] continuous_ns_ranges = [] for i in range(len(ns_ranges)): if i == 0: namespace_start = MIN_NAMESPACE else: namespace_start = ns_ranges[i].namespace_start if i == len(ns_ranges) - 1: namespace_end = MAX_NAMESPACE else: namespace_end = _ord_to_namespace( _namespace_to_ord(ns_ranges[i+1].namespace_start) - 1) continuous_ns_ranges.append(NamespaceRange(namespace_start, namespace_end, _app=_app)) return continuous_ns_ranges else: return ns_ranges def __iter__(self): """Iterate over all the namespaces within this range.""" cursor = None while True: query = self.make_datastore_query(cursor=cursor) count = 0 for ns_key in query.Run(limit=NAMESPACE_BATCH_SIZE): count += 1 yield ns_key.name() or '' if count < NAMESPACE_BATCH_SIZE: break cursor = query.GetCursor() def get_namespace_keys(app, limit): """Get namespace keys.""" ns_query = datastore.Query('__namespace__', keys_only=True, _app=app) return list(ns_query.Run(limit=limit))<|fim▁end|>
after_namespace: A namespace string.
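The ordinal scheme in _setup_constants() above is easiest to check with the toy alphabet from its own comment (alphabet 'ab', max length 3, so _LEX_DISTANCE = [1, 3, 7]); a self-contained re-derivation that reproduces the worked table:

# Re-derivation of the ordinal mapping documented in _setup_constants above,
# using the toy alphabet from its comment.
ALPHABET = 'ab'
MAX_LEN = 3
LEX = [1]
for _ in range(1, MAX_LEN):
    LEX.append(LEX[-1] * len(ALPHABET) + 1)  # -> [1, 3, 7]

def ord_of(ns):
    n = 0
    for i, c in enumerate(ns):
        n += LEX[MAX_LEN - i - 1] * ALPHABET.index(c) + 1
    return n

# Matches the worked table, including _namespace_to_ord('bab') = 11.
table = {'': 0, 'a': 1, 'aa': 2, 'aab': 4, 'ab': 5, 'b': 8, 'bab': 11}
assert all(ord_of(k) == v for k, v in table.items())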
<|file_name|>types.tsx<|end_file_name|><|fim▁begin|>/* * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0 * (the "License"). You may not use this work except in compliance with the License, which is * available at www.apache.org/licenses/LICENSE-2.0 * * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied, as more fully set forth in the License. * * See the NOTICE file distributed with this work for information regarding copyright ownership. */ import { LineSerieData } from '@nivo/line'; import { AxiosResponse } from 'axios'; import { ICounter } from '@alluxio/common-ui/src/constants'; export interface IMetrics { cacheHitLocal: string; cacheHitRemote: string; cacheMiss: string; masterCapacityFreePercentage: number; masterCapacityUsedPercentage: number; masterUnderfsCapacityFreePercentage: number; masterUnderfsCapacityUsedPercentage: number; rpcInvocationMetrics: { [key: string]: ICounter; }; timeSeriesMetrics: LineSerieData[]; totalBytesReadLocal: string; totalBytesReadLocalThroughput: string; totalBytesReadDomainSocket: string; totalBytesReadDomainSocketThroughput: string; totalBytesReadRemote: string; totalBytesReadRemoteThroughput: string; totalBytesReadUfs: string; totalBytesReadUfsThroughput: string; totalBytesWrittenLocal: string; totalBytesWrittenLocalThroughput: string; totalBytesWrittenAlluxio: string; totalBytesWrittenAlluxioThroughput: string; totalBytesWrittenDomainSocket: string; totalBytesWrittenDomainSocketThroughput: string; totalBytesWrittenUfs: string; totalBytesWrittenUfsThroughput: string; ufsOps: { [key: string]: { [key: string]: number; }; }; ufsOpsSaved: { [key: string]: { [key: string]: number; }; }; ufsReadSize: {<|fim▁hole|> [key: string]: string; }; ufsWriteSize: { [key: string]: string; }; operationMetrics: { [key: string]: ICounter; }; } export enum MetricsActionTypes { FETCH_REQUEST = '@@metrics/FETCH_REQUEST', FETCH_SUCCESS = '@@metrics/FETCH_SUCCESS', FETCH_ERROR = '@@metrics/FETCH_ERROR', } export interface IMetricsState { readonly data: IMetrics; readonly errors?: AxiosResponse; readonly loading: boolean; readonly response?: AxiosResponse; }<|fim▁end|>
<|file_name|>LCMRover.py<|end_file_name|><|fim▁begin|>''' Created on Dec 3, 2014 @author: gearsad ''' import sys from roverpylot import rover from bot_update_t import bot_update_t from bot_control_command_t import bot_control_command_t import lcm # Try to start OpenCV for video try: import cv except: cv = None class LCMRover(rover.Rover): ''' A rover using LCM for control and camera feed upstream ''' def Initialize(self, botname): ''' Init the rover and store the name ''' self.__botname = botname self.__lcm = lcm.LCM("udpm://239.255.76.67:7667?ttl=1") self.__controlSubscription = self.__lcm.subscribe("ARNerve_Bot_Control_" + self.__botname, self.UpdateBotControlHandler) self.__lightsOn = 0 self.__infraredOn = 0 def processVideo(self, jpegbytes): #try: camUpdate = bot_update_t() camUpdate.name = self.__botname camUpdate.numBytes_cameraFrameJpeg = len(jpegbytes) camUpdate.cameraFrameJpeg = jpegbytes # Get the battery health as well battery = self.getBatteryPercentage() camUpdate.batteryPercentage = battery self.__lcm.publish("ARNerve_Bot_Update_" + self.__botname, camUpdate.encode()) #except: # print "Exception", sys.exc_info()[0] # pass def Update(self): ''' Update the LCM ''' self.__lcm.handle() def Disconnect(self): self.__lcm.unsubscribe(self.__controlSubscription)<|fim▁hole|>
 def UpdateBotControlHandler(self, channel, data): ''' Get the updated bot parameters and send them to the bot. ''' controlParams = bot_control_command_t.decode(data) # Check if it is the right bot. if self.__botname != controlParams.name: return self.setTreads(controlParams.botTreadVelLeft, controlParams.botTreadVelright) print "Setting the treads to {0}, {1}".format(controlParams.botTreadVelLeft, controlParams.botTreadVelright) if self.__lightsOn != controlParams.isLightsOn: if controlParams.isLightsOn != 0: self.turnLightsOn() else: self.turnLightsOff() self.__lightsOn = controlParams.isLightsOn if self.__infraredOn != controlParams.isInfraredOn: if controlParams.isInfraredOn != 0: self.turnInfraredOn() else: self.turnInfraredOff() self.__infraredOn = controlParams.isInfraredOn<|fim▁end|>
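A hypothetical controller-side counterpart to the rover above — it assumes the same generated LCM bindings are importable and that a rover named "rover1" is listening; the field names are exactly those read in UpdateBotControlHandler():

# Hypothetical controller publishing to the rover above; assumes the same
# generated bot_control_command_t bindings are on the path.
import lcm
from bot_control_command_t import bot_control_command_t

lc = lcm.LCM("udpm://239.255.76.67:7667?ttl=1")  # same URL as the rover
cmd = bot_control_command_t()
cmd.name = "rover1"            # must match the rover's botname
cmd.botTreadVelLeft = 0.5
cmd.botTreadVelright = 0.5     # field name (sic) as used above
cmd.isLightsOn = 1
cmd.isInfraredOn = 0
lc.publish("ARNerve_Bot_Control_rover1", cmd.encode())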
<|file_name|>cm.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import, print_function, division import numpy as np from matplotlib.colors import LinearSegmentedColormap,ListedColormap import sys __author__ = "Juhyeong Kang " __email__ = "[email protected]" def create_cdict(r, g, b): i = np.linspace(0, 1, 256) cdict = dict( (name, list(zip(i, el / 255.0, el / 255.0))) for el, name in [(r, 'red'), (g, 'green'), (b, 'blue')] ) return cdict def hac(r=False): hr=np.array([0, 0, 1, 2, 3, 4, 4, 6, 6, 7, 8, 9, 10, 10, 12, 12, 13, 14, 15, 16, 16, 18, 18, 19, 20, 21, 22, 23, 24, 25, 25, 26, 27, 28, 29, 30, 31, 31, 33, 33, 34, 35, 36, 37, 37, 39, 39, 40, 41, 42, 43, 43, 45, 45, 46, 47, 48, 49, 50, 51, 51, 52, 53, 54, 55, 56, 57, 58, 58, 59, 60, 61, 62, 63, 64, 64, 66, 66, 67, 68, 69, 70, 70, 72, 72, 73, 74, 75, 76, 76, 78, 78, 79, 80, 81, 82, 83, 84, 84, 86, 87, 88, 89, 91, 92, 93, 94, 96, 97, 98, 99, 100, 102, 102, 104, 105, 106, 107, 108, 110, 111, 112, 113, 115, 116, 117, 118, 120, 121, 121, 123, 124, 125, 126, 128, 129, 130, 131, 132, 134, 135, 136, 137, 139, 139, 141, 142, 143, 144, 145, 147, 148, 149, 150, 152, 153, 154, 155, 156, 158, 158, 160, 161, 162, 163, 165, 166, 167, 168, 169, 171, 172, 173, 174, 176, 176, 178, 178, 179, 179, 179, 180, 180, 180, 181, 181, 181, 182, 182, 182, 183, 183, 183, 184, 186, 187, 188, 189, 190, 191, 192, 193, 195, 196, 197, 198, 199, 200, 201, 202, 204, 205, 206, 207, 208, 209, 210, 212, 213, 214, 215, 216, 217, 218, 219, 221, 222, 223, 224, 225, 226, 227, 228, 230, 231, 232, 233, 234, 235, 237, 238, 239, 240, 241, 242, 243, 244, 245, 247, 248, 249, 250, 251, 252, 253, 255]) hg=np.array([0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 6, 6, 6, 6, 6, 7, 7, 7, 7, 7, 8, 8, 8, 8, 8, 9, 9, 9, 9, 9, 10, 10, 10, 10, 10, 11, 11, 11, 11, 11, 12, 12, 12, 12, 12, 13, 13, 13, 13, 13, 14, 14, 14, 14, 14, 15, 15, 15, 15, 15, 16, 16, 16, 16, 16, 17, 17, 17, 17, 17, 18, 18, 18, 18, 18, 19, 19, 19, 20, 20, 21, 22, 22, 23, 23, 24, 24, 25, 25, 26, 26, 27, 27, 28, 28, 29, 29, 30, 30, 31, 31, 33, 34, 36, 38, 39, 41, 43, 44, 46, 47, 49, 51, 53, 54, 56, 58, 59, 61, 62, 64, 66, 67, 69, 71, 73, 74, 76, 77, 79, 81, 82, 84, 86, 88, 89, 91, 92, 94, 96, 97, 99, 101, 102, 104, 106, 107, 109, 110, 112, 114, 116, 117, 119, 121, 122, 124, 125, 127, 129, 130, 132, 134, 136, 137, 138, 140, 142, 144, 145, 147, 149, 150, 152, 153, 155, 157, 158, 160, 162, 164, 165, 166, 168, 170, 172, 173, 175, 177, 179, 180, 181, 183, 185, 187, 188, 190, 192, 193, 195, 196, 198, 200, 201, 203, 205, 207, 208, 210, 211, 213, 215, 216, 218, 220, 221, 223, 225, 226, 228, 229, 231, 233, 235, 236, 238, 240, 241, 243, 244, 246, 248, 250, 251, 253, 255]) hb=np.array([0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7, 8, 8, 8, 9, 9, 9, 9, 10, 10, 10, 10, 11, 11, 11, 12, 12, 12, 12, 13, 13, 13, 13, 14, 14, 14, 14, 15, 15, 15, 16, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 19, 19, 19, 19, 20, 20, 20, 20, 21, 21, 21, 22, 22, 22, 22, 23, 23, 23, 23, 24, 24, 24, 25, 25, 25, 25, 26, 26, 26, 26, 27, 27, 27, 28, 28, 28, 28, 29, 29, 29, 29, 30, 30, 30, 30, 31, 31, 31, 32, 32, 32, 32, 33, 33, 33, 33, 34, 34, 34, 35, 35, 35, 35, 36, 36, 36, 36, 37, 37, 37, 38, 38, 38, 38, 39, 39, 39, 39, 40, 40, 40, 41, 41, 41, 41, 42, 42, 42, 42, 43, 43, 43, 44, 44, 44, 44, 45, 45, 45, 45, 46, 46, 46, 47, 47, 47, 47, 48, 48, 48, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 59, 62, 65, 68, 71, 74, 78, 81, 84, 87, 90, 93, 96, 99, 102, 105, 108, 111, 114, 117, 
120, 123, 126, 130, 133, 136, 138, 141, 144, 148, 151, 154, 157, 160, 163, 166, 169, 172, 175, 178, 181, 184, 187, 190, 193, 196, 199, 203, 206, 209, 212, 215, 217, 221, 224, 227, 230, 233, 236, 239, 242, 245, 248, 251, 255]) hadic=create_cdict(hr,hg,hb) hardic=create_cdict(hr[::-1],hg[::-1],hb[::-1]) if r: return LinearSegmentedColormap('mytables',hardic) else: return LinearSegmentedColormap('mytables',hadic)<|fim▁hole|> cr=np.array([0, 0, 0, 0, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 6, 6, 6, 6, 7, 7, 7, 8, 8, 8, 8, 9, 9, 9, 10, 10, 10, 11, 11, 11, 11, 12, 12, 12, 13, 13, 13, 13, 14, 14, 14, 15, 15, 15, 15, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 19, 19, 19, 19, 20, 20, 20, 21, 21, 21, 22, 22, 22, 22, 23, 23, 23, 24, 24, 24, 24, 25, 25, 25, 26, 26, 26, 26, 27, 27, 27, 28, 28, 30, 31, 32, 34, 35, 36, 37, 39, 40, 41, 42, 43, 45, 46, 47, 49, 50, 51, 53, 53, 55, 56, 57, 59, 60, 61, 63, 64, 65, 67, 67, 69, 70, 71, 73, 74, 75, 76, 78, 79, 80, 81, 83, 84, 85, 86, 88, 89, 90, 92, 92, 94, 95, 96, 98, 99, 100, 102, 103, 104, 106, 106, 108, 109, 110, 112, 113, 114, 115, 117, 118, 119, 120, 122, 123, 124, 125, 127, 128, 129, 130, 130, 132, 133, 133, 135, 136, 136, 138, 138, 139, 141, 141, 142, 144, 146, 148, 149, 151, 153, 155, 157, 158, 160, 162, 164, 166, 167, 169, 171, 172, 174, 176, 178, 180, 181, 183, 185, 187, 189, 190, 192, 194, 196, 198, 199, 201, 203, 204, 206, 208, 210, 212, 213, 215, 217, 219, 221, 222, 224, 226, 228, 230, 232, 233, 235, 236, 238, 240, 242, 244, 245, 247, 249, 251, 253, 255]) cg=np.array([0, 0, 1, 1, 2, 3, 3, 4, 4, 5, 6, 6, 7, 7, 8, 9, 9, 10, 10, 11, 12, 12, 13, 13, 14, 15, 15, 16, 16, 17, 18, 18, 19, 19, 20, 21, 21, 22, 22, 23, 24, 24, 25, 25, 26, 27, 27, 28, 28, 29, 30, 30, 31, 31, 32, 33, 33, 34, 34, 35, 36, 36, 37, 37, 38, 39, 39, 40, 40, 41, 42, 42, 43, 43, 44, 45, 45, 46, 46, 47, 48, 48, 49, 49, 50, 51, 51, 52, 52, 53, 54, 54, 55, 55, 56, 57, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 78, 79, 81, 82, 83, 84, 86, 87, 88, 90, 91, 92, 94, 95, 96, 97, 99, 100, 101, 103, 104, 105, 107, 108, 109, 110, 112, 113, 114, 116, 117, 118, 120, 121, 122, 124, 125, 126, 127, 129, 130, 131, 133, 134, 135, 137, 138, 139, 140, 142, 143, 144, 146, 147, 148, 150, 151, 152, 153, 155, 156, 157, 159, 160, 161, 162, 164, 165, 166, 168, 169, 170, 172, 173, 174, 175, 177, 178, 179, 181, 182, 183, 185, 186, 187, 188, 190, 191, 192, 194, 195, 196, 197, 199, 200, 201, 203, 204, 205, 207, 208, 209, 210, 212, 213, 214, 216, 217, 218, 220, 221, 222, 223, 225, 226, 227, 229, 230, 231, 232, 234, 235, 236, 238, 239, 240, 242, 243, 244, 245, 247, 248, 249, 251, 252, 253, 255]) cb=np.array([0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7, 8, 8, 8, 9, 9, 9, 9, 10, 10, 10, 10, 11, 11, 11, 12, 12, 12, 12, 13, 13, 13, 13, 14, 14, 14, 14, 15, 15, 15, 16, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 19, 19, 19, 19, 20, 20, 20, 20, 21, 21, 21, 22, 22, 22, 22, 23, 23, 23, 23, 24, 24, 24, 25, 25, 25, 25, 26, 26, 26, 26, 27, 27, 27, 28, 28, 28, 28, 29, 29, 29, 29, 30, 30, 30, 30, 31, 31, 31, 32, 32, 32, 32, 33, 33, 33, 33, 34, 34, 34, 35, 35, 35, 35, 36, 36, 36, 36, 37, 37, 37, 38, 38, 38, 38, 39, 39, 39, 39, 40, 40, 40, 41, 41, 41, 41, 42, 42, 42, 42, 43, 43, 43, 44, 44, 44, 44, 45, 45, 45, 45, 46, 46, 46, 47, 47, 47, 47, 48, 48, 48, 48, 50, 53, 55, 57, 60, 62, 65, 67, 69, 72, 74, 77, 80, 83, 86, 89, 92, 95, 97, 100, 103, 106, 109, 111, 115, 117, 120, 123, 126, 129, 131, 135, 137, 140, 143, 146, 149, 151, 154, 157, 
160, 163, 166, 169, 172, 174, 177, 180, 183, 186, 188, 192, 194, 197, 200, 203, 206, 208, 212, 214, 217, 220, 223, 226, 228, 231, 234, 237, 240, 243, 246, 249, 251, 255]) cadic=create_cdict(cr,cg,cb) cardic=create_cdict(cr[::-1],cg[::-1],cb[::-1]) if r: return LinearSegmentedColormap('mytables',cardic) else: return LinearSegmentedColormap('mytables',cadic) def nac(r= False): nr=np.array([0, 0, 0, 1, 2, 3, 3, 4, 4, 5, 6, 6, 7, 8, 9, 9, 10, 10, 11, 12, 12, 13, 13, 14, 15, 16, 16, 17, 18, 18, 19, 19, 20, 21, 22, 22, 23, 23, 24, 25, 25, 26, 27, 27, 28, 29, 29, 30, 31, 31, 32, 32, 33, 34, 35, 35, 36, 36, 37, 38, 38, 39, 40, 40, 41, 42, 42, 43, 44, 44, 45, 45, 46, 47, 48, 48, 49, 49, 50, 51, 51, 52, 53, 54, 54, 55, 55, 56, 57, 57, 58, 58, 59, 60, 61, 61, 62, 63, 63, 65, 66, 68, 69, 71, 72, 73, 75, 76, 77, 79, 80, 81, 83, 84, 86, 87, 89, 90, 91, 93, 94, 95, 97, 98, 99, 101, 102, 104, 105, 106, 108, 109, 111, 112, 113, 115, 116, 117, 119, 120, 121, 123, 124, 126, 127, 129, 130, 131, 133, 134, 135, 137, 138, 139, 141, 142, 144, 145, 146, 148, 149, 151, 152, 153, 155, 156, 157, 158, 160, 162, 163, 164, 166, 167, 169, 170, 171, 172, 173, 174, 174, 175, 176, 176, 177, 178, 179, 180, 180, 181, 182, 182, 183, 184, 185, 187, 187, 189, 190, 191, 192, 193, 194, 196, 197, 198, 199, 200, 201, 202, 203, 204, 206, 207, 208, 209, 210, 211, 213, 213, 215, 216, 217, 218, 219, 220, 222, 222, 224, 225, 226, 227, 228, 229, 231, 232, 233, 234, 235, 236, 237, 239, 240, 241, 242, 243, 244, 245, 246, 248, 248, 250, 251, 252, 253, 255]) ng=np.array([0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 13, 14, 14, 15, 15, 16, 16, 17, 17, 18, 18, 18, 19, 19, 20, 20, 21, 21, 22, 22, 22, 23, 23, 24, 24, 25, 25, 26, 26, 27, 27, 27, 28, 28, 29, 29, 30, 30, 31, 31, 31, 32, 32, 33, 33, 34, 34, 35, 35, 36, 36, 36, 37, 37, 38, 38, 39, 39, 40, 40, 40, 41, 41, 42, 42, 43, 43, 44, 45, 45, 46, 46, 47, 48, 48, 49, 49, 50, 51, 51, 52, 53, 53, 54, 54, 55, 56, 56, 57, 57, 59, 60, 62, 63, 65, 66, 68, 69, 71, 72, 73, 75, 76, 78, 79, 81, 82, 84, 85, 86, 88, 89, 91, 92, 94, 95, 97, 98, 99, 101, 102, 104, 105, 107, 108, 110, 111, 113, 114, 116, 117, 119, 120, 122, 123, 124, 126, 127, 129, 130, 132, 133, 135, 136, 137, 139, 140, 142, 143, 145, 146, 148, 149, 150, 152, 154, 155, 157, 158, 160, 161, 163, 164, 165, 167, 168, 170, 171, 173, 174, 176, 177, 178, 180, 181, 183, 184, 186, 187, 189, 190, 191, 193, 194, 196, 197, 199, 200, 202, 203, 205, 206, 208, 209, 211, 212, 214, 215, 216, 218, 219, 221, 222, 224, 225, 227, 228, 229, 231, 232, 234, 235, 237, 238, 240, 241, 242, 244, 246, 247, 249, 250, 252, 253, 255]) nb=np.array([0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5, 6, 6, 6, 6, 6, 6, 6, 7, 7, 7, 7, 7, 7, 7, 8, 8, 8, 8, 8, 8, 8, 8, 9, 9, 9, 9, 9, 9, 9, 10, 10, 10, 10, 10, 10, 10, 10, 11, 11, 11, 11, 11, 11, 11, 12, 12, 12, 12, 12, 12, 12, 13, 13, 13, 13, 13, 13, 13, 13, 14, 14, 14, 14, 14, 14, 14, 15, 15, 15, 15, 15, 15, 15, 16, 16, 16, 16, 16, 16, 16, 16, 17, 17, 17, 17, 17, 17, 17, 18, 18, 18, 18, 18, 18, 18, 18, 19, 19, 19, 19, 19, 19, 19, 20, 20, 20, 20, 20, 20, 20, 21, 21, 21, 21, 21, 21, 21, 21, 22, 22, 22, 22, 22, 22, 22, 23, 23, 23, 23, 23, 23, 23, 24, 24, 24, 24, 24, 25, 27, 29, 30, 32, 34, 36, 37, 39, 42, 45, 48, 52, 55, 58, 62, 65, 69, 72, 75, 79, 82, 85, 88, 92, 95, 98, 102, 105, 108, 111, 115, 118, 122, 125, 128, 131, 134, 138, 141, 145, 148, 151, 155, 158, 161, 165, 168, 171, 174, 178, 181, 
184, 188, 191, 194, 198, 201, 205, 208, 211, 214, 218, 221, 224, 228, 231, 234, 237, 241, 244, 248, 251, 255]) nadic=create_cdict(nr,ng,nb) nardic=create_cdict(nr[::-1],ng[::-1],nb[::-1]) if r: return LinearSegmentedColormap('mytables',nardic) else: return LinearSegmentedColormap('mytables',nadic) def fec(r= False): fr=np.array([0, 0, 0, 0, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 6, 6, 6, 6, 7, 7, 7, 8, 8, 8, 8, 9, 9, 9, 10, 10, 10, 11, 11, 11, 11, 12, 12, 12, 13, 13, 13, 13, 14, 14, 14, 15, 15, 15, 15, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 19, 19, 19, 19, 20, 20, 20, 21, 21, 21, 22, 22, 22, 22, 23, 23, 23, 24, 24, 24, 24, 25, 25, 25, 26, 26, 26, 26, 27, 27, 27, 28, 28, 30, 31, 32, 34, 35, 36, 37, 39, 40, 41, 42, 43, 45, 46, 47, 49, 50, 51, 53, 53, 55, 56, 57, 59, 60, 61, 63, 64, 65, 67, 67, 69, 70, 71, 73, 74, 75, 76, 78, 79, 80, 81, 83, 84, 85, 86, 88, 89, 90, 92, 92, 94, 95, 96, 98, 99, 100, 102, 103, 104, 106, 106, 108, 109, 110, 112, 113, 114, 115, 117, 118, 119, 120, 122, 123, 124, 125, 127, 128, 129, 130, 130, 132, 133, 133, 135, 136, 136, 138, 138, 139, 141, 141, 142, 144, 146, 148, 149, 151, 153, 155, 157, 158, 160, 162, 164, 166, 167, 169, 171, 172, 174, 176, 178, 180, 181, 183, 185, 187, 189, 190, 192, 194, 196, 198, 199, 201, 203, 204, 206, 208, 210, 212, 213, 215, 217, 219, 221, 222, 224, 226, 228, 230, 232, 233, 235, 236, 238, 240, 242, 244, 245, 247, 249, 251, 253, 255]) fg=np.array([0, 0, 1, 1, 2, 3, 3, 4, 4, 5, 6, 6, 7, 7, 8, 9, 9, 10, 10, 11, 12, 12, 13, 13, 14, 15, 15, 16, 16, 17, 18, 18, 19, 19, 20, 21, 21, 22, 22, 23, 24, 24, 25, 25, 26, 27, 27, 28, 28, 29, 30, 30, 31, 31, 32, 33, 33, 34, 34, 35, 36, 36, 37, 37, 38, 39, 39, 40, 40, 41, 42, 42, 43, 43, 44, 45, 45, 46, 46, 47, 48, 48, 49, 49, 50, 51, 51, 52, 52, 53, 54, 54, 55, 55, 56, 57, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 78, 79, 81, 82, 83, 84, 86, 87, 88, 90, 91, 92, 94, 95, 96, 97, 99, 100, 101, 103, 104, 105, 107, 108, 109, 110, 112, 113, 114, 116, 117, 118, 120, 121, 122, 124, 125, 126, 127, 129, 130, 131, 133, 134, 135, 137, 138, 139, 140, 142, 143, 144, 146, 147, 148, 150, 151, 152, 153, 155, 156, 157, 159, 160, 161, 162, 164, 165, 166, 168, 169, 170, 172, 173, 174, 175, 177, 178, 179, 181, 182, 183, 185, 186, 187, 188, 190, 191, 192, 194, 195, 196, 197, 199, 200, 201, 203, 204, 205, 207, 208, 209, 210, 212, 213, 214, 216, 217, 218, 220, 221, 222, 223, 225, 226, 227, 229, 230, 231, 232, 234, 235, 236, 238, 239, 240, 242, 243, 244, 245, 247, 248, 249, 251, 252, 253, 255]) fb=np.array([0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7, 8, 8, 8, 9, 9, 9, 9, 10, 10, 10, 10, 11, 11, 11, 12, 12, 12, 12, 13, 13, 13, 13, 14, 14, 14, 14, 15, 15, 15, 16, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 19, 19, 19, 19, 20, 20, 20, 20, 21, 21, 21, 22, 22, 22, 22, 23, 23, 23, 23, 24, 24, 24, 25, 25, 25, 25, 26, 26, 26, 26, 27, 27, 27, 28, 28, 28, 28, 29, 29, 29, 29, 30, 30, 30, 30, 31, 31, 31, 32, 32, 32, 32, 33, 33, 33, 33, 34, 34, 34, 35, 35, 35, 35, 36, 36, 36, 36, 37, 37, 37, 38, 38, 38, 38, 39, 39, 39, 39, 40, 40, 40, 41, 41, 41, 41, 42, 42, 42, 42, 43, 43, 43, 44, 44, 44, 44, 45, 45, 45, 45, 46, 46, 46, 47, 47, 47, 47, 48, 48, 48, 48, 50, 53, 55, 57, 60, 62, 65, 67, 69, 72, 74, 77, 80, 83, 86, 89, 92, 95, 97, 100, 103, 106, 109, 111, 115, 117, 120, 123, 126, 129, 131, 135, 137, 140, 143, 146, 149, 151, 154, 157, 160, 163, 166, 169, 172, 174, 177, 180, 183, 186, 188, 192, 194, 197, 200, 203, 206, 208, 212, 214, 217, 
220, 223, 226, 228, 231, 234, 237, 240, 243, 246, 249, 251, 255]) fedic=create_cdict(fb,fg,fr) ferdic=create_cdict(fb[::-1],fg[::-1],fr[::-1]) if r: return LinearSegmentedColormap('mytables',ferdic) else: return LinearSegmentedColormap('mytables',fedic) def allwhite(): return ListedColormap(['w','w','w']) def allblack(): return ListedColormap(['k','k','k']) setattr(sys.modules[__name__],'ca',cac()) setattr(sys.modules[__name__],'ca_r',cac(r=True)) setattr(sys.modules[__name__],'ha',hac()) setattr(sys.modules[__name__],'ha_r',hac(r=True)) setattr(sys.modules[__name__],'na',nac()) setattr(sys.modules[__name__],'na_r',nac(r=True)) setattr(sys.modules[__name__],'fe',fec()) setattr(sys.modules[__name__],'fe_r',fec(r=True)) setattr(sys.modules[__name__],'allwhite',allwhite()) setattr(sys.modules[__name__],'allblack',allblack())<|fim▁end|>
def cac(r=False):
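Because the module registers each table on itself via setattr (ca, ha, na, fe, their _r reverses, plus allwhite/allblack), the tables drop into Matplotlib like any colormap object; a usage sketch assuming the module above is importable as cm:

# Usage sketch for the colormap module above; assumes it is on the path as "cm".
import numpy as np
import matplotlib.pyplot as plt
import cm  # the module defined above

data = np.random.rand(64, 64)
plt.imshow(data, cmap=cm.ha)  # H-alpha table; cm.ha_r for the reverse
plt.colorbar()
plt.show()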
<|file_name|>plot.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python """ This script plots various quantities. """ from __future__ import division, print_function import numpy as np import pandas as pd import matplotlib.pyplot as plt import argparse import os ylabels = {"cl": r"$C_l$", "cd": r"$C_d$", "cl/cd": r"$C_l/C_d$", "k": "$k$", "omega": r"$\omega$", "epsilon": r"$\epsilon$"} 
<|fim▁hole|>
 df = pd.read_csv("processed/NACA{}_{:.1e}.csv".format(foil, Re)) plt.figure() if quantity == "cl/cd": q = df.cl/df.cd else: q = df[quantity] plt.plot(df.alpha_deg, q, "-o") plt.xlabel(r"$\alpha$ (deg)") plt.ylabel(ylabels[quantity]) plt.grid(True) plt.tight_layout() if __name__ == "__main__": try: import seaborn seaborn.set(style="white", context="notebook", font_scale=1.5) except ImportError: print("Could not import seaborn for plot styling. Try") print("\n conda install seaborn\n\nor") print("\n pip install seaborn\n") parser = argparse.ArgumentParser(description="Plotting results") parser.add_argument("quantity", nargs="?", default="cl/cd", help="Which quantity to plot", choices=["cl", "cd", "cl/cd", "k", "omega", "epsilon"]) parser.add_argument("--foil", "-f", help="Foil", default="0012") parser.add_argument("--Reynolds", "-R", help="Reynolds number", default=2e5) parser.add_argument("--save", "-s", action="store_true", help="Save plots") parser.add_argument("--noshow", action="store_true", default=False, help="Do not show") args = parser.parse_args() plot_foil_perf(args.quantity, args.foil, float(args.Reynolds)) if args.save: if not os.path.isdir("figures"): os.mkdir("figures") plt.savefig("figures/{}.pdf".format(args.quantity)) if not args.noshow: plt.show()<|fim▁end|>
def plot_foil_perf(quantity="cl/cd", foil="0012", Re=2e5):
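plot.py is normally driven from the command line (e.g. python plot.py cl/cd -f 0012 -R 2e5); the same call can be made in-process, assuming processed/NACA0012_2.0e+05.csv exists with the alpha_deg/cl/cd columns implied by the read_csv format string:

# In-process equivalent of the CLI path above; the CSV layout is implied by
# the format string and column accesses in plot_foil_perf.
import matplotlib.pyplot as plt
from plot import plot_foil_perf

plot_foil_perf(quantity="cl/cd", foil="0012", Re=2e5)
plt.show()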
<|file_name|>printer.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python """ @author: Manuel F Martinez <[email protected]> @organization: Bashlinux @copyright: Copyright (c) 2012 Bashlinux @license: GNU GPL v3 """ import usb.core import usb.util import serial import socket from .escpos import * from .constants import * from .exceptions import * class Usb(Escpos): """ Define USB printer """ def __init__(self, idVendor, idProduct, interface=0, in_ep=0x82, out_ep=0x01): """ @param idVendor : Vendor ID @param idProduct : Product ID @param interface : USB device interface @param in_ep : Input end point @param out_ep : Output end point """ self.idVendor = idVendor self.idProduct = idProduct self.interface = interface self.in_ep = in_ep self.out_ep = out_ep self.open() def open(self): """ Search device on USB tree and set is as escpos device """ self.device = usb.core.find(idVendor=self.idVendor, idProduct=self.idProduct) if self.device is None: print("Cable isn't plugged in") check_driver = None try: check_driver = self.device.is_kernel_driver_active(0) except NotImplementedError: pass if check_driver is None or check_driver: try: self.device.detach_kernel_driver(0) except usb.core.USBError as e: if check_driver is not None: print("Could not detatch kernel driver: %s" % str(e)) try: self.device.set_configuration() self.device.reset() except usb.core.USBError as e: print("Could not set configuration: %s" % str(e)) def _raw(self, msg): """ Print any command sent in raw format """ self.device.write(self.out_ep, msg, self.interface) def __del__(self): """ Release USB interface """ if self.device: usb.util.dispose_resources(self.device) self.device = None class Serial(Escpos): """ Define Serial printer """ def __init__(self, devfile="/dev/ttyS0", baudrate=9600, bytesize=8, timeout=1, parity=serial.PARITY_NONE, stopbits=serial.STOPBITS_ONE, xonxoff=False , dsrdtr=True): """ @param devfile : Device file under dev filesystem @param baudrate : Baud rate for serial transmission @param bytesize : Serial buffer size @param timeout : Read/Write timeout @param parity : Parity checking @param stopbits : Number of stop bits @param xonxoff : Software flow control @param dsrdtr : Hardware flow control (False to enable RTS/CTS) """ self.devfile = devfile self.baudrate = baudrate self.bytesize = bytesize self.timeout = timeout self.parity = parity self.stopbits = stopbits self.xonxoff = xonxoff self.dsrdtr = dsrdtr self.open() def open(self): """ Setup serial port and set is as escpos device """ self.device = serial.Serial(port=self.devfile, baudrate=self.baudrate, bytesize=self.bytesize, parity=self.parity, stopbits=self.stopbits, timeout=self.timeout, xonxoff=self.xonxoff, dsrdtr=self.dsrdtr) if self.device is not None: print("Serial printer enabled") else: print("Unable to open serial printer on: %s" % self.devfile) def _raw(self, msg): """ Print any command sent in raw format """ self.device.write(msg) def __del__(self): """ Close Serial interface """ if self.device is not None: self.device.close() class Network(Escpos): """ Define Network printer """ def __init__(self,host,port=9100): """ @param host : Printer's hostname or IP address @param port : Port to write to """ self.host = host self.port = port<|fim▁hole|> def open(self): """ Open TCP socket and set it as escpos device """ self.device = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.device.connect((self.host, self.port)) if self.device is None: print("Could not open socket for %s" % self.host) def _raw(self, msg): """ Print any command sent in 
raw format """ if isinstance(msg, str): self.device.send(msg.encode()) else: self.device.send(msg) def __del__(self): """ Close TCP connection """ self.device.close() def close(self): self.__del__() class File(Escpos): """ Define Generic file printer """ def __init__(self, devfile="/dev/usb/lp0"): """ @param devfile : Device file under dev filesystem """ self.devfile = devfile self.open() def open(self): """ Open system file """ self.device = open(self.devfile, "wb") if self.device is None: print("Could not open the specified file %s" % self.devfile) def _raw(self, msg): """ Print any command sent in raw format """ self.device.write(msg); def __del__(self): """ Close system file """ self.device.close()<|fim▁end|>
self.open()
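All four transports share the inherited Escpos surface and the _raw() hook shown above, so choosing one is just choosing a constructor. A hedged usage sketch — the address and the ESC/POS init bytes (ESC @) are illustrative, not defined in this module:

# Usage sketch for the printer classes above; address and raw bytes are
# illustrative, not taken from this module.
from printer import Network

p = Network("192.168.1.50", port=9100)  # or Usb(0x04b8, 0x0202) for USB
p._raw(b"\x1b@")            # ESC @ : initialize (standard ESC/POS)
p._raw(b"Hello, ticket\n")
p.close()                   # Network defines close(); others rely on __del__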
<|file_name|>helper.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import wx import win32clipboard import win32con import gui import treeInterceptorHandler import textInfos import globalVars def getSelectedText(): obj = globalVars.focusObject if isinstance(obj.treeInterceptor, treeInterceptorHandler.DocumentTreeInterceptor) and not obj.treeInterceptor.passThrough: obj = obj.treeInterceptor try: info = obj.makeTextInfo(textInfos.POSITION_SELECTION) except (RuntimeError, NotImplementedError): info = None if not info or info.isCollapsed: return None return info.text def getClipboardText(): try: win32clipboard.OpenClipboard() except win32clipboard.error: return None try: text = win32clipboard.GetClipboardData(win32con.CF_UNICODETEXT) except: text = None finally: win32clipboard.CloseClipboard() return text def setClipboardText(text): if not isinstance(text, unicode) or len(text)==0 or text.isspace(): return False try: win32clipboard.OpenClipboard() except win32clipboard.error: return False try: win32clipboard.EmptyClipboard() win32clipboard.SetClipboardData(win32con.CF_UNICODETEXT, text) success = True except: success = False <|fim▁hole|>class TextWindow(wx.Frame): def __init__(self, text, title, readOnly=True): super(TextWindow, self).__init__(gui.mainFrame, title=title) sizer = wx.BoxSizer(wx.VERTICAL) style = wx.TE_MULTILINE | wx.TE_RICH if readOnly: style |= wx.TE_READONLY self.outputCtrl = wx.TextCtrl(self, style=style) self.outputCtrl.Bind(wx.EVT_KEY_DOWN, self.onOutputKeyDown) sizer.Add(self.outputCtrl, proportion=1, flag=wx.EXPAND) self.SetSizer(sizer) sizer.Fit(self) self.outputCtrl.SetValue(text) self.outputCtrl.SetFocus() self.Raise() self.Maximize() self.Show() def onOutputKeyDown(self, event): if event.GetKeyCode() == wx.WXK_ESCAPE: self.Close() event.Skip()<|fim▁end|>
win32clipboard.CloseClipboard() return success
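The clipboard helpers above handle the open/close bookkeeping themselves; note the module is Python 2 era (the unicode check in setClipboardText). A usage sketch under that assumption:

# Python 2 usage sketch; setClipboardText returns False for non-unicode or
# whitespace-only input rather than raising.
import helper

if helper.setClipboardText(u"copied from an add-on"):
    print(helper.getClipboardText())  # -> u'copied from an add-on'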
<|file_name|>terrain.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ Terrain module providing data containers shared across steps """ import threading 
<|fim▁hole|>world = threading.local() # pylint: disable=invalid-name def pick(func): """ Picks the given function and adds it to the world object """ setattr(world, func.__name__, func) return func world.pick = pick<|fim▁end|>
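world is a thread-local scratch object and pick() simply hangs the decorated function off it; typical step-definition usage looks like this (the helper name is an invented example):

# Usage sketch for the terrain module above; login is illustrative.
from terrain import world

@world.pick
def login(user):
    return "logged in as %s" % user

# Later, from any step running in the same thread:
assert world.login("alice") == "logged in as alice"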
<|file_name|>0002_auto__del_unique_cards__del_cards__del_versions__del_unique_versions__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from south.utils import datetime_utils as datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Deleting model 'unique_cards' db.delete_table(u'card_game_unique_cards') # Deleting model 'cards' db.delete_table(u'card_game_cards') # Deleting model 'versions' db.delete_table(u'card_game_versions') # Deleting model 'unique_versions' db.delete_table(u'card_game_unique_versions') # Adding model 'Unique_Card' db.create_table(u'card_game_unique_card', ( (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('card_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['card_game.Card'])), )) db.send_create_signal(u'card_game', ['Unique_Card']) # Adding model 'Card' db.create_table(u'card_game_card', ( (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('name', self.gf('django.db.models.fields.CharField')(max_length=64)), ('cost', self.gf('django.db.models.fields.IntegerField')(default=0)), ('art', self.gf('django.db.models.fields.files.ImageField')(max_length=100, blank=True)), ('text', self.gf('django.db.models.fields.TextField')()), ('power', self.gf('django.db.models.fields.IntegerField')(default=0)), ('toughness', self.gf('django.db.models.fields.IntegerField')(default=1)), )) db.send_create_signal(u'card_game', ['Card']) # Adding model 'Version' db.create_table(u'card_game_version', ( (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('version_number', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['card_game.Unique_Version'])), ('card_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['card_game.Card'])), )) db.send_create_signal(u'card_game', ['Version']) # Adding model 'Unique_Version' db.create_table(u'card_game_unique_version', ( (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=128)), ('description', self.gf('django.db.models.fields.CharField')(max_length=256)), ('creation_date', self.gf('django.db.models.fields.DateField')(auto_now_add=True, blank=True)), )) db.send_create_signal(u'card_game', ['Unique_Version']) def backwards(self, orm): # Adding model 'unique_cards' db.create_table(u'card_game_unique_cards', ( ('card_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['card_game.cards'])), (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), )) db.send_create_signal(u'card_game', ['unique_cards']) # Adding model 'cards' db.create_table(u'card_game_cards', ( ('toughness', self.gf('django.db.models.fields.IntegerField')(default=1)), ('art', self.gf('django.db.models.fields.files.ImageField')(max_length=100, blank=True)), ('name', self.gf('django.db.models.fields.CharField')(max_length=64)), ('power', self.gf('django.db.models.fields.IntegerField')(default=0)), ('text', self.gf('django.db.models.fields.TextField')()), (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('cost', self.gf('django.db.models.fields.IntegerField')(default=0)), )) db.send_create_signal(u'card_game', ['cards']) # Adding model 'versions' db.create_table(u'card_game_versions', ( ('version_number', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['card_game.unique_versions'])), ('card_id', 
self.gf('django.db.models.fields.related.ForeignKey')(to=orm['card_game.cards'])),<|fim▁hole|> # Adding model 'unique_versions' db.create_table(u'card_game_unique_versions', ( (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('description', self.gf('django.db.models.fields.CharField')(max_length=256)), ('name', self.gf('django.db.models.fields.CharField')(max_length=128, unique=True)), ('creation_date', self.gf('django.db.models.fields.DateField')(auto_now_add=True, blank=True)), )) db.send_create_signal(u'card_game', ['unique_versions']) # Deleting model 'Unique_Card' db.delete_table(u'card_game_unique_card') # Deleting model 'Card' db.delete_table(u'card_game_card') # Deleting model 'Version' db.delete_table(u'card_game_version') # Deleting model 'Unique_Version' db.delete_table(u'card_game_unique_version') models = { u'card_game.card': { 'Meta': {'object_name': 'Card'}, 'art': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}), 'cost': ('django.db.models.fields.IntegerField', [], {'default': '0'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'power': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'text': ('django.db.models.fields.TextField', [], {}), 'toughness': ('django.db.models.fields.IntegerField', [], {'default': '1'}) }, u'card_game.unique_card': { 'Meta': {'object_name': 'Unique_Card'}, 'card_id': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['card_game.Card']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) }, u'card_game.unique_version': { 'Meta': {'object_name': 'Unique_Version'}, 'creation_date': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'description': ('django.db.models.fields.CharField', [], {'max_length': '256'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}) }, u'card_game.version': { 'Meta': {'object_name': 'Version'}, 'card_id': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['card_game.Card']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'version_number': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['card_game.Unique_Version']"}) } } complete_apps = ['card_game']<|fim▁end|>
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), )) db.send_create_signal(u'card_game', ['versions'])
<|file_name|>test_views.py<|end_file_name|><|fim▁begin|>""" Tests for Blocks Views """ import json import ddt from django.test import RequestFactory, TestCase from django.core.urlresolvers import reverse import httpretty <|fim▁hole|>from third_party_auth.tests.utils import ThirdPartyOAuthTestMixin, ThirdPartyOAuthTestMixinGoogle from .constants import DUMMY_REDIRECT_URL from .. import adapters from .. import views from . import mixins class _DispatchingViewTestCase(TestCase): """ Base class for tests that exercise DispatchingViews. """ dop_adapter = adapters.DOPAdapter() dot_adapter = adapters.DOTAdapter() view_class = None url = None def setUp(self): super(_DispatchingViewTestCase, self).setUp() self.user = UserFactory() self.dot_app = self.dot_adapter.create_public_client( name='test dot application', user=self.user, redirect_uri=DUMMY_REDIRECT_URL, client_id='dot-app-client-id', ) self.dop_client = self.dop_adapter.create_public_client( name='test dop client', user=self.user, redirect_uri=DUMMY_REDIRECT_URL, client_id='dop-app-client-id', ) def _post_request(self, user, client, token_type=None): """ Call the view with a POST request object with the appropriate format, returning the response object. """ return self.client.post(self.url, self._post_body(user, client, token_type)) def _post_body(self, user, client, token_type=None): """ Return a dictionary to be used as the body of the POST request """ raise NotImplementedError() @ddt.ddt class TestAccessTokenView(mixins.AccessTokenMixin, _DispatchingViewTestCase): """ Test class for AccessTokenView """ view_class = views.AccessTokenView url = reverse('access_token') def _post_body(self, user, client, token_type=None): """ Return a dictionary to be used as the body of the POST request """ body = { 'client_id': client.client_id, 'grant_type': 'password', 'username': user.username, 'password': 'test', } if token_type: body['token_type'] = token_type return body @ddt.data('dop_client', 'dot_app') def test_access_token_fields(self, client_attr): client = getattr(self, client_attr) response = self._post_request(self.user, client) self.assertEqual(response.status_code, 200) data = json.loads(response.content) self.assertIn('access_token', data) self.assertIn('expires_in', data) self.assertIn('scope', data) self.assertIn('token_type', data) @ddt.data('dop_client', 'dot_app') def test_jwt_access_token(self, client_attr): client = getattr(self, client_attr) response = self._post_request(self.user, client, token_type='jwt') self.assertEqual(response.status_code, 200) data = json.loads(response.content) self.assertIn('expires_in', data) self.assertEqual(data['token_type'], 'JWT') self.assert_valid_jwt_access_token(data['access_token'], self.user, data['scope'].split(' ')) def test_dot_access_token_provides_refresh_token(self): response = self._post_request(self.user, self.dot_app) self.assertEqual(response.status_code, 200) data = json.loads(response.content) self.assertIn('refresh_token', data) def test_dop_public_client_access_token(self): response = self._post_request(self.user, self.dop_client) self.assertEqual(response.status_code, 200) data = json.loads(response.content) self.assertNotIn('refresh_token', data) @ddt.ddt @httpretty.activate class TestAccessTokenExchangeView(ThirdPartyOAuthTestMixinGoogle, ThirdPartyOAuthTestMixin, _DispatchingViewTestCase): """ Test class for AccessTokenExchangeView """ view_class = views.AccessTokenExchangeView url = reverse('exchange_access_token', kwargs={'backend': 'google-oauth2'}) def _post_body(self, user, 
client, token_type=None): return { 'client_id': client.client_id, 'access_token': self.access_token, } @ddt.data('dop_client', 'dot_app') def test_access_token_exchange_calls_dispatched_view(self, client_attr): client = getattr(self, client_attr) self.oauth_client = client self._setup_provider_response(success=True) response = self._post_request(self.user, client) self.assertEqual(response.status_code, 200) @ddt.ddt class TestAuthorizationView(TestCase): """ Test class for AuthorizationView """ dop_adapter = adapters.DOPAdapter() def setUp(self): super(TestAuthorizationView, self).setUp() self.user = UserFactory() self.dop_client = self._create_confidential_client(user=self.user, client_id='dop-app-client-id') def _create_confidential_client(self, user, client_id): """ Create a confidential client suitable for testing purposes. """ return self.dop_adapter.create_confidential_client( name='test_app', user=user, client_id=client_id, redirect_uri=DUMMY_REDIRECT_URL ) def test_authorization_view(self): self.client.login(username=self.user.username, password='test') response = self.client.post( '/oauth2/authorize/', { 'client_id': self.dop_client.client_id, # TODO: DOT is not yet supported (MA-2124) 'response_type': 'code', 'state': 'random_state_string', 'redirect_uri': DUMMY_REDIRECT_URL, }, follow=True, ) self.assertEqual(response.status_code, 200) # check form is in context and form params are valid context = response.context_data # pylint: disable=no-member self.assertIn('form', context) self.assertIsNone(context['form']['authorize'].value()) self.assertIn('oauth_data', context) oauth_data = context['oauth_data'] self.assertEqual(oauth_data['redirect_uri'], DUMMY_REDIRECT_URL) self.assertEqual(oauth_data['state'], 'random_state_string') class TestViewDispatch(TestCase): """ Test that the DispatchingView dispatches the right way. """ dop_adapter = adapters.DOPAdapter() dot_adapter = adapters.DOTAdapter() def setUp(self): super(TestViewDispatch, self).setUp() self.user = UserFactory() self.view = views._DispatchingView() # pylint: disable=protected-access self.dop_adapter.create_public_client( name='', user=self.user, client_id='dop-id', redirect_uri=DUMMY_REDIRECT_URL ) self.dot_adapter.create_public_client( name='', user=self.user, client_id='dot-id', redirect_uri=DUMMY_REDIRECT_URL ) def assert_is_view(self, view_candidate): """ Assert that a given object is a view. That is, it is callable, and takes a request argument. Note: while technically, the request argument could take any name, this assertion requires the argument to be named `request`. This is good practice. You should do it anyway. 
""" _msg_base = u'{view} is not a view: {reason}' msg_not_callable = _msg_base.format(view=view_candidate, reason=u'it is not callable') msg_no_request = _msg_base.format(view=view_candidate, reason=u'it has no request argument') self.assertTrue(hasattr(view_candidate, '__call__'), msg_not_callable) args = view_candidate.func_code.co_varnames self.assertTrue(args, msg_no_request) self.assertEqual(args[0], 'request') def _get_request(self, client_id): """ Return a request with the specified client_id in the body """ return RequestFactory().post('/', {'client_id': client_id}) def test_dispatching_to_dot(self): request = self._get_request('dot-id') self.assertEqual(self.view.select_backend(request), self.dot_adapter.backend) def test_dispatching_to_dop(self): request = self._get_request('dop-id') self.assertEqual(self.view.select_backend(request), self.dop_adapter.backend) def test_dispatching_with_no_client(self): request = self._get_request(None) self.assertEqual(self.view.select_backend(request), self.dop_adapter.backend) def test_dispatching_with_invalid_client(self): request = self._get_request('abcesdfljh') self.assertEqual(self.view.select_backend(request), self.dop_adapter.backend) def test_get_view_for_dot(self): view_object = views.AccessTokenView() self.assert_is_view(view_object.get_view_for_backend(self.dot_adapter.backend)) def test_get_view_for_dop(self): view_object = views.AccessTokenView() self.assert_is_view(view_object.get_view_for_backend(self.dop_adapter.backend)) def test_get_view_for_no_backend(self): view_object = views.AccessTokenView() self.assertRaises(KeyError, view_object.get_view_for_backend, None)<|fim▁end|>
from student.tests.factories import UserFactory
<|file_name|>Posts.js<|end_file_name|><|fim▁begin|>import React, { Component } from 'react'; import PropTypes from 'prop-types'; import * as postsActions from 'redux/modules/posts'; import { asyncConnect } from 'redux-connect'; import { connect } from 'react-redux';<|fim▁hole|> @asyncConnect([{ promise: ({ store: { dispatch, getState } }) => { dispatch(postsActions.load({ user_id: getState().auth.user.id })); } }]) @connect( state => ({ posts: state.posts.items }), { ...postsActions, pushState: push } ) export default class Posts extends Component { static propTypes = { posts: PropTypes.array.isRequired, clearItems: PropTypes.func.isRequired, dispatch: PropTypes.func.isRequired, pushState: PropTypes.func.isRequired }; static defaultProps = { posts: [] }; static contextTypes = { }; state = { editId: 0 } componentWillUnmount() { if (!this.state.editId) { this.props.dispatch(this.props.clearItems()); } } gotoEdit = id => { this.setState({ editId: id }); this.props.pushState(`/profile/edit_post/${id}`); } render() { const { posts } = this.props; return ( <div> <h1>My Posts</h1> <ListPosts items={posts} editable gotoEdit={this.gotoEdit} /> </div> ); } }<|fim▁end|>
import { push } from 'react-router-redux'; import ListPosts from '../Posts/ListPosts';
<|file_name|>contact.py<|end_file_name|><|fim▁begin|>import re from datetime import datetime from flask import current_app as app from flask_jwt import current_identity from flask_restplus import Namespace, Resource, fields, reqparse from sqlalchemy.exc import IntegrityError from packr.models import Message api = Namespace('contact', description='Operations related to the contact form') message = api.model('Contact', { 'email': fields.String(required=True, description='Contact email'), 'content': fields.String(required=True, description='Message'), }) message_id = api.model('ContactCompletion', { 'id': fields.Integer(required=True, description='id') }) @api.route('/') class MessageItem(Resource): @api.expect(message) @api.response(204, 'Message successfully received.') def post(self): req_parse = reqparse.RequestParser(bundle_errors=True) req_parse.add_argument('email', type=str, required=True, help='No email provided', location='json') req_parse.add_argument('content', type=str, required=True, help='No message provided', location='json') args = req_parse.parse_args() email = args.get('email') content = args.get('content') if email == '': return {'message': {'email': 'No email provided'}}, 400 elif not re.match(r"^[A-Za-z0-9.+_-]+@[A-Za-z0-9._-]+\.[a-zA-Z]*$", email): return {'message': {'email': 'Invalid email provided'}}, 400 if content == '': return {'message': {'content': 'No content provided'}}, 400 new_message = Message(email=email, content=content, time=datetime.now()) try: new_message.save() except IntegrityError as e: print(e) return { 'description': 'Failed to send message.' }, 409 except Exception as e: print(e) return {'description': 'Server encountered an error.'}, 500 return {'email': new_message.email}, 201 def get(self): if not current_identity and not app.config.get('TESTING'): return {'message': 'User not authenticated'}, 401 if app.config.get('TESTING') \ or current_identity.role.role_name == "ADMIN": messages = dict() for message_row in Message.query.filter_by(done=False).all(): messages[message_row.id] = { "email": message_row.email, "time": message_row.time.isoformat(), "content": message_row.content }<|fim▁hole|> @api.route('/complete') class CompleteItem(Resource): @api.expect(message_id) @api.response(204, 'Message successfully updated.') def post(self): req_parse = reqparse.RequestParser(bundle_errors=True) req_parse.add_argument('id', type=int, required=True, help='No id provided', location='json') args = req_parse.parse_args() id = args.get('id') if id == 0: return {'message': {'id': 'No id provided'}}, 400 completed_message = Message.query.filter_by(id=id).first() completed_message.done = True try: completed_message.save() except IntegrityError as e: print(e) return { 'description': 'Failed to update message.' }, 409 except Exception as e: print(e) return {'description': 'Server encountered an error.'}, 500 return {'message': "Message updated"}, 201<|fim▁end|>
return messages, 201 else: return {'message': 'Not authorised'}, 401
<|file_name|>plugin.py<|end_file_name|><|fim▁begin|>from Components.ActionMap import ActionMap from Components.Sources.List import List from Components.Sources.StaticText import StaticText from Components.ConfigList import ConfigList from Components.config import * from Components.Console import Console from skin import loadSkin from Components.Label import Label from Screens.Screen import Screen from Components.Pixmap import Pixmap from Plugins.Plugin import PluginDescriptor from Tools.Directories import pathExists, fileExists from Weather import * from Search_Id import * from Screens.MessageBox import MessageBox from Screens.Standby import TryQuitMainloop from __init__ import _ import os import commands from enigma import getDesktop from boxbranding import getMachineName, getMachineBrand from Screens.InputBox import PinInput from Tools.BoundFunction import boundFunction config.plugins.mc_global = ConfigSubsection() config.plugins.mc_global.vfd = ConfigSelection(default='off', choices=[('off', 'off'), ('on', 'on')]) config.plugins.mc_globalsettings.upnp_enable = ConfigYesNo(default=False) #change to FullHD if getDesktop(0).size().width() == 1920: loadSkin("/usr/lib/enigma2/python/Plugins/Extensions/BMediaCenter/skins/defaultHD/skinHD.xml") else: loadSkin("/usr/lib/enigma2/python/Plugins/Extensions/BMediaCenter/skins/defaultHD/skin.xml") #try: # from enigma import evfd # config.plugins.mc_global.vfd.value = 'on' # config.plugins.mc_global.save() #except Exception as e: # print 'Media Center: Import evfd failed' try: from Plugins.Extensions.DVDPlayer.plugin import * dvdplayer = True except: print "Media Center: Import DVDPlayer failed" dvdplayer = False mcpath = '/usr/lib/enigma2/python/Plugins/Extensions/BMediaCenter/skins/defaultHD/images/' class DMC_MainMenu(Screen): def __init__(self, session): Screen.__init__(self, session) self["text"] = Label(_("My Music")) self["left"] = Pixmap() self["middle"] = Pixmap() self["right"] = Pixmap() self.Console = Console()<|fim▁hole|> self.session.nav.stopService() # Disable OSD Transparency try: self.can_osd_alpha = open("/proc/stb/video/alpha", "r") and True or False except: self.can_osd_alpha = False if self.can_osd_alpha: open("/proc/stb/video/alpha", "w").write(str("255")) open("/proc/sys/vm/drop_caches", "w").write(str("3")) list = [] list.append((_("My Music"), "MC_AudioPlayer", "menu_music", "50")) list.append((_("My Music"), "MC_AudioPlayer", "menu_music", "50")) list.append((_("My Videos"), "MC_VideoPlayer", "menu_video", "50")) list.append((_("DVD Player"), "MC_DVDPlayer", "menu_video", "50")) list.append((_("My Pictures"), "MC_PictureViewer", "menu_pictures", "50")) list.append((_("Web Radio"), "MC_WebRadio", "menu_radio", "50")) list.append((_("VLC Player"), "MC_VLCPlayer", "menu_vlc", "50")) list.append((_("Weather Info"), "MC_WeatherInfo", "menu_weather", "50")) list.append((_("MUZU.TV"), "MUZU.TV", "menu_webmedia", "50")) list.append((_("Opera"), "Webbrowser", "menu_webbrowser", "50")) list.append((_("SHOUTcast"), "SHOUTcast", "menu_shoutcast", "50")) list.append((_("TSMedia"), "TSMedia", "menu_weblinks", "50")) list.append((_("Settings"), "MC_Settings", "menu_settings", "50")) list.append(("Exit", "Exit", "menu_exit", "50")) self["menu"] = List(list) self["actions"] = ActionMap(["OkCancelActions", "DirectionActions"], { "cancel": self.Exit, "ok": self.okbuttonClick, "right": self.next, "upRepeated": self.prev, "down": self.next, "downRepeated": self.next, "leftRepeated": self.prev, "rightRepeated": self.next, "up": self.prev, "left": 
self.prev }, -1) #if config.plugins.mc_global.vfd.value == "on": # evfd.getInstance().vfd_write_string(_("My Music")) if config.plugins.mc_globalsettings.upnp_enable.getValue(): if fileExists("/media/upnp") is False: os.mkdir("/media/upnp") os.system('djmount /media/upnp &') if self.isProtected() and config.ParentalControl.servicepin[0].value: self.onFirstExecBegin.append(boundFunction(self.session.openWithCallback, self.pinEntered, PinInput, pinList=[x.value for x in config.ParentalControl.servicepin], triesEntry=config.ParentalControl.retries.servicepin, title=_("Please enter the correct pin code"), windowTitle=_("Enter pin code"))) def isProtected(self): return config.ParentalControl.setuppinactive.value and config.ParentalControl.config_sections.bmediacenter.value def pinEntered(self, result): if result is None: self.closeProtectedScreen() elif not result: self.session.openWithCallback(self.close(), MessageBox, _("The pin code you entered is wrong."), MessageBox.TYPE_ERROR, timeout=3) def closeProtectedScreen(self, result=None): self.close(None) def checkNetworkState(self, str, retval, extra_args): if 'Collected errors' in str: self.session.openWithCallback(self.close, MessageBox, _("A background update check is in progress, please wait a few minutes and try again."), type=MessageBox.TYPE_INFO, timeout=10, close_on_any_key=True) elif not str: self.feedscheck = self.session.open(MessageBox,_('Please wait whilst feeds state is checked.'), MessageBox.TYPE_INFO, enable_input = False) self.feedscheck.setTitle(_('Checking Feeds')) cmd1 = "opkg update" self.CheckConsole = Console() self.CheckConsole.ePopen(cmd1, self.checkNetworkStateFinished) else: self.session.open(MessageBox,"Error: No update service available at the moment", MessageBox.TYPE_INFO) def checkNetworkStateFinished(self, result, retval,extra_args=None): if 'bad address' in result: self.session.openWithCallback(self.InstallPackageFailed, MessageBox, _("Your %s %s is not connected to the internet, please check your network settings and try again.") % (getMachineBrand(), getMachineName()), type=MessageBox.TYPE_INFO, timeout=10, close_on_any_key=True) elif 'wget returned 1' in result or 'wget returned 255' in result or '404 Not Found' in result: self.session.openWithCallback(self.InstallPackageFailed, MessageBox, _("Sorry feeds are down for maintenance, please try again later."), type=MessageBox.TYPE_INFO, timeout=10, close_on_any_key=True) else: self.session.openWithCallback(self.InstallPackage, MessageBox, _('Ready to install %s ?') % self.service_name, MessageBox.TYPE_YESNO) self.Exit() def InstallPackageFailed(self, val): self.feedscheck.close() self.close() self.Exit() def InstallPackage(self, val): if val: self.doInstall(self.installComplete, self.service_name) else: self.feedscheck.close() self.close() def doInstall(self, callback, pkgname): self.message = self.session.open(MessageBox,_("please wait..."), MessageBox.TYPE_INFO, enable_input = False) self.message.setTitle(_('Installing Service')) self.Console.ePopen('/usr/bin/opkg install ' + pkgname, callback) def installComplete(self,result = None, retval = None, extra_args = None): self.session.open(TryQuitMainloop, 3) def InstallCheckDVD(self): self.service_name = 'enigma2-plugin-extensions-dvdplayer' self.Console.ePopen('/usr/bin/opkg list_installed ' + self.service_name, self.checkNetworkState) def InstallCheckVLC(self): self.service_name = 'enigma2-plugin-extensions-vlcplayer' self.Console.ePopen('/usr/bin/opkg list_installed ' + self.service_name, self.checkNetworkState) def 
InstallCheckSHOUT(self): self.service_name = 'enigma2-plugin-extensions-shoutcast' self.Console.ePopen('/usr/bin/opkg list_installed ' + self.service_name, self.checkNetworkState) def InstallCheckTSMedia(self): self.service_name = 'enigma2-plugin-extensions-tsmedia-oe2.0' self.Console.ePopen('/usr/bin/opkg list_installed ' + self.service_name, self.checkNetworkState) def InstallCheckMUZU(self): self.service_name = 'enigma2-plugin-extensions-muzutv' self.Console.ePopen('/usr/bin/opkg list_installed ' + self.service_name, self.checkNetworkState) def InstallCheckWebbrowser(self): self.service_name = 'enigma2-plugin-extensions-hbbtv-opennfr-fullhd' self.Console.ePopen('/usr/bin/opkg list_installed ' + self.service_name, self.checkNetworkState) def next(self): self["menu"].selectNext() if self["menu"].getIndex() == 13: self["menu"].setIndex(1) #if self["menu"].getIndex() == 14: # self["menu"].setIndex(1) self.update() def prev(self): self["menu"].selectPrevious() if self["menu"].getIndex() == 0: self["menu"].setIndex(12) self.update() def update(self): if self["menu"].getIndex() == 1: self["left"].instance.setPixmapFromFile(mcpath +"MenuIconSettingssw.png") self["middle"].instance.setPixmapFromFile(mcpath +"MenuIconMusic.png") self["right"].instance.setPixmapFromFile(mcpath +"MenuIconVideosw.png") elif self["menu"].getIndex() == 2: self["left"].instance.setPixmapFromFile(mcpath +"MenuIconMusicsw.png") self["middle"].instance.setPixmapFromFile(mcpath +"MenuIconVideo.png") self["right"].instance.setPixmapFromFile(mcpath +"MenuIconDVDsw.png") elif self["menu"].getIndex() == 3: self["left"].instance.setPixmapFromFile(mcpath +"MenuIconVideosw.png") self["middle"].instance.setPixmapFromFile(mcpath +"MenuIconDVD.png") self["right"].instance.setPixmapFromFile(mcpath +"MenuIconPicturesw.png") elif self["menu"].getIndex() == 4: self["left"].instance.setPixmapFromFile(mcpath +"MenuIconDVDsw.png") self["middle"].instance.setPixmapFromFile(mcpath +"MenuIconPicture.png") self["right"].instance.setPixmapFromFile(mcpath +"MenuIconRadiosw.png") elif self["menu"].getIndex() == 5: self["left"].instance.setPixmapFromFile(mcpath +"MenuIconPicturesw.png") self["middle"].instance.setPixmapFromFile(mcpath +"MenuIconRadio.png") self["right"].instance.setPixmapFromFile(mcpath +"MenuIconVLCsw.png") elif self["menu"].getIndex() == 6: self["left"].instance.setPixmapFromFile(mcpath +"MenuIconRadiosw.png") self["middle"].instance.setPixmapFromFile(mcpath +"MenuIconVLC.png") self["right"].instance.setPixmapFromFile(mcpath +"MenuIconWeathersw.png") elif self["menu"].getIndex() == 7: self["left"].instance.setPixmapFromFile(mcpath +"MenuIconVLCsw.png") self["middle"].instance.setPixmapFromFile(mcpath +"MenuIconWeather.png") self["right"].instance.setPixmapFromFile(mcpath +"MenuIconWebmediasw.png") elif self["menu"].getIndex() == 8: self["left"].instance.setPixmapFromFile(mcpath +"MenuIconWeathersw.png") self["middle"].instance.setPixmapFromFile(mcpath +"MenuIconWebmedia.png") self["right"].instance.setPixmapFromFile(mcpath +"MenuIconWebbrowsersw.png") elif self["menu"].getIndex() == 9: self["left"].instance.setPixmapFromFile(mcpath +"MenuIconWebmediasw.png") self["middle"].instance.setPixmapFromFile(mcpath +"MenuIconWebbrowser.png") self["right"].instance.setPixmapFromFile(mcpath +"MenuIconShoutcastsw.png") elif self["menu"].getIndex() == 10: self["left"].instance.setPixmapFromFile(mcpath +"MenuIconWebbrowsersw.png") self["middle"].instance.setPixmapFromFile(mcpath +"MenuIconShoutcast.png") 
self["right"].instance.setPixmapFromFile(mcpath +"MenuIconWeblinkssw.png") elif self["menu"].getIndex() == 11: self["left"].instance.setPixmapFromFile(mcpath +"MenuIconShoutcastsw.png") self["middle"].instance.setPixmapFromFile(mcpath +"MenuIconWeblinks.png") self["right"].instance.setPixmapFromFile(mcpath +"MenuIconSettingssw.png") elif self["menu"].getIndex() == 12: self["left"].instance.setPixmapFromFile(mcpath +"MenuIconWeblinkssw.png") self["middle"].instance.setPixmapFromFile(mcpath +"MenuIconSettings.png") self["right"].instance.setPixmapFromFile(mcpath +"MenuIconMusicsw.png") #if config.plugins.mc_global.vfd.value == "on": # evfd.getInstance().vfd_write_string(self["menu"].getCurrent()[0]) self["text"].setText(self["menu"].getCurrent()[0]) def okbuttonClick(self): from Screens.MessageBox import MessageBox selection = self["menu"].getCurrent() if selection is not None: if selection[1] == "MC_VideoPlayer": from MC_VideoPlayer import MC_VideoPlayer self.session.open(MC_VideoPlayer) elif selection[1] == "MC_DVDPlayer": if dvdplayer: self.session.open(DVDPlayer) else: self.InstallCheckDVD() self.session.open(MessageBox,"Error: DVD-Player Plugin not installed ...", MessageBox.TYPE_INFO, timeout=5) elif selection[1] == "MC_PictureViewer": from MC_PictureViewer import MC_PictureViewer self.session.open(MC_PictureViewer) elif selection[1] == "MC_AudioPlayer": from MC_AudioPlayer import MC_AudioPlayer self.session.open(MC_AudioPlayer) elif selection[1] == "MC_WebRadio": from MC_AudioPlayer import MC_WebRadio self.session.open(MC_WebRadio) elif selection[1] == "MC_VLCPlayer": if pathExists("/usr/lib/enigma2/python/Plugins/Extensions/VlcPlayer/") == True: from MC_VLCPlayer import MC_VLCServerlist self.session.open(MC_VLCServerlist) else: self.session.open(MessageBox,"Error: VLC-Player Plugin not installed ...", MessageBox.TYPE_INFO, timeout=5) self.InstallCheckVLC() elif selection[1] == "Webbrowser": if os.path.exists("/usr/lib/enigma2/python/Plugins/Extensions/HbbTV/") == True: from Plugins.Extensions.HbbTV.plugin import OperaBrowser global didOpen didOpen = True url = 'http://www.nachtfalke.biz' self.session.open(OperaBrowser, url) global browserinstance else: # self.session.openWithCallback(self.browserCallback, BrowserRemoteControl, url) self.session.open(MessageBox,"Error: WebBrowser Plugin not installed ...", MessageBox.TYPE_INFO, timeout=5) self.InstallCheckWebbrowser() elif selection[1] == "SHOUTcast": if os.path.exists("/usr/lib/enigma2/python/Plugins/Extensions/SHOUTcast/") == True: from Plugins.Extensions.SHOUTcast.plugin import SHOUTcastWidget self.session.open(SHOUTcastWidget) else: self.session.open(MessageBox,"Error: SHOUTcast Plugin not installed ...", MessageBox.TYPE_INFO, timeout=5) self.InstallCheckSHOUT() elif selection[1] == "MC_WeatherInfo": self.session.nav.playService(self.oldbmcService) self.session.open(MeteoMain) elif selection[1] == "MC_Settings": from MC_Settings import MC_Settings self.session.open(MC_Settings) elif selection[1] == "MUZU.TV": if os.path.exists("/usr/lib/enigma2/python/Plugins/Extensions/MUZUtv/") == True: from Plugins.Extensions.MUZUtv.plugin import muzuMain self.session.open(muzuMain) else: self.session.open(MessageBox,"Error: MUZUtv Plugin not installed ...", MessageBox.TYPE_INFO, timeout=5) self.InstallCheckMUZU() elif selection[1] == "TSMedia": if os.path.exists("/usr/lib/enigma2/python/Plugins/Extensions/TSmedia/") == True: from Plugins.Extensions.TSmedia.plugin import TSmediabootlogo self.session.open(TSmediabootlogo) else: 
self.session.open(MessageBox,"Error: TSmedia Plugin not installed ...", MessageBox.TYPE_INFO, timeout=5) self.InstallCheckTSMedia() else: self.session.open(MessageBox,("Error: Could not find plugin %s\ncoming soon ... :)") % (selection[1]), MessageBox.TYPE_INFO) def error(self, error): from Screens.MessageBox import MessageBox self.session.open(MessageBox,("UNEXPECTED ERROR:\n%s") % (error), MessageBox.TYPE_INFO) def Exit(self): self.session.nav.stopService() # Restore OSD Transparency Settings open("/proc/sys/vm/drop_caches", "w").write(str("3")) if self.can_osd_alpha: try: if config.plugins.mc_global.vfd.value == "on": trans = commands.getoutput('cat /etc/enigma2/settings | grep config.av.osd_alpha | cut -d "=" -f2') else: trans = commands.getoutput('cat /etc/enigma2/settings | grep config.osd.alpha | cut -d "=" -f2') open("/proc/stb/video/alpha", "w").write(str(trans)) except: print "Set OSD Transparacy failed" #if config.plugins.mc_global.vfd.value == "on": # evfd.getInstance().vfd_write_string(_("Media Center")) os.system('umount /media/upnp') self.session.nav.playService(self.oldbmcService) self.close() def main(session, **kwargs): session.open(DMC_MainMenu) def menu(menuid, **kwargs): if menuid == "mainmenu": return [(_("Media Center"), main, "dmc_mainmenu", 44)] return [] def Plugins(**kwargs): if config.plugins.mc_globalsettings.showinmainmenu.value == True and config.plugins.mc_globalsettings.showinextmenu.value == True: return [ PluginDescriptor(name = "Media Center", description = "Media Center Plugin for your OpenNFR-Image", icon="plugin.png", where = PluginDescriptor.WHERE_PLUGINMENU, fnc = main), PluginDescriptor(name = "Media Center", description = "Media Center Plugin for your OpenNFR-Image", where = PluginDescriptor.WHERE_MENU, fnc = menu), PluginDescriptor(name = "Media Center", description = "Media Center Plugin for your OpenNFR-Image", icon="plugin.png", where = PluginDescriptor.WHERE_EXTENSIONSMENU, fnc=main)] elif config.plugins.mc_globalsettings.showinmainmenu.value == True and config.plugins.mc_globalsettings.showinextmenu.value == False: return [ PluginDescriptor(name = "Media Center", description = "Media Center Plugin for your OpenNFR-Image", icon="plugin.png", where = PluginDescriptor.WHERE_PLUGINMENU, fnc = main), PluginDescriptor(name = "Media Center", description = "Media Center Plugin for your OpenNFR-Image", where = PluginDescriptor.WHERE_MENU, fnc = menu)] elif config.plugins.mc_globalsettings.showinmainmenu.value == False and config.plugins.mc_globalsettings.showinextmenu.value == True: return [ PluginDescriptor(name = "Media Center", description = "Media Center Plugin for your OpenNFR-Image", icon="plugin.png", where = PluginDescriptor.WHERE_PLUGINMENU, fnc = main), PluginDescriptor(name = "Media Center", description = "Media Center Plugin for your OpenNFR-Image", icon="plugin.png", where = PluginDescriptor.WHERE_EXTENSIONSMENU, fnc=main)] else: return [ PluginDescriptor(name = "Media Center", description = "Media Center Plugin for your OpenNFR-Image", icon="plugin.png", where = PluginDescriptor.WHERE_PLUGINMENU, fnc = main)]<|fim▁end|>
self.oldbmcService = self.session.nav.getCurrentlyPlayingServiceReference()
<|file_name|>_nticks.py<|end_file_name|><|fim▁begin|>import _plotly_utils.basevalidators class NticksValidator(_plotly_utils.basevalidators.IntegerValidator): def __init__( self, plotly_name="nticks", parent_name="layout.ternary.baxis", **kwargs ): super(NticksValidator, self).__init__(<|fim▁hole|> **kwargs )<|fim▁end|>
plotly_name=plotly_name, parent_name=parent_name, edit_type=kwargs.pop("edit_type", "plot"), min=kwargs.pop("min", 1),
<|file_name|>props.rs<|end_file_name|><|fim▁begin|>// This Source Code Form is subject to the terms of the Mozilla Public // License, v. 2.0. If a copy of the MPL was not distributed with this // file, You can obtain one at http://mozilla.org/MPL/2.0/. use dbus::arg::IterAppend; use dbus_tree::{MTSync, MethodErr, PropInfo}; use crate::{ dbus_api::{ filesystem::shared::{self, filesystem_operation}, types::TData, }, engine::{Engine, Name, Pool}, }; /// Get a filesystem property and place it on the D-Bus. The property is /// found by means of the getter method which takes a reference to a /// Filesystem and obtains the property from the filesystem. fn get_filesystem_property<F, R, E>( i: &mut IterAppend<'_>, p: &PropInfo<'_, MTSync<TData<E>>, TData<E>>, getter: F, ) -> Result<(), MethodErr> where F: Fn((Name, Name, &<E::Pool as Pool>::Filesystem)) -> Result<R, String>, R: dbus::arg::Append, E: Engine, { #[allow(clippy::redundant_closure)] i.append( filesystem_operation(p.tree, p.path.get_name(), getter) .map_err(|ref e| MethodErr::failed(e))?, ); Ok(()) } /// Get the devnode for an object path. pub fn get_filesystem_devnode<E>( i: &mut IterAppend<'_>, p: &PropInfo<'_, MTSync<TData<E>>, TData<E>>, ) -> Result<(), MethodErr><|fim▁hole|> E: Engine, { get_filesystem_property(i, p, |(pool_name, fs_name, fs)| { Ok(shared::fs_devnode_prop::<E>(fs, &pool_name, &fs_name)) }) } pub fn get_filesystem_name<E>( i: &mut IterAppend<'_>, p: &PropInfo<'_, MTSync<TData<E>>, TData<E>>, ) -> Result<(), MethodErr> where E: Engine, { get_filesystem_property(i, p, |(_, fs_name, _)| Ok(shared::fs_name_prop(&fs_name))) } /// Get the creation date and time in rfc3339 format. pub fn get_filesystem_created<E>( i: &mut IterAppend<'_>, p: &PropInfo<'_, MTSync<TData<E>>, TData<E>>, ) -> Result<(), MethodErr> where E: Engine, { get_filesystem_property(i, p, |(_, _, fs)| Ok(shared::fs_created_prop::<E>(fs))) } /// Get the size of the filesystem in bytes. pub fn get_filesystem_size<E>( i: &mut IterAppend<'_>, p: &PropInfo<'_, MTSync<TData<E>>, TData<E>>, ) -> Result<(), MethodErr> where E: Engine, { get_filesystem_property(i, p, |(_, _, fs)| Ok(shared::fs_size_prop(fs))) } /// Get the size of the used portion of the filesystem in bytes. pub fn get_filesystem_used<E>( i: &mut IterAppend<'_>, p: &PropInfo<'_, MTSync<TData<E>>, TData<E>>, ) -> Result<(), MethodErr> where E: Engine, { get_filesystem_property(i, p, |(_, _, fs)| Ok(shared::fs_used_prop::<E>(fs))) }<|fim▁end|>
where
<|file_name|>draggableRowTests.js<|end_file_name|><|fim▁begin|>describe("Draggable Row Directive ", function () { var scope, container, element, html, compiled, compile; beforeEach(module("app", function ($provide) { $provide.value("authService", {}) })); beforeEach(inject(function ($compile, $rootScope) { html = '<div id="element-id" data-draggable-row=""' + ' data-draggable-elem-selector=".draggable"' + ' data-drop-area-selector=".drop-area">' + '<div class="draggable"></div>' + '<div class="drop-area" style="display: none;"></div>' + '</div>'; scope = $rootScope.$new(); compile = $compile; })); function prepareDirective(s) { container = angular.element(html); compiled = compile(container); element = compiled(s); s.$digest(); } /***********************************************************************************************************************/ it('should add draggable attribute to draggable element, when initialised', function () { prepareDirective(scope); expect(element.find('.draggable').attr('draggable')).toBe('true'); }); it('should prevent default, when dragging over allowed element', function () { prepareDirective(scope); var event = $.Event('dragover'); event.preventDefault = window.jasmine.createSpy('preventDefault'); event.originalEvent = { dataTransfer: { types: {}, getData: window.jasmine.createSpy('getData').and.returnValue("element-id") } }; element.trigger(event); expect(event.preventDefault).toHaveBeenCalled(); }); it('should show drop area, when drag enter allowed element', function () { prepareDirective(scope); var event = $.Event('dragenter'); event.originalEvent = { dataTransfer: { types: {}, getData: window.jasmine.createSpy('getData').and.returnValue("element-id") } }; element.trigger(event); expect(element.find('.drop-area').css('display')).not.toEqual('none'); expect(event.originalEvent.dataTransfer.getData).toHaveBeenCalledWith('draggedRow'); }); it('should call scope onDragEnd, when dragging ends', function () { prepareDirective(scope); var event = $.Event('dragend'); var isolateScope = element.isolateScope(); spyOn(isolateScope, 'onDragEnd'); element.trigger(event); expect(isolateScope.onDragEnd).toHaveBeenCalled(); }); it('should set drag data and call scope onDrag, when drag starts', function () { prepareDirective(scope); var event = $.Event('dragstart'); event.originalEvent = { dataTransfer: { setData: window.jasmine.createSpy('setData') } }; var isolateScope = element.isolateScope(); spyOn(isolateScope, 'onDrag'); element.find('.draggable').trigger(event); expect(isolateScope.onDrag).toHaveBeenCalled(); expect(event.originalEvent.dataTransfer.setData).toHaveBeenCalledWith('draggedRow', 'element-id'); }); it('should prevent default, when dragging over allowed drop area', function () { prepareDirective(scope); var event = $.Event('dragover'); event.preventDefault = window.jasmine.createSpy('preventDefault'); event.originalEvent = { dataTransfer: { types: {}, getData: window.jasmine.createSpy('getData').and.returnValue("element-id") } }; element.find('.drop-area').trigger(event); expect(event.preventDefault).toHaveBeenCalled(); }); it('should show drop area, when drag enter allowed drop area', function () { prepareDirective(scope); var event = $.Event('dragenter'); event.originalEvent = { dataTransfer: { types: {}, getData: window.jasmine.createSpy('getData').and.returnValue("element-id") } }; element.find('.drop-area').trigger(event); expect(element.find('.drop-area').css('display')).not.toEqual('none'); 
expect(event.originalEvent.dataTransfer.getData).toHaveBeenCalledWith('draggedRow'); }); it('should hide drop area, when drag leave drop area', function () { prepareDirective(scope); var event = $.Event('dragleave'); event.originalEvent = { dataTransfer: { types: {}, getData: window.jasmine.createSpy('getData').and.returnValue("element-id") }<|fim▁hole|> expect(event.originalEvent.dataTransfer.getData).toHaveBeenCalledWith('draggedRow'); }); it('should hide drop area and call scope onDrop, when drop on drop area', function () { prepareDirective(scope); var event = $.Event('drop'); event.preventDefault = window.jasmine.createSpy('preventDefault'); event.originalEvent = { dataTransfer: { types: {}, getData: window.jasmine.createSpy('getData').and.returnValue("element-id") } }; var isolateScope = element.isolateScope(); spyOn(isolateScope, 'onDrop'); element.find('.drop-area').trigger(event); expect(event.originalEvent.dataTransfer.getData).toHaveBeenCalledWith('draggedRow'); expect(event.preventDefault).toHaveBeenCalled(); expect(element.find('.drop-area').css('display')).toEqual('none'); expect(isolateScope.onDrop).toHaveBeenCalled(); }); });<|fim▁end|>
}; element.find('.drop-area').trigger(event); expect(element.find('.drop-area').css('display')).toEqual('none');
<|file_name|>deploy.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import os from watermark.config import config as conf from watermark import connect config_name = os.getenv('WM_CONFIG_ENV') or 'default'<|fim▁hole|>conn = connect.get_connection(config) conn.message.create_queue(name=config.NAME) print("{name} queue created".format(name=config.NAME))<|fim▁end|>
config = conf[config_name]()
<|file_name|>pagers.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from typing import ( Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, ) from google.cloud.certificate_manager_v1.types import certificate_manager class ListCertificatesPager: """A pager for iterating through ``list_certificates`` requests. This class thinly wraps an initial :class:`google.cloud.certificate_manager_v1.types.ListCertificatesResponse` object, and provides an ``__iter__`` method to iterate through its ``certificates`` field. If there are more pages, the ``__iter__`` method will make additional ``ListCertificates`` requests and continue to iterate through the ``certificates`` field on the corresponding responses. All the usual :class:`google.cloud.certificate_manager_v1.types.ListCertificatesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ def __init__( self, method: Callable[..., certificate_manager.ListCertificatesResponse], request: certificate_manager.ListCertificatesRequest, response: certificate_manager.ListCertificatesResponse, *, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. Args: method (Callable): The method that was originally called, and which instantiated this pager. request (google.cloud.certificate_manager_v1.types.ListCertificatesRequest): The initial request object. response (google.cloud.certificate_manager_v1.types.ListCertificatesResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = certificate_manager.ListCertificatesRequest(request) self._response = response self._metadata = metadata def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property def pages(self) -> Iterator[certificate_manager.ListCertificatesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response def __iter__(self) -> Iterator[certificate_manager.Certificate]: for page in self.pages: yield from page.certificates def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListCertificatesAsyncPager: """A pager for iterating through ``list_certificates`` requests. This class thinly wraps an initial :class:`google.cloud.certificate_manager_v1.types.ListCertificatesResponse` object, and provides an ``__aiter__`` method to iterate through its ``certificates`` field. If there are more pages, the ``__aiter__`` method will make additional ``ListCertificates`` requests and continue to iterate through the ``certificates`` field on the corresponding responses. 
All the usual :class:`google.cloud.certificate_manager_v1.types.ListCertificatesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ def __init__( self, method: Callable[..., Awaitable[certificate_manager.ListCertificatesResponse]], request: certificate_manager.ListCertificatesRequest, response: certificate_manager.ListCertificatesResponse, *, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. Args: method (Callable): The method that was originally called, and which instantiated this pager. request (google.cloud.certificate_manager_v1.types.ListCertificatesRequest): The initial request object. response (google.cloud.certificate_manager_v1.types.ListCertificatesResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = certificate_manager.ListCertificatesRequest(request) self._response = response self._metadata = metadata def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property async def pages( self, ) -> AsyncIterator[certificate_manager.ListCertificatesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response def __aiter__(self) -> AsyncIterator[certificate_manager.Certificate]: async def async_generator(): async for page in self.pages: for response in page.certificates: yield response return async_generator() def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListCertificateMapsPager: """A pager for iterating through ``list_certificate_maps`` requests. This class thinly wraps an initial :class:`google.cloud.certificate_manager_v1.types.ListCertificateMapsResponse` object, and provides an ``__iter__`` method to iterate through its ``certificate_maps`` field. If there are more pages, the ``__iter__`` method will make additional ``ListCertificateMaps`` requests and continue to iterate through the ``certificate_maps`` field on the corresponding responses. All the usual :class:`google.cloud.certificate_manager_v1.types.ListCertificateMapsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ def __init__( self, method: Callable[..., certificate_manager.ListCertificateMapsResponse], request: certificate_manager.ListCertificateMapsRequest, response: certificate_manager.ListCertificateMapsResponse, *, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. Args: method (Callable): The method that was originally called, and which instantiated this pager. request (google.cloud.certificate_manager_v1.types.ListCertificateMapsRequest): The initial request object. response (google.cloud.certificate_manager_v1.types.ListCertificateMapsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = certificate_manager.ListCertificateMapsRequest(request) self._response = response self._metadata = metadata def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property def pages(self) -> Iterator[certificate_manager.ListCertificateMapsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response def __iter__(self) -> Iterator[certificate_manager.CertificateMap]: for page in self.pages: yield from page.certificate_maps def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListCertificateMapsAsyncPager: """A pager for iterating through ``list_certificate_maps`` requests. This class thinly wraps an initial :class:`google.cloud.certificate_manager_v1.types.ListCertificateMapsResponse` object, and provides an ``__aiter__`` method to iterate through its ``certificate_maps`` field. If there are more pages, the ``__aiter__`` method will make additional ``ListCertificateMaps`` requests and continue to iterate through the ``certificate_maps`` field on the corresponding responses. All the usual :class:`google.cloud.certificate_manager_v1.types.ListCertificateMapsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ def __init__( self, method: Callable[ ..., Awaitable[certificate_manager.ListCertificateMapsResponse] ], request: certificate_manager.ListCertificateMapsRequest, response: certificate_manager.ListCertificateMapsResponse, *, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. Args: method (Callable): The method that was originally called, and which instantiated this pager. request (google.cloud.certificate_manager_v1.types.ListCertificateMapsRequest): The initial request object. response (google.cloud.certificate_manager_v1.types.ListCertificateMapsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = certificate_manager.ListCertificateMapsRequest(request) self._response = response self._metadata = metadata def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property async def pages( self, ) -> AsyncIterator[certificate_manager.ListCertificateMapsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response def __aiter__(self) -> AsyncIterator[certificate_manager.CertificateMap]: async def async_generator(): async for page in self.pages: for response in page.certificate_maps: yield response return async_generator() def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListCertificateMapEntriesPager: """A pager for iterating through ``list_certificate_map_entries`` requests. This class thinly wraps an initial :class:`google.cloud.certificate_manager_v1.types.ListCertificateMapEntriesResponse` object, and provides an ``__iter__`` method to iterate through its ``certificate_map_entries`` field. 
If there are more pages, the ``__iter__`` method will make additional ``ListCertificateMapEntries`` requests and continue to iterate through the ``certificate_map_entries`` field on the corresponding responses. All the usual :class:`google.cloud.certificate_manager_v1.types.ListCertificateMapEntriesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ def __init__( self, method: Callable[..., certificate_manager.ListCertificateMapEntriesResponse], request: certificate_manager.ListCertificateMapEntriesRequest, response: certificate_manager.ListCertificateMapEntriesResponse, *, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. Args: method (Callable): The method that was originally called, and which instantiated this pager. request (google.cloud.certificate_manager_v1.types.ListCertificateMapEntriesRequest): The initial request object. response (google.cloud.certificate_manager_v1.types.ListCertificateMapEntriesResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = certificate_manager.ListCertificateMapEntriesRequest(request) self._response = response self._metadata = metadata def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property def pages(self) -> Iterator[certificate_manager.ListCertificateMapEntriesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response def __iter__(self) -> Iterator[certificate_manager.CertificateMapEntry]: for page in self.pages: yield from page.certificate_map_entries def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListCertificateMapEntriesAsyncPager: """A pager for iterating through ``list_certificate_map_entries`` requests. This class thinly wraps an initial :class:`google.cloud.certificate_manager_v1.types.ListCertificateMapEntriesResponse` object, and provides an ``__aiter__`` method to iterate through its ``certificate_map_entries`` field. If there are more pages, the ``__aiter__`` method will make additional ``ListCertificateMapEntries`` requests and continue to iterate through the ``certificate_map_entries`` field on the corresponding responses. All the usual :class:`google.cloud.certificate_manager_v1.types.ListCertificateMapEntriesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ def __init__( self, method: Callable[ ..., Awaitable[certificate_manager.ListCertificateMapEntriesResponse] ], request: certificate_manager.ListCertificateMapEntriesRequest, response: certificate_manager.ListCertificateMapEntriesResponse, *, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. Args: method (Callable): The method that was originally called, and which instantiated this pager. request (google.cloud.certificate_manager_v1.types.ListCertificateMapEntriesRequest): The initial request object. response (google.cloud.certificate_manager_v1.types.ListCertificateMapEntriesResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = certificate_manager.ListCertificateMapEntriesRequest(request) self._response = response self._metadata = metadata def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property async def pages( self, ) -> AsyncIterator[certificate_manager.ListCertificateMapEntriesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response def __aiter__(self) -> AsyncIterator[certificate_manager.CertificateMapEntry]: async def async_generator(): async for page in self.pages: for response in page.certificate_map_entries: yield response return async_generator() def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListDnsAuthorizationsPager: """A pager for iterating through ``list_dns_authorizations`` requests. This class thinly wraps an initial :class:`google.cloud.certificate_manager_v1.types.ListDnsAuthorizationsResponse` object, and provides an ``__iter__`` method to iterate through its ``dns_authorizations`` field. If there are more pages, the ``__iter__`` method will make additional ``ListDnsAuthorizations`` requests and continue to iterate through the ``dns_authorizations`` field on the corresponding responses. All the usual :class:`google.cloud.certificate_manager_v1.types.ListDnsAuthorizationsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ def __init__( self, method: Callable[..., certificate_manager.ListDnsAuthorizationsResponse], request: certificate_manager.ListDnsAuthorizationsRequest, response: certificate_manager.ListDnsAuthorizationsResponse, *, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. Args: method (Callable): The method that was originally called, and which instantiated this pager. request (google.cloud.certificate_manager_v1.types.ListDnsAuthorizationsRequest): The initial request object. response (google.cloud.certificate_manager_v1.types.ListDnsAuthorizationsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = certificate_manager.ListDnsAuthorizationsRequest(request) self._response = response self._metadata = metadata def __getattr__(self, name: str) -> Any:<|fim▁hole|> @property def pages(self) -> Iterator[certificate_manager.ListDnsAuthorizationsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response def __iter__(self) -> Iterator[certificate_manager.DnsAuthorization]: for page in self.pages: yield from page.dns_authorizations def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListDnsAuthorizationsAsyncPager: """A pager for iterating through ``list_dns_authorizations`` requests. This class thinly wraps an initial :class:`google.cloud.certificate_manager_v1.types.ListDnsAuthorizationsResponse` object, and provides an ``__aiter__`` method to iterate through its ``dns_authorizations`` field. 
If there are more pages, the ``__aiter__`` method will make additional ``ListDnsAuthorizations`` requests and continue to iterate through the ``dns_authorizations`` field on the corresponding responses. All the usual :class:`google.cloud.certificate_manager_v1.types.ListDnsAuthorizationsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ def __init__( self, method: Callable[ ..., Awaitable[certificate_manager.ListDnsAuthorizationsResponse] ], request: certificate_manager.ListDnsAuthorizationsRequest, response: certificate_manager.ListDnsAuthorizationsResponse, *, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. Args: method (Callable): The method that was originally called, and which instantiated this pager. request (google.cloud.certificate_manager_v1.types.ListDnsAuthorizationsRequest): The initial request object. response (google.cloud.certificate_manager_v1.types.ListDnsAuthorizationsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = certificate_manager.ListDnsAuthorizationsRequest(request) self._response = response self._metadata = metadata def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property async def pages( self, ) -> AsyncIterator[certificate_manager.ListDnsAuthorizationsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response def __aiter__(self) -> AsyncIterator[certificate_manager.DnsAuthorization]: async def async_generator(): async for page in self.pages: for response in page.dns_authorizations: yield response return async_generator() def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response)<|fim▁end|>
return getattr(self._response, name)
<|file_name|>gregorian.js<|end_file_name|><|fim▁begin|>define( //begin v1.x content { "dateFormatItem-Ehm": "E h:mm a", "days-standAlone-short": [ "อา.", "จ.", "อ.", "พ.", "พฤ.", "ศ.", "ส." ], "months-format-narrow": [ "ม.ค.", "ก.พ.", "มี.ค.", "เม.ย.", "พ.ค.", "มิ.ย.", "ก.ค.", "ส.ค.", "ก.ย.", "ต.ค.", "พ.ย.", "ธ.ค." ], "field-second-relative+0": "ขณะนี้", "quarters-standAlone-narrow": [ "1", "2", "3", "4" ], "field-weekday": "วันในสัปดาห์", "dateFormatItem-yQQQ": "QQQ y", "dateFormatItem-yMEd": "E d/M/y", "field-wed-relative+0": "พุธนี้", "field-wed-relative+1": "พุธหน้า", "dateFormatItem-GyMMMEd": "E d MMM G y", "dateFormatItem-MMMEd": "E d MMM", "eraNarrow": [ "ก่อน ค.ศ.", "ก.ส.ศ.", "ค.ศ.", "ส.ศ." ], "field-tue-relative+-1": "อังคารที่แล้ว", "days-format-short": [ "อา.", "จ.", "อ.", "พ.", "พฤ.", "ศ.", "ส." ], "dateFormat-long": "d MMMM G y", "field-fri-relative+-1": "ศุกร์ที่แล้ว", "field-wed-relative+-1": "พุธที่แล้ว", "months-format-wide": [ "มกราคม", "กุมภาพันธ์", "มีนาคม", "เมษายน", "พฤษภาคม", "มิถุนายน", "กรกฎาคม", "สิงหาคม", "กันยายน", "ตุลาคม", "พฤศจิกายน", "ธันวาคม" ], "dateTimeFormat-medium": "{1} {0}", "dayPeriods-format-wide-pm": "หลังเที่ยง", "dateFormat-full": "EEEEที่ d MMMM G y", "field-thu-relative+-1": "พฤหัสที่แล้ว", "dateFormatItem-Md": "d/M", "dateFormatItem-yMd": "d/M/y", "field-era": "สมัย", "dateFormatItem-yM": "M/y", "months-standAlone-wide": [ "มกราคม", "กุมภาพันธ์", "มีนาคม", "เมษายน", "พฤษภาคม", "มิถุนายน", "กรกฎาคม", "สิงหาคม", "กันยายน", "ตุลาคม", "พฤศจิกายน", "ธันวาคม" ], "timeFormat-short": "HH:mm", "quarters-format-wide": [ "ไตรมาส 1", "ไตรมาส 2", "ไตรมาส 3", "ไตรมาส 4" ], "dateFormatItem-yQQQQ": "QQQQ G y", "timeFormat-long": "H นาฬิกา mm นาที ss วินาที z", "field-year": "ปี", "dateFormatItem-yMMM": "MMM y", "field-hour": "ชั่วโมง", "months-format-abbr": [ "ม.ค.", "ก.พ.", "มี.ค.", "เม.ย.", "พ.ค.", "มิ.ย.", "ก.ค.", "ส.ค.", "ก.ย.", "ต.ค.", "พ.ย.", "ธ.ค." ], "field-sat-relative+0": "เสาร์นี้", "field-sat-relative+1": "เสาร์หน้า", "timeFormat-full": "H นาฬิกา mm นาที ss วินาที zzzz", "field-day-relative+0": "วันนี้", "field-thu-relative+0": "พฤหัสนี้", "field-day-relative+1": "พรุ่งนี้", "field-thu-relative+1": "พฤหัสหน้า", "dateFormatItem-GyMMMd": "d MMM G y", "field-day-relative+2": "มะรืนนี้", "dateFormatItem-H": "HH", "months-standAlone-abbr": [ "ม.ค.", "ก.พ.", "มี.ค.", "เม.ย.", "พ.ค.", "มิ.ย.", "ก.ค.", "ส.ค.", "ก.ย.", "ต.ค.", "พ.ย.", "ธ.ค." ], "quarters-format-abbr": [ "ไตรมาส 1", "ไตรมาส 2", "ไตรมาส 3", "ไตรมาส 4" ], "quarters-standAlone-wide": [ "ไตรมาส 1", "ไตรมาส 2", "ไตรมาส 3", "ไตรมาส 4" ], "dateFormatItem-Gy": "G y", "dateFormatItem-M": "L", "days-standAlone-wide": [ "วันอาทิตย์", "วันจันทร์", "วันอังคาร", "วันพุธ", "วันพฤหัสบดี", "วันศุกร์", "วันเสาร์" ], "dateFormatItem-MMMMd": "d MMMM", "timeFormat-medium": "HH:mm:ss", "field-sun-relative+0": "อาทิตย์นี้", "dateFormatItem-Hm": "HH:mm", "field-sun-relative+1": "อาทิตย์หน้า", "quarters-standAlone-abbr": [ "ไตรมาส 1", "ไตรมาส 2", "ไตรมาส 3", "ไตรมาส 4" ], "eraAbbr": [ "ปีก่อน ค.ศ.", "ค.ศ." ], "field-minute": "นาที", "field-dayperiod": "ช่วงวัน", "days-standAlone-abbr": [ "อา.", "จ.",<|fim▁hole|> "อ.", "พ.", "พฤ.", "ศ.", "ส." 
], "dateFormatItem-d": "d", "dateFormatItem-ms": "mm:ss", "quarters-format-narrow": [ "1", "2", "3", "4" ], "field-day-relative+-1": "เมื่อวาน", "dateFormatItem-h": "h a", "dateTimeFormat-long": "{1} {0}", "field-day-relative+-2": "เมื่อวานซืน", "dateFormatItem-MMMd": "d MMM", "dateFormatItem-MEd": "E d/M", "dateTimeFormat-full": "{1} {0}", "field-fri-relative+0": "ศุกร์นี้", "dateFormatItem-yMMMM": "MMMM G y", "field-fri-relative+1": "ศุกร์หน้า", "field-day": "วัน", "days-format-wide": [ "วันอาทิตย์", "วันจันทร์", "วันอังคาร", "วันพุธ", "วันพฤหัสบดี", "วันศุกร์", "วันเสาร์" ], "field-zone": "เขตเวลา", "dateFormatItem-y": "y", "months-standAlone-narrow": [ "ม.ค.", "ก.พ.", "มี.ค.", "เม.ย.", "พ.ค.", "มิ.ย.", "ก.ค.", "ส.ค.", "ก.ย.", "ต.ค.", "พ.ย.", "ธ.ค." ], "field-year-relative+-1": "ปีที่แล้ว", "field-month-relative+-1": "เดือนที่แล้ว", "dateFormatItem-hm": "h:mm a", "days-format-abbr": [ "อา.", "จ.", "อ.", "พ.", "พฤ.", "ศ.", "ส." ], "dateFormatItem-yMMMd": "d MMM y", "eraNames": [ "ปีก่อนคริสต์ศักราช", "คริสต์ศักราช" ], "days-format-narrow": [ "อา", "จ", "อ", "พ", "พฤ", "ศ", "ส" ], "days-standAlone-narrow": [ "อา", "จ", "อ", "พ", "พฤ", "ศ", "ส" ], "dateFormatItem-MMM": "LLL", "field-month": "เดือน", "field-tue-relative+0": "อังคารนี้", "field-tue-relative+1": "อังคารหน้า", "dayPeriods-format-wide-am": "ก่อนเที่ยง", "dateFormatItem-MMMMEd": "E d MMMM", "dateFormatItem-EHm": "E HH:mm", "field-mon-relative+0": "จันทร์นี้", "field-mon-relative+1": "จันทร์หน้า", "dateFormat-short": "d/M/yy", "dateFormatItem-EHms": "E HH:mm:ss", "dateFormatItem-Ehms": "E h:mm:ss a", "field-second": "วินาที", "field-sat-relative+-1": "เสาร์ที่แล้ว", "dateFormatItem-yMMMEd": "E d MMM y", "field-sun-relative+-1": "อาทิตย์ที่แล้ว", "field-month-relative+0": "เดือนนี้", "field-month-relative+1": "เดือนหน้า", "dateFormatItem-Ed": "E d", "dateTimeFormats-appendItem-Timezone": "{0} {1}", "field-week": "สัปดาห์", "dateFormat-medium": "d MMM y", "field-year-relative+0": "ปีนี้", "field-week-relative+-1": "สัปดาห์ที่แล้ว", "field-year-relative+1": "ปีหน้า", "dateFormatItem-mmss": "mm:ss", "dateTimeFormat-short": "{1} {0}", "dateFormatItem-Hms": "HH:mm:ss", "dateFormatItem-hms": "h:mm:ss a", "dateFormatItem-GyMMM": "MMM G y", "field-mon-relative+-1": "จันทร์ที่แล้ว", "field-week-relative+0": "สัปดาห์นี้", "field-week-relative+1": "สัปดาห์หน้า" } //end v1.x content );<|fim▁end|>
<|file_name|>dlccmds.go<|end_file_name|><|fim▁begin|>package main

import (
	"encoding/hex"
	"fmt"
	"strconv"
	"strings"
	"time"

	"github.com/fatih/color"
	"github.com/mit-dci/lit/litrpc"
	"github.com/mit-dci/lit/lnutil"
	"github.com/mit-dci/lit/logging"
)

var dlcCommand = &Command{
	Format: fmt.Sprintf("%s%s%s\n", lnutil.White("dlc"), lnutil.ReqColor("subcommand"), lnutil.OptColor("parameters...")),
	Description: fmt.Sprintf("%s\n%s\n%s\n%s\n",
		"Command for working with discreet log contracts. ",
		"Subcommand can be one of:",
		fmt.Sprintf("%-10s %s", lnutil.White("oracle"), "Command to manage oracles"),
		fmt.Sprintf("%-10s %s", lnutil.White("contract"), "Command to manage contracts"),
	),
	ShortDescription: "Command for working with Discreet Log Contracts.\n",
}

var oracleCommand = &Command{
	Format: fmt.Sprintf("%s%s%s\n", lnutil.White("dlc oracle"), lnutil.ReqColor("subcommand"), lnutil.OptColor("parameters...")),
	Description: fmt.Sprintf("%s\n%s\n%s\n%s\n",
		"Command for managing oracles. Subcommand can be one of:",
		fmt.Sprintf("%-20s %s", lnutil.White("add"), "Adds a new oracle by manually providing the pubkey"),
		fmt.Sprintf("%-20s %s", lnutil.White("import"), "Imports a new oracle using a URL to its REST interface"),
		fmt.Sprintf("%-20s %s", lnutil.White("ls"), "Shows a list of known oracles"),
	),
	ShortDescription: "Manages oracles for Discreet Log Contracts.\n",
}

var listOraclesCommand = &Command{
	Format:           fmt.Sprintf("%s\n", lnutil.White("dlc oracle ls")),
	Description:      "Shows a list of known oracles\n",
	ShortDescription: "Shows a list of known oracles\n",
}

var importOracleCommand = &Command{
	Format: fmt.Sprintf("%s%s%s\n", lnutil.White("dlc oracle import"), lnutil.ReqColor("url"), lnutil.ReqColor("name")),
	Description: fmt.Sprintf("%s\n%s\n%s\n",
		"Imports a new oracle using a URL to its REST interface",
		fmt.Sprintf("%-20s %s", lnutil.White("url"), "URL to the root of the published dlcoracle REST interface"),
		fmt.Sprintf("%-20s %s", lnutil.White("name"), "Name under which to register the oracle in LIT"),
	),
	ShortDescription: "Imports a new oracle into LIT from a REST interface\n",
}

var addOracleCommand = &Command{
	Format: fmt.Sprintf("%s%s%s\n", lnutil.White("dlc oracle add"), lnutil.ReqColor("keys"), lnutil.ReqColor("name")),
	Description: fmt.Sprintf("%s\n%s\n%s\n",
		"Adds a new oracle by entering the pubkeys manually",
		fmt.Sprintf("%-20s %s", lnutil.White("keys"), "Public key for the oracle (33 bytes in hex)"),
		fmt.Sprintf("%-20s %s", lnutil.White("name"), "Name under which to register the oracle in LIT"),
	),
	ShortDescription: "Adds a new oracle into LIT\n",
}

var contractCommand = &Command{
	Format: fmt.Sprintf("%s%s%s\n", lnutil.White("dlc contract"), lnutil.ReqColor("subcommand"), lnutil.OptColor("parameters...")),
	Description: fmt.Sprintf("%s\n%s\n%s\n%s\n%s\n%s\n"+
		"%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n",
		"Command for managing contracts. Subcommand can be one of:",
		fmt.Sprintf("%-20s %s", lnutil.White("new"), "Adds a new draft contract"),
		fmt.Sprintf("%-20s %s", lnutil.White("view"), "Views a contract"),
		fmt.Sprintf("%-20s %s", lnutil.White("viewpayout"), "Views the payout table of a contract"),
		fmt.Sprintf("%-20s %s", lnutil.White("setoracle"), "Sets a contract to use a particular oracle"),
		fmt.Sprintf("%-20s %s", lnutil.White("settime"), "Sets the settlement time of a contract"),
		fmt.Sprintf("%-20s %s", lnutil.White("setdatafeed"), "Sets the data feed to use, will fetch the R point"),
		fmt.Sprintf("%-20s %s", lnutil.White("setrpoint"), "Sets the R point manually"),
		fmt.Sprintf("%-20s %s",<|fim▁hole|>
			lnutil.White("setfunding"), "Sets the funding parameters of a contract"),
		fmt.Sprintf("%-20s %s", lnutil.White("setdivision"), "Sets the settlement division of a contract"),
		fmt.Sprintf("%-20s %s", lnutil.White("setcointype"), "Sets the cointype of a contract"),
		fmt.Sprintf("%-20s %s", lnutil.White("offer"), "Offer a draft contract to one of your peers"),
		fmt.Sprintf("%-20s %s", lnutil.White("decline"), "Decline a contract sent to you"),
		fmt.Sprintf("%-20s %s", lnutil.White("settle"), "Settles the contract"),
		fmt.Sprintf("%-20s %s", lnutil.White("ls"), "Shows a list of known contracts"),
	),
	ShortDescription: "Manages contracts for Discreet Log Contracts.\n",
}

var listContractsCommand = &Command{
	Format:           fmt.Sprintf("%s\n", lnutil.White("dlc contract ls")),
	Description:      "Shows a list of known contracts\n",
	ShortDescription: "Shows a list of known contracts\n",
}

var addContractCommand = &Command{
	Format:           fmt.Sprintf("%s\n", lnutil.White("dlc contract add")),
	Description:      "Adds a new draft contract\n",
	ShortDescription: "Adds a new draft contract\n",
}

var viewContractCommand = &Command{
	Format: fmt.Sprintf("%s%s\n", lnutil.White("dlc contract view"), lnutil.ReqColor("id")),
	Description: fmt.Sprintf("%s\n%s\n",
		"Views the current status of a contract",
		fmt.Sprintf("%-10s %s", lnutil.White("id"), "The ID of the contract to view"),
	),
	ShortDescription: "Views the current status of a contract\n",
}

var viewContractPayoutCommand = &Command{
	Format: fmt.Sprintf("%s%s%s%s%s\n", lnutil.White("dlc contract viewpayout"), lnutil.ReqColor("id"), lnutil.ReqColor("start"), lnutil.ReqColor("end"), lnutil.ReqColor("increment")),
	Description: fmt.Sprintf("%s\n%s\n%s\n%s\n%s\n",
		"Views the payout table of a contract",
		fmt.Sprintf("%-10s %s", lnutil.White("id"), "The ID of the contract to view"),
		fmt.Sprintf("%-10s %s", lnutil.White("start"), "The start value to print payout data for"),
		fmt.Sprintf("%-10s %s", lnutil.White("end"), "The end value to print payout data for"),
		fmt.Sprintf("%-10s %s", lnutil.White("increment"), "Print every X oracle value (1 = all)"),
	),
	ShortDescription: "Views the payout table of a contract\n",
}

var setContractOracleCommand = &Command{
	Format: fmt.Sprintf("%s%s\n", lnutil.White("dlc contract setoracle"), lnutil.ReqColor("cid", "oid")),
	Description: fmt.Sprintf("%s\n%s\n%s\n",
		"Configures a contract for using a specific oracle",
		fmt.Sprintf("%-10s %s", lnutil.White("cid"), "The ID of the contract"),
		fmt.Sprintf("%-10s %s", lnutil.White("oid"), "The ID of the oracle"),
	),
	ShortDescription: "Configures a contract for using a specific oracle\n",
}

var setContractDatafeedCommand = &Command{
	Format: fmt.Sprintf("%s%s\n", lnutil.White("dlc contract setdatafeed"), lnutil.ReqColor("cid", "feed")),
	Description: fmt.Sprintf("%s\n%s\n%s\n",
		"Sets the data feed to use for the contract",
		fmt.Sprintf("%-10s %s", lnutil.White("cid"), "The ID of the contract"),
		fmt.Sprintf("%-10s %s", lnutil.White("feed"), "The ID of the data feed (provided by the oracle)"),
	),
	ShortDescription: "Sets the data feed to use for the contract\n",
}

var setContractRPointCommand = &Command{
	Format: fmt.Sprintf("%s%s\n", lnutil.White("dlc contract setrpoint"), lnutil.ReqColor("cid", "rpoint")),
	Description: fmt.Sprintf("%s\n%s\n%s\n",
		"Sets the R point to use for the contract",
		fmt.Sprintf("%-10s %s", lnutil.White("cid"), "The ID of the contract"),
		fmt.Sprintf("%-10s %s", lnutil.White("rpoint"), "The Rpoint of the publication to use (33 bytes in hex)"),
	),
	ShortDescription: "Sets the R point to use for the contract\n",
}

var setContractSettlementTimeCommand = &Command{
	Format: fmt.Sprintf("%s%s\n", lnutil.White("dlc contract settime"), lnutil.ReqColor("cid", "time")),
	Description: fmt.Sprintf("%s\n%s\n%s\n",
		"Sets the settlement time for the contract",
		fmt.Sprintf("%-10s %s", lnutil.White("cid"), "The ID of the contract"),
		fmt.Sprintf("%-10s %s", lnutil.White("time"), "The settlement time (unix timestamp)"),
	),
	ShortDescription: "Sets the settlement time for the contract\n",
}

var setContractFundingCommand = &Command{
	Format: fmt.Sprintf("%s%s\n", lnutil.White("dlc contract setfunding"), lnutil.ReqColor("cid", "ourAmount", "theirAmount")),
	Description: fmt.Sprintf("%s\n%s\n%s\n%s\n",
		"Sets the amounts both parties in the contract will fund",
		fmt.Sprintf("%-10s %s", lnutil.White("cid"), "The ID of the contract"),
		fmt.Sprintf("%-10s %s", lnutil.White("ourAmount"), "The amount we will fund"),
		fmt.Sprintf("%-10s %s", lnutil.White("theirAmount"), "The amount our peer will fund"),
	),
	ShortDescription: "Sets the amount both parties will fund\n",
}

var setContractDivisionCommand = &Command{
	Format: fmt.Sprintf("%s%s\n", lnutil.White("dlc contract setdivision"), lnutil.ReqColor("cid", "valueAllForUs", "valueAllForThem")),
	Description: fmt.Sprintf("%s\n%s\n%s\n%s\n",
		"Sets the values of the oracle data that will result in the full"+
			" contract funds being paid to either peer",
		fmt.Sprintf("%-10s %s", lnutil.White("cid"), "The ID of the contract"),
		fmt.Sprintf("%-10s %s", lnutil.White("valueAllForUs"),
			"The outcome with which we will be entitled to the full"+
				" contract value"),
		fmt.Sprintf("%-10s %s", lnutil.White("valueAllForThem"),
			"The outcome with which our peer will be entitled to the full"+
				" contract value"),
	),
	ShortDescription: "Sets the edge values for dividing the funds\n",
}

var setContractCoinTypeCommand = &Command{
	Format: fmt.Sprintf("%s%s\n", lnutil.White("dlc contract setcointype"), lnutil.ReqColor("cid", "cointype")),
	Description: fmt.Sprintf("%s\n%s\n%s\n",
		"Sets the coin type to use for the contract",
		fmt.Sprintf("%-10s %s", lnutil.White("cid"), "The ID of the contract"),
		fmt.Sprintf("%-10s %s", lnutil.White("cointype"), "The ID of the coin type to use for the contract"),
	),
	ShortDescription: "Sets the coin type to use for the contract\n",
}

var declineContractCommand = &Command{
	Format: fmt.Sprintf("%s%s\n", lnutil.White("dlc contract decline"), lnutil.ReqColor("cid")),
	Description: fmt.Sprintf("%s\n%s\n",
		"Declines a contract offered to you",
		fmt.Sprintf("%-10s %s", lnutil.White("cid"), "The ID of the contract to decline"),
	),
	ShortDescription: "Declines a contract offered to you\n",
}

var acceptContractCommand = &Command{
	Format: fmt.Sprintf("%s%s\n", lnutil.White("dlc contract accept"), lnutil.ReqColor("cid")),
	Description: fmt.Sprintf("%s\n%s\n",
		"Accepts a contract offered to you",
		fmt.Sprintf("%-10s %s", lnutil.White("cid"), "The ID of the contract to accept"),
	),
	ShortDescription: "Accepts a contract offered to you\n",
}

var offerContractCommand = &Command{
	Format: fmt.Sprintf("%s%s\n", lnutil.White("dlc contract offer"), lnutil.ReqColor("cid", "peer")),
	Description: fmt.Sprintf("%s\n%s\n%s\n",
		"Offers a contract to one of your peers",
		fmt.Sprintf("%-10s %s", lnutil.White("cid"), "The ID of the contract"),
		fmt.Sprintf("%-10s %s", lnutil.White("peer"), "The ID of the peer to offer the contract to"),
	),
	ShortDescription: "Offers a contract to one of your peers\n",
}

var settleContractCommand = &Command{
	Format: fmt.Sprintf("%s%s\n", lnutil.White("dlc contract settle"), lnutil.ReqColor("cid", "oracleValue", "oracleSig")),
	Description: fmt.Sprintf("%s\n%s\n%s\n%s\n",
		"Settles the contract based on a value and signature from the oracle",
		fmt.Sprintf("%-20s %s", lnutil.White("cid"), "The ID of the contract"),
		fmt.Sprintf("%-20s %s", lnutil.White("oracleValue"), "The value the oracle published"),
		fmt.Sprintf("%-20s %s", lnutil.White("oracleSig"), "The signature from the oracle"),
	),
	ShortDescription: "Settles the contract\n",
}

func (lc *litAfClient) Dlc(textArgs []string) error {
	if len(textArgs) > 0 && textArgs[0] == "-h" {
		fmt.Fprintf(color.Output, dlcCommand.Format)
		fmt.Fprintf(color.Output, dlcCommand.Description)
		return nil
	}
	if len(textArgs) > 0 && textArgs[0] == "oracle" {
		return lc.DlcOracle(textArgs[1:])
	}
	if len(textArgs) > 0 && textArgs[0] == "contract" {
		return lc.DlcContract(textArgs[1:])
	}

	return fmt.Errorf(dlcCommand.Format)
}

func (lc *litAfClient) DlcOracle(textArgs []string) error {
	if len(textArgs) > 0 && textArgs[0] == "-h" {
		fmt.Fprintf(color.Output, oracleCommand.Format)
		fmt.Fprintf(color.Output, oracleCommand.Description)
		return nil
	}
	if len(textArgs) > 0 && textArgs[0] == "ls" {
		return lc.DlcListOracles(textArgs[1:])
	}
	if len(textArgs) > 0 && textArgs[0] == "add" {
		return lc.DlcAddOracle(textArgs[1:])
	}
	if len(textArgs) > 0 && textArgs[0] == "import" {
		return lc.DlcImportOracle(textArgs[1:])
	}

	return fmt.Errorf(oracleCommand.Format)
}

func (lc *litAfClient) DlcListOracles(textArgs []string) error {
	args := new(litrpc.ListOraclesArgs)
	reply := new(litrpc.ListOraclesReply)

	err := lc.Call("LitRPC.ListOracles", args, reply)
	if err != nil {
		return err
	}
	if len(reply.Oracles) == 0 {
		logging.Infof("No oracles found")
	}
	for _, o := range reply.Oracles {
		fmt.Fprintf(color.Output, "%04d: [%x...%x...%x] %s\n", o.Idx, o.A[:2], o.A[15:16], o.A[31:], o.Name)
	}

	return nil
}

func (lc *litAfClient) DlcImportOracle(textArgs []string) error {
	stopEx, err := CheckHelpCommand(importOracleCommand, textArgs, 2)
	if err != nil || stopEx {
		return err
	}

	args := new(litrpc.ImportOracleArgs)
	reply := new(litrpc.ImportOracleReply)
	args.Url = textArgs[0]
	args.Name = textArgs[1]

	err = lc.Call("LitRPC.ImportOracle", args, reply)
	if err != nil {
		return err
	}

	fmt.Fprintf(color.Output, "Oracle successfully registered under ID %d\n", reply.Oracle.Idx)
	return nil
}

func (lc *litAfClient) DlcAddOracle(textArgs []string) error {
	stopEx, err := CheckHelpCommand(addOracleCommand, textArgs, 2)
	if err != nil || stopEx {
		return err
	}

	args := new(litrpc.AddOracleArgs)
	reply := new(litrpc.AddOracleReply)
	args.Key = textArgs[0]
	args.Name = textArgs[1]

	err = lc.Call("LitRPC.AddOracle", args, reply)
	if err != nil {
		return err
	}

	fmt.Fprintf(color.Output, "Oracle successfully registered under ID %d\n", reply.Oracle.Idx)
	return nil
}

func (lc *litAfClient) DlcContract(textArgs []string)
error { if len(textArgs) < 1 { // this shouldn't happen? return fmt.Errorf("No argument specified") } cmd := textArgs[0] textArgs = textArgs[1:] if cmd == "-h" { fmt.Fprintf(color.Output, contractCommand.Format) fmt.Fprintf(color.Output, contractCommand.Description) return nil } if cmd == "ls" { return lc.DlcListContracts(textArgs) } if cmd == "new" { return lc.DlcNewContract(textArgs) } if cmd == "view" { return lc.DlcViewContract(textArgs) } if cmd == "viewpayout" { return lc.DlcViewContractPayout(textArgs) } if cmd == "setoracle" { return lc.DlcSetContractOracle(textArgs) } if cmd == "setdatafeed" { return lc.DlcSetContractDatafeed(textArgs) } if cmd == "setrpoint" { return lc.DlcSetContractRPoint(textArgs) } if cmd == "settime" { return lc.DlcSetContractSettlementTime(textArgs) } if cmd == "setfunding" { return lc.DlcSetContractFunding(textArgs) } if cmd == "setdivision" { return lc.DlcSetContractDivision(textArgs) } if cmd == "setcointype" { return lc.DlcSetContractCoinType(textArgs) } if cmd == "offer" { return lc.DlcOfferContract(textArgs) } if cmd == "decline" { return lc.DlcDeclineContract(textArgs) } if cmd == "accept" { return lc.DlcAcceptContract(textArgs) } if cmd == "settle" { return lc.DlcSettleContract(textArgs) } return fmt.Errorf(contractCommand.Format) } func (lc *litAfClient) DlcListContracts(textArgs []string) error { args := new(litrpc.ListContractsArgs) reply := new(litrpc.ListContractsReply) err := lc.Call("LitRPC.ListContracts", args, reply) if err != nil { return err } if len(reply.Contracts) == 0 { fmt.Println("No contracts found") } for _, c := range reply.Contracts { fmt.Fprintf(color.Output, "%04d: \n", c.Idx) } return nil } func (lc *litAfClient) DlcNewContract(textArgs []string) error { args := new(litrpc.NewContractArgs) reply := new(litrpc.NewContractReply) err := lc.Call("LitRPC.NewContract", args, reply) if err != nil { return err } fmt.Fprint(color.Output, "Contract successfully created\n\n") PrintContract(reply.Contract) return nil } func (lc *litAfClient) DlcViewContract(textArgs []string) error { stopEx, err := CheckHelpCommand(viewContractCommand, textArgs, 1) if err != nil || stopEx { return err } args := new(litrpc.GetContractArgs) reply := new(litrpc.GetContractReply) cIdx, err := strconv.ParseUint(textArgs[0], 10, 64) if err != nil { return err } args.Idx = cIdx err = lc.Call("LitRPC.GetContract", args, reply) if err != nil { return err } PrintContract(reply.Contract) return nil } func (lc *litAfClient) DlcViewContractPayout(textArgs []string) error { stopEx, err := CheckHelpCommand(viewContractPayoutCommand, textArgs, 4) if err != nil || stopEx { return err } args := new(litrpc.GetContractArgs) reply := new(litrpc.GetContractReply) cIdx, err := strconv.ParseUint(textArgs[0], 10, 64) if err != nil { return err } start, err := strconv.ParseInt(textArgs[1], 10, 64) if err != nil { return err } end, err := strconv.ParseInt(textArgs[2], 10, 64) if err != nil { return err } increment, err := strconv.ParseInt(textArgs[3], 10, 64) if err != nil { return err } args.Idx = cIdx err = lc.Call("LitRPC.GetContract", args, reply) if err != nil { return err } PrintPayout(reply.Contract, start, end, increment) return nil } func (lc *litAfClient) DlcSetContractOracle(textArgs []string) error { stopEx, err := CheckHelpCommand(setContractOracleCommand, textArgs, 2) if err != nil || stopEx { return err } args := new(litrpc.SetContractOracleArgs) reply := new(litrpc.SetContractOracleReply) cIdx, err := strconv.ParseUint(textArgs[0], 10, 64) if err != nil { return 
err } oIdx, err := strconv.ParseUint(textArgs[1], 10, 64) if err != nil { return err } args.CIdx = cIdx args.OIdx = oIdx err = lc.Call("LitRPC.SetContractOracle", args, reply) if err != nil { return err } fmt.Fprint(color.Output, "Oracle set successfully\n") return nil } func (lc *litAfClient) DlcSetContractDatafeed(textArgs []string) error { stopEx, err := CheckHelpCommand(setContractDatafeedCommand, textArgs, 2) if err != nil || stopEx { return err } args := new(litrpc.SetContractDatafeedArgs) reply := new(litrpc.SetContractDatafeedReply) cIdx, err := strconv.ParseUint(textArgs[0], 10, 64) if err != nil { return err } feed, err := strconv.ParseUint(textArgs[1], 10, 64) if err != nil { return err } args.CIdx = cIdx args.Feed = feed err = lc.Call("LitRPC.SetContractDatafeed", args, reply) if err != nil { return err } fmt.Fprint(color.Output, "Datafeed set successfully\n") return nil } func (lc *litAfClient) DlcSetContractRPoint(textArgs []string) error { stopEx, err := CheckHelpCommand(setContractRPointCommand, textArgs, 2) if err != nil || stopEx { return err } args := new(litrpc.SetContractRPointArgs) reply := new(litrpc.SetContractRPointReply) cIdx, err := strconv.ParseUint(textArgs[0], 10, 64) if err != nil { return err } rPoint, err := hex.DecodeString(textArgs[1]) if err != nil { return err } args.CIdx = cIdx copy(args.RPoint[:], rPoint[:]) err = lc.Call("LitRPC.SetContractRPoint", args, reply) if err != nil { return err } fmt.Fprint(color.Output, "R-point set successfully\n") return nil } func (lc *litAfClient) DlcSetContractSettlementTime(textArgs []string) error { stopEx, err := CheckHelpCommand(setContractSettlementTimeCommand, textArgs, 2) if err != nil || stopEx { return err } args := new(litrpc.SetContractSettlementTimeArgs) reply := new(litrpc.SetContractSettlementTimeReply) cIdx, err := strconv.ParseUint(textArgs[0], 10, 64) if err != nil { return err } time, err := strconv.ParseUint(textArgs[1], 10, 64) if err != nil { return err } args.CIdx = cIdx args.Time = time err = lc.Call("LitRPC.SetContractSettlementTime", args, reply) if err != nil { return err } fmt.Fprint(color.Output, "Settlement time set successfully\n") return nil } func (lc *litAfClient) DlcSetContractFunding(textArgs []string) error { stopEx, err := CheckHelpCommand(setContractFundingCommand, textArgs, 3) if err != nil || stopEx { return err } args := new(litrpc.SetContractFundingArgs) reply := new(litrpc.SetContractFundingReply) cIdx, err := strconv.ParseUint(textArgs[0], 10, 64) if err != nil { return err } ourAmount, err := strconv.ParseInt(textArgs[1], 10, 64) if err != nil { return err } theirAmount, err := strconv.ParseInt(textArgs[2], 10, 64) if err != nil { return err } args.CIdx = cIdx args.OurAmount = ourAmount args.TheirAmount = theirAmount err = lc.Call("LitRPC.SetContractFunding", args, reply) if err != nil { return err } fmt.Fprint(color.Output, "Funding set successfully\n") return nil } func (lc *litAfClient) DlcSetContractCoinType(textArgs []string) error { stopEx, err := CheckHelpCommand(setContractCoinTypeCommand, textArgs, 2) if err != nil || stopEx { return err } args := new(litrpc.SetContractCoinTypeArgs) reply := new(litrpc.SetContractCoinTypeReply) cIdx, err := strconv.ParseUint(textArgs[0], 10, 64) if err != nil { return err } cointype, err := strconv.ParseUint(textArgs[1], 10, 64) if err != nil { return err } args.CIdx = cIdx args.CoinType = uint32(cointype) err = lc.Call("LitRPC.SetContractCoinType", args, reply) if err != nil { return err } fmt.Fprint(color.Output, "Cointype set 
successfully\n") return nil } func (lc *litAfClient) DlcSetContractDivision(textArgs []string) error { stopEx, err := CheckHelpCommand(setContractDivisionCommand, textArgs, 3) if err != nil || stopEx { return err } args := new(litrpc.SetContractDivisionArgs) reply := new(litrpc.SetContractDivisionReply) cIdx, err := strconv.ParseUint(textArgs[0], 10, 64) if err != nil { return err } fullyOurs, err := strconv.ParseInt(textArgs[1], 10, 64) if err != nil { return err } fullyTheirs, err := strconv.ParseInt(textArgs[2], 10, 64) if err != nil { return err } args.CIdx = cIdx args.ValueFullyOurs = fullyOurs args.ValueFullyTheirs = fullyTheirs err = lc.Call("LitRPC.SetContractDivision", args, reply) if err != nil { return err } fmt.Fprint(color.Output, "Funding set successfully\n") return nil } func (lc *litAfClient) DlcOfferContract(textArgs []string) error { stopEx, err := CheckHelpCommand(offerContractCommand, textArgs, 2) if err != nil || stopEx { return err } args := new(litrpc.OfferContractArgs) reply := new(litrpc.OfferContractReply) cIdx, err := strconv.ParseUint(textArgs[0], 10, 64) if err != nil { return err } peerIdx, err := strconv.ParseUint(textArgs[1], 10, 64) if err != nil { return err } args.CIdx = cIdx args.PeerIdx = uint32(peerIdx) err = lc.Call("LitRPC.OfferContract", args, reply) if err != nil { return err } fmt.Fprint(color.Output, "Offer sent set successfully\n") return nil } func (lc *litAfClient) dlcContractRespond(textArgs []string, aor bool) error { args := new(litrpc.ContractRespondArgs) reply := new(litrpc.ContractRespondReply) cIdx, err := strconv.ParseUint(textArgs[0], 10, 64) if err != nil { return err } args.CIdx = cIdx args.AcceptOrDecline = aor err = lc.Call("LitRPC.ContractRespond", args, reply) if err != nil { return err } if aor { fmt.Fprintf(color.Output, "Offer acceptance initiated. 
Use [dlc contract view %d] to see the status.\n", cIdx) } else { fmt.Fprint(color.Output, "Offer declined successfully\n") } return nil } func (lc *litAfClient) DlcDeclineContract(textArgs []string) error { stopEx, err := CheckHelpCommand(declineContractCommand, textArgs, 1) if err != nil || stopEx { return err } return lc.dlcContractRespond(textArgs, false) } func (lc *litAfClient) DlcAcceptContract(textArgs []string) error { stopEx, err := CheckHelpCommand(acceptContractCommand, textArgs, 1) if err != nil || stopEx { return err } return lc.dlcContractRespond(textArgs, true) } func (lc *litAfClient) DlcSettleContract(textArgs []string) error { stopEx, err := CheckHelpCommand(settleContractCommand, textArgs, 3) if err != nil || stopEx { return err } args := new(litrpc.SettleContractArgs) reply := new(litrpc.SettleContractReply) cIdx, err := strconv.ParseUint(textArgs[0], 10, 64) if err != nil { return err } args.CIdx = cIdx oracleValue, err := strconv.ParseInt(textArgs[1], 10, 64) if err != nil { return err } args.OracleValue = oracleValue oracleSigBytes, err := hex.DecodeString(textArgs[2]) if err != nil { return err } copy(args.OracleSig[:], oracleSigBytes) err = lc.Call("LitRPC.SettleContract", args, reply) if err != nil { return err } fmt.Fprint(color.Output, "Contract settled successfully\n") return nil } func PrintContract(c *lnutil.DlcContract) { fmt.Fprintf(color.Output, "%-30s : %d\n", lnutil.White("Index"), c.Idx) fmt.Fprintf(color.Output, "%-30s : [%x...%x...%x]\n", lnutil.White("Oracle public key"), c.OracleA[:2], c.OracleA[15:16], c.OracleA[31:]) fmt.Fprintf(color.Output, "%-30s : [%x...%x...%x]\n", lnutil.White("Oracle R-point"), c.OracleR[:2], c.OracleR[15:16], c.OracleR[31:]) fmt.Fprintf(color.Output, "%-30s : %s\n", lnutil.White("Settlement time"), time.Unix(int64(c.OracleTimestamp), 0).UTC().Format(time.UnixDate)) fmt.Fprintf(color.Output, "%-30s : %d\n", lnutil.White("Funded by us"), c.OurFundingAmount) fmt.Fprintf(color.Output, "%-30s : %d\n", lnutil.White("Funded by peer"), c.TheirFundingAmount) fmt.Fprintf(color.Output, "%-30s : %d\n", lnutil.White("Coin type"), c.CoinType) peer := "None" if c.PeerIdx > 0 { peer = fmt.Sprintf("Peer %d", c.PeerIdx) } fmt.Fprintf(color.Output, "%-30s : %s\n", lnutil.White("Peer"), peer) status := "Draft" switch c.Status { case lnutil.ContractStatusActive: status = "Active" case lnutil.ContractStatusClosed: status = "Closed" case lnutil.ContractStatusOfferedByMe: status = "Sent offer, awaiting reply" case lnutil.ContractStatusOfferedToMe: status = "Received offer, awaiting reply" case lnutil.ContractStatusAccepting: status = "Accepting" case lnutil.ContractStatusAccepted: status = "Accepted" case lnutil.ContractStatusAcknowledged: status = "Acknowledged" case lnutil.ContractStatusError: status = "Error" case lnutil.ContractStatusDeclined: status = "Declined" } fmt.Fprintf(color.Output, "%-30s : %s\n\n", lnutil.White("Status"), status) increment := int64(len(c.Division) / 10) PrintPayout(c, 0, int64(len(c.Division)), increment) } func PrintPayout(c *lnutil.DlcContract, start, end, increment int64) { fmt.Fprintf(color.Output, "Payout division:\n\n") fmt.Fprintf(color.Output, "%-20s | %-20s | %-20s\n", "Oracle value", "Our payout", "Their payout") fmt.Fprintf(color.Output, "%s\n", strings.Repeat("-", 66)) for i := start; i < end; i += increment { fmt.Fprintf(color.Output, "%20d | %20d | %20d\n", c.Division[i].OracleValue, c.Division[i].ValueOurs, c.OurFundingAmount+c.TheirFundingAmount-c.Division[i].ValueOurs) } }<|fim▁end|>
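The setdivision/viewpayout commands above only ever display the Division table; how that table is derived happens on the RPC server and is not shown here. As a purely hypothetical Python sketch, one common choice is a linear interpolation between the two edge outcomes, with the counterparty column computed by the same total-minus-ours subtraction PrintPayout uses:

def payout_table(our_funding, their_funding, all_for_us, all_for_them, oracle_values):
    """Linear division: the all-for-them outcome pays us 0, all-for-us pays us everything."""
    total = our_funding + their_funding
    rows = []
    for v in oracle_values:
        frac = (v - all_for_them) / float(all_for_us - all_for_them)
        frac = min(max(frac, 0.0), 1.0)          # clamp outside the edge outcomes
        ours = int(round(total * frac))
        rows.append((v, ours, total - ours))     # same subtraction PrintPayout performs
    return rows

for value, ours, theirs in payout_table(50000, 50000, 20000, 10000, range(10000, 20001, 2500)):
    print('%20d | %20d | %20d' % (value, ours, theirs))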
<|file_name|>kml_view.py<|end_file_name|><|fim▁begin|>from django.views.generic import ListView
from django.http import HttpResponse
from .models import Job
from geoq.maps.models import FeatureType
from django.shortcuts import get_object_or_404
from datetime import datetime
from pytz import timezone
from webcolors import name_to_hex, normalize_hex
from xml.sax.saxutils import escape as xml_escape


class JobKML(ListView):

    model = Job

    def get(self, request, *args, **kwargs):
        job = get_object_or_404(Job, pk=self.kwargs.get('pk'))
        feature_types = FeatureType.objects.all()

        aoi_count = job.total_count()
        aoi_complete = job.complete_count()
        aoi_work = job.in_work_count()
        cookie_url_trailer = get_cookie_trailer(request)

        description = 'Job #'+str(job.id)+': '+str(job.name)+'\n'+str(job.project.name)+'\n'

        if aoi_count == 0:
            output = '<?xml version="1.0" encoding="UTF-8"?>\n'
            output += '<kml xmlns="http://www.opengis.net/kml/2.2">\n'
            output += ' <Document>\n'
            output += ' <name>Empty Job</name>\n'
            output += ' <description>'+description+'</description>\n'
            output += ' </Document>\n'
            output += '</kml>\n'

            return HttpResponse(output, content_type="application/vnd.google-earth.kml+xml", status=200)

        aoi_comp_pct = int(100 * float(aoi_complete)/float(aoi_count))
        aoi_work_pct = int(100 * float(aoi_work)/float(aoi_count))
        aoi_tot_pct = int(100 * float(aoi_work+aoi_complete)/float(aoi_count))

        doc_name = 'GeoQ C:'+str(aoi_complete)+', W:'+str(aoi_work)+', Tot:'+str(aoi_count)+' ['+str(aoi_tot_pct)+'%]'
        description = description + 'Complete Cells: ' + str(aoi_complete) + ' ['+str(aoi_comp_pct)+'%], In Work: ' + str(aoi_work) + ' ['+str(aoi_work_pct)+'%], Total: ' + str(aoi_count)

        output = '<?xml version="1.0" encoding="UTF-8"?>\n'
        output += '<kml xmlns="http://www.opengis.net/kml/2.2">\n'
        output += ' <Document>\n'
        output += ' <name>'+doc_name+'</name>\n'
        output += ' <description>'+description+'</description>\n'
        output += ' <Style id="geoq_inwork">\n'
        output += ' <LineStyle>\n'
        output += ' <width>4</width>\n'
        output += ' <color>7f0186cf</color>\n'
        output += ' </LineStyle>\n'
        output += ' <PolyStyle>\n'
        output += ' <fill>0</fill>\n'
        output += ' <outline>1</outline>\n'
        output += ' </PolyStyle>\n'
        output += ' </Style>\n'
        output += ' <Style id="geoq_complete">\n'
        output += ' <LineStyle>\n'
        output += ' <width>3</width>\n'
        output += ' <color>7f0101cf</color>\n'
        output += ' </LineStyle>\n'
        output += ' <PolyStyle>\n'
        output += ' <fill>0</fill>\n'
        output += ' <outline>1</outline>\n'
        output += ' </PolyStyle>\n'
        output += ' </Style>\n'
        output += ' <Style id="geoq_unassigned">\n'
        output += ' <LineStyle>\n'
        output += ' <width>2</width>\n'
        output += ' <color>7f00ff00</color>\n'
        output += ' </LineStyle>\n'
        output += ' <PolyStyle>\n'
        output += ' <fill>0</fill>\n'
        output += ' <outline>1</outline>\n'
        output += ' </PolyStyle>\n'
        output += ' </Style>\n'

        for feature in feature_types:
            output += ' <Style id="geoq_'+str(feature.id)+'">\n'
            out_color = '7f0066ff'

            if feature.style == None:
                output += ' </Style>\n'
                continue

            if 'color' in feature.style:
                color = feature.style['color']

                #convert to a kml-recognized color
                if color[0:1] == '#' and len(color) == 4:
                    color = normalize_hex(color)
                try:
                    c = name_to_hex(color)
                    out_color = '7f' + c[5:7] + c[3:5] + c[1:3]
                except Exception:
                    out_color = '7f0066ff'

                output += ' <PolyStyle>\n'
                output += ' <color>'+out_color+'</color>\n'
                output += ' <colorMode>normal</colorMode>\n'
                output += ' <fill>1</fill>\n'
                output += ' <outline>1</outline>\n'
                output += ' </PolyStyle>\n'

            if 'weight' in feature.style:
                output += ' 
<LineStyle>\n' output += ' <width>'+str(feature.style['weight'])+'</width>\n' if 'color' in feature.style: output += ' <color>'+out_color+'</color>\n' output += ' </LineStyle>\n' if 'iconUrl' in feature.style: icon_url = str(feature.style['iconUrl']) if not icon_url.startswith("http"): icon_url = request.build_absolute_uri(icon_url) else: icon_url += cookie_url_trailer output += ' <IconStyle>\n' output += ' <Icon>\n' output += ' <href>' + xml_escape(icon_url) + '</href>\n' output += ' </Icon>\n' output += ' </IconStyle>\n' output += ' </Style>\n' # locations = job.feature_set.all().order_by('template') locations = job.feature_set.all()\ .extra(tables=['maps_featuretype'])\ .extra(where=['maps_featuretype.id=maps_feature.template_id'])\ .order_by('maps_featuretype.name') last_template = "" skip_the_first = True template_has_started = False for loc in locations: template_name = str(loc.template.name) if template_name != last_template: if skip_the_first: skip_the_first = False else: output += ' </Folder>\n' output += ' <Folder><name>'+template_name+'</name>\n' last_template = template_name template_has_started = True analyst_name = str(loc.analyst.username) dtg = str(loc.created_at) job_id = str(loc.job.id) #TODO: Add links to Jobs and Projects datetime_obj = datetime.strptime(dtg, "%Y-%m-%d %H:%M:%S.%f+00:00") datetime_obj_utc = datetime_obj.replace(tzinfo=timezone('UTC')) date_time = datetime_obj_utc.strftime('%Y-%m-%dT%H:%M:%SZ') date_time_desc = datetime_obj_utc.strftime('%Y-%m-%d %H:%M:%S') desc = 'Posted by '+analyst_name+' at '+date_time_desc+' Zulu (UTC) in Job #'+job_id #TODO: Add more details #TODO: Add links to linked objects #Simplify polygons to reduce points in complex shapes if loc.the_geom.num_coords > 0: #skip empty locations simplegeom = loc.the_geom.simplify(0.0002) if simplegeom.num_coords > 0: kml = str(loc.the_geom.simplify(0.0002).kml) else: kml = str(loc.the_geom.kml) if '<Polygon><outerBoundaryIs><LinearRing><coordinates>' in kml: add_text = '<altitudeMode>clampToGround</altitudeMode>' kml = kml.replace('<coordinates>', add_text+'<coordinates>') kml = kml.replace('</outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing>', '') output += ' <Placemark><name>'+template_name+'</name>\n' output += ' <TimeStamp><when>'+date_time+'</when></TimeStamp>\n' output += ' <description>'+desc+'</description>\n' output += ' <styleUrl>#geoq_'+str(loc.template.id)+'</styleUrl>\n' output += ' '+str(kml)+'\n' output += ' </Placemark>\n' if template_has_started: output += ' </Folder>\n' output += ' <Folder><name>Work Cells</name>\n' aois = job.aois.order_by('status') for aoi in aois: style = 'complete' if aoi.status == 'In work': style = 'inwork' if aoi.status == 'Unassigned': style = 'unassigned' aoi_name = "#"+str(aoi.id)+", "+str(aoi.status)+" - Priority:"+str(aoi.priority) kml = str(aoi.polygon.simplify(0.0002).kml) if '<Polygon><outerBoundaryIs><LinearRing><coordinates>' in kml: add_text = '<tessellate>1</tessellate><altitudeMode>clampToGround</altitudeMode>' kml = kml.replace('<coordinates>', add_text+'<coordinates>') output += ' <Placemark>\n' output += ' <name>'+aoi_name+'</name>\n' output += ' <styleUrl>#geoq_'+style+'</styleUrl>\n' output += ' '+kml+'\n' output += ' </Placemark>\n' output += ' </Folder>\n' output += ' </Document>\n' output += '</kml>' return HttpResponse(output, content_type="application/vnd.google-earth.kml+xml", status=200) def get_cookie_trailer(request):<|fim▁hole|> cookies_to_look_for = ['iPlanetDirectoryPro'] #TODO: Pull this from an admin setting 
cookie_url_trailer = '' for cook in cookies_to_look_for: cookie = request.COOKIES.get(cook, None) if cookie: cookie_url_trailer += cook + "=" + cookie if cookie_url_trailer: cookie_url_trailer = "?" + cookie_url_trailer return cookie_url_trailer class JobKMLNetworkLink(ListView): model = Job def get(self, request, *args, **kwargs): id = self.kwargs.get('pk') job = get_object_or_404(Job, pk=id) setting_zoom_auto = True #TODO: Pull from settings settings_refresh_every = 90 #TODO: Pull from settings cookie_url_trailer = get_cookie_trailer(request) url = request.build_absolute_uri('/geoq/api/job/'+id+'.kml' + cookie_url_trailer) aoi_count = job.total_count() aoi_complete = job.complete_count() aoi_work = job.in_work_count() aoi_comp_pct = int(100 * float(aoi_complete)/float(aoi_count)) if aoi_count > 0 else 0 aoi_work_pct = int(100 * float(aoi_work)/float(aoi_count)) if aoi_count > 0 else 0 aoi_tot_pct = int(100 * float(aoi_work+aoi_complete)/float(aoi_count)) if aoi_count > 0 else 0 doc_name = 'GeoQ C:'+str(aoi_complete)+', W:'+str(aoi_work)+', Tot:'+str(aoi_count)+' ['+str(aoi_tot_pct)+'%]' description = 'Job #'+str(job.id)+': '+str(job.name)+'\n'+str(job.project.name)+'\n' description = description + 'Complete Cells: ' + str(aoi_complete) + ' ['+str(aoi_comp_pct)+'%], In Work: ' + str(aoi_work) + ' ['+str(aoi_work_pct)+'%], Total: ' + str(aoi_count) output = '<?xml version="1.0" encoding="UTF-8"?>\n' output += '<kml xmlns="http://www.opengis.net/kml/2.2">\n' output += ' <Folder>\n' output += ' <name>GeoQ Worked Cells</name>\n' output += ' <visibility>1</visibility>\n' output += ' <open>1</open>\n' output += ' <description>Work progress from GeoQ</description>\n' output += ' <NetworkLink id="GeoQ-'+id+'">\n' output += ' <name>'+doc_name+'</name>\n' output += ' <visibility>1</visibility>\n' output += ' <open>1</open>\n' output += ' <description>'+description+'</description>\n' output += ' <refreshVisibility>0</refreshVisibility>\n' if setting_zoom_auto: output += ' <flyToView>1</flyToView>\n' output += ' <Link>\n' output += ' <href>'+url+'</href>\n' if settings_refresh_every: output += ' <refreshInterval>'+str(settings_refresh_every)+'</refreshInterval>\n' # Refresh every n seconds output += ' <refreshMode>onInterval</refreshMode>\n' output += ' <viewRefreshTime>5</viewRefreshTime>\n' # Also refresh after viewscreen movement output += ' <viewRefreshMode>onStop</viewRefreshMode>\n' output += ' </Link>\n' output += ' </NetworkLink>\n' output += ' </Folder>\n' output += '</kml>' return HttpResponse(output, content_type="application/vnd.google-earth.kml+xml", status=200)<|fim▁end|>
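One detail of JobKML worth isolating: KML encodes colors as aabbggrr (alpha, blue, green, red), so the view reverses the byte order of the CSS hex it obtains via webcolors. A stand-alone sketch of that conversion which, unlike the view (whose name_to_hex call only accepts color names and otherwise falls back to the default orange), also handles hex input directly; it assumes only the webcolors package the view already imports:

from webcolors import name_to_hex, normalize_hex

def css_to_kml(color, alpha='7f', fallback='7f0066ff'):
    """CSS color name or hex in, KML aabbggrr hex out (KML reverses RGB)."""
    try:
        if color.startswith('#'):
            c = normalize_hex(color)        # expands shorthand like '#f60' to '#ff6600'
        else:
            c = name_to_hex(color)          # e.g. 'red' -> '#ff0000'
    except ValueError:
        return fallback                     # same default the view falls back to
    return alpha + c[5:7] + c[3:5] + c[1:3]  # reorder bytes to aabbggrr

assert css_to_kml('red') == '7f0000ff'
assert css_to_kml('#0066ff') == '7fff6600'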
<|file_name|>01-run.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 import sys from testrunner import run def testfunc(child): child.expect("All up, running the shell now") child.sendline("ifconfig")<|fim▁hole|> child.expect(r"Iface\s+(\d+)\s+HWaddr:") if __name__ == "__main__": sys.exit(run(testfunc, timeout=1, echo=False))<|fim▁end|>
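For reference, the expect() pattern in this test keys on RIOT's ifconfig output. A quick stand-alone check of what the regex captures; the sample line here is invented, not real device output:

import re

pattern = re.compile(r"Iface\s+(\d+)\s+HWaddr:")
sample = "Iface  7  HWaddr: 76:f5:98:9f:40:22"
match = pattern.search(sample)
assert match is not None and match.group(1) == "7"  # group 1 is the interface number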
<|file_name|>facebook.py<|end_file_name|><|fim▁begin|>from flask import Flask, redirect, url_for, session, request from flask_oauthlib.client import OAuth, OAuthException FACEBOOK_APP_ID = '188477911223606' FACEBOOK_APP_SECRET = '621413ddea2bcc5b2e83d42fc40495de' app = Flask(__name__) app.debug = True app.secret_key = 'development' oauth = OAuth(app) facebook = oauth.remote_app( 'facebook', consumer_key=FACEBOOK_APP_ID, consumer_secret=FACEBOOK_APP_SECRET, request_token_params={'scope': 'email'}, base_url='https://graph.facebook.com', request_token_url=None, access_token_url='/oauth/access_token', authorize_url='https://www.facebook.com/dialog/oauth' ) @app.route('/') def index(): return redirect(url_for('login')) @app.route('/login') def login(): callback = url_for( 'facebook_authorized', next=request.args.get('next') or request.referrer or None, _external=True ) return facebook.authorize(callback=callback) @app.route('/login/authorized') def facebook_authorized(): resp = facebook.authorized_response() if resp is None: return 'Access denied: reason=%s error=%s' % ( request.args['error_reason'], request.args['error_description'] ) if isinstance(resp, OAuthException): return 'Access denied: %s' % resp.message session['oauth_token'] = (resp['access_token'], '') me = facebook.get('/me') return 'Logged in as id=%s name=%s redirect=%s' % \<|fim▁hole|> (me.data['id'], me.data['name'], request.args.get('next')) @facebook.tokengetter def get_facebook_oauth_token(): return session.get('oauth_token') if __name__ == '__main__': app.run()<|fim▁end|>
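The piece of machinery this example leans on is the tokengetter indirection: flask-oauthlib resolves the OAuth token through the registered callback each time facebook.get() fires, rather than taking the token as an argument. A sketch of that lookup pattern with Flask stripped away; RemoteApp here is a toy stand-in for illustration, not the real flask_oauthlib class:

class RemoteApp:
    def __init__(self):
        self._tokengetter = None

    def tokengetter(self, f):
        # Used as a decorator, analogous to @facebook.tokengetter above.
        self._tokengetter = f
        return f

    def get(self, path):
        token = self._tokengetter() if self._tokengetter else None
        if token is None:
            raise RuntimeError('no token available; complete the authorize flow first')
        return 'GET %s with token %s' % (path, token[0])

app = RemoteApp()
session = {'oauth_token': ('abc123', '')}

@app.tokengetter
def get_token():
    return session.get('oauth_token')

assert app.get('/me') == 'GET /me with token abc123'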
<|file_name|>distance.py<|end_file_name|><|fim▁begin|>INF = 2147483647  # marker for an as-yet-unreached room (walls are -1, gates are 0)


class WallsGate(object):
    def dfs(self, rooms):
        # Seed the stack with every gate (value 0) at distance 0.
        queue = [(i, j, 0) for i, rows in enumerate(rooms) for j, v in enumerate(rows) if not v]
        while queue:
            i, j, step = queue.pop()
            if rooms[i][j] > step:
                rooms[i][j] = step
            for newi, newj in ((i + 1, j), (i - 1, j), (i, j + 1), (i, j - 1)):
                if 0 <= newi < len(rooms) and 0 <= newj < len(rooms[0]) and step < rooms[newi][newj]:
                    queue.append((newi, newj, step + 1))

    def bfs(self, rooms):
        row=len(rooms)
        col=len(rooms[0])
        queue=[]
        for i in xrange(row):
            for j in xrange(col):
                if rooms[i][j]==0:
                    queue.append(i*col+j)
        while queue:
            x=queue.pop(0)<|fim▁hole|>
                    queue.append(newi*col+newj)

    def naivedfs(self, rooms):
        for i in xrange(len(rooms)):
            for j in xrange(len(rooms[0])):
                if rooms[i][j]==0:
                    self._dfsrev(rooms,i,j)

    def _dfsrev(self,rooms,i,j):
        for newi,newj in (i+1,j),(i-1,j),(i,j+1),(i,j-1):
            # Relax the neighbour only when going through (i, j) is shorter.
            if 0 <= newi < len(rooms) and 0 <= newj < len(rooms[0]) and rooms[newi][newj]>rooms[i][j]+1:
                rooms[newi][newj]=rooms[i][j]+1
                self._dfsrev(rooms,newi,newj)<|fim▁end|>
i,j=x/col,x%col for newi,newj in (i+1,j),(i-1,j),(i,j+1),(i,j-1): if 0 <= newi < len(rooms) and 0 <= newj < len(rooms[0]) and rooms[newi][newj]==INF: rooms[newi][newj]=rooms[i][j]+1
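For comparison with the file above: the textbook formulation of this problem (Walls and Gates) is a multi-source BFS seeded with every gate at once, so each room is first reached at its minimum distance. A self-contained reference sketch using the usual conventions of -1 for walls, 0 for gates and INF for empty rooms; it is not part of the original file:

from collections import deque

INF = 2147483647  # empty-room marker

def walls_and_gates(rooms):
    if not rooms:
        return
    rows, cols = len(rooms), len(rooms[0])
    # Every gate enters the queue up front, so the BFS frontier expands
    # from all gates simultaneously.
    queue = deque((i, j) for i in range(rows) for j in range(cols) if rooms[i][j] == 0)
    while queue:
        i, j = queue.popleft()
        for ni, nj in ((i + 1, j), (i - 1, j), (i, j + 1), (i, j - 1)):
            if 0 <= ni < rows and 0 <= nj < cols and rooms[ni][nj] == INF:
                rooms[ni][nj] = rooms[i][j] + 1
                queue.append((ni, nj))

grid = [[INF, -1, 0, INF],
        [INF, INF, INF, -1],
        [INF, -1, INF, -1],
        [0, -1, INF, INF]]
walls_and_gates(grid)
assert grid[0][0] == 3  # nearest gate is three steps away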
<|file_name|>invibesBidAdapter_spec.js<|end_file_name|><|fim▁begin|>import { expect } from 'chai'; import { spec, resetInvibes, stubDomainOptions } from 'modules/invibesBidAdapter'; describe('invibesBidAdapter:', function () { const BIDDER_CODE = 'invibes'; const PLACEMENT_ID = '12345'; const ENDPOINT = '//bid.videostep.com/Bid/VideoAdContent'; const SYNC_ENDPOINT = '//k.r66net.com/GetUserSync'; let bidRequests = [ { bidId: 'b1', bidder: BIDDER_CODE, bidderRequestId: 'r1', params: { placementId: PLACEMENT_ID }, adUnitCode: 'test-div', auctionId: 'a1', sizes: [ [300, 250], [400, 300], [125, 125] ], transactionId: 't1' }, { bidId: 'b2', bidder: BIDDER_CODE, bidderRequestId: 'r2', params: { placementId: 'abcde' }, adUnitCode: 'test-div', auctionId: 'a2', sizes: [ [300, 250], [400, 300] ], transactionId: 't2' } ]; let StubbedPersistence = function(initialValue) { var value = initialValue; return { load: function () { let str = value || ''; try { return JSON.parse(str); } catch (e) { } }, save: function (obj) { value = JSON.stringify(obj); } } }; beforeEach(function () { resetInvibes(); document.cookie = ''; this.cStub1 = sinon.stub(console, 'info'); }); afterEach(function () { this.cStub1.restore(); }); describe('isBidRequestValid:', function () { context('valid bid request:', function () { it('returns true when bidder params.placementId is set', function() { const validBid = { bidder: BIDDER_CODE, params: { placementId: PLACEMENT_ID } } expect(spec.isBidRequestValid(validBid)).to.be.true; }) }); context('invalid bid request:', function () { it('returns false when no params', function () { const invalidBid = { bidder: BIDDER_CODE } expect(spec.isBidRequestValid(invalidBid)).to.be.false; }); it('returns false when placementId is not set', function() { const invalidBid = { bidder: BIDDER_CODE, params: { id: '5' } } expect(spec.isBidRequestValid(invalidBid)).to.be.false; }); it('returns false when bid response was previously received', function() { const validBid = { bidder: BIDDER_CODE, params: { placementId: PLACEMENT_ID } } top.window.invibes.bidResponse = { prop: 'prop' }; expect(spec.isBidRequestValid(validBid)).to.be.false; }); }); }); describe('buildRequests', function () { it('sends bid request to ENDPOINT via GET', function () { const request = spec.buildRequests(bidRequests); expect(request.url).to.equal(ENDPOINT); expect(request.method).to.equal('GET'); }); it('sends cookies with the bid request', function () { const request = spec.buildRequests(bidRequests); expect(request.options.withCredentials).to.equal(true); }); it('has location, html id, placement and width/height', function () { const request = spec.buildRequests(bidRequests, { auctionStart: Date.now() }); const parsedData = request.data; expect(parsedData.location).to.exist; expect(parsedData.videoAdHtmlId).to.exist; expect(parsedData.vId).to.exist; expect(parsedData.width).to.exist; expect(parsedData.height).to.exist; }); it('has capped ids if local storage variable is correctly formatted', function () { localStorage.ivvcap = '{"9731":[1,1768600800000]}'; const request = spec.buildRequests(bidRequests, { auctionStart: Date.now() }); expect(request.data.capCounts).to.equal('9731=1'); }); it('does not have capped ids if local storage variable is incorrectly formatted', function () { localStorage.ivvcap = ':[1,1574334216992]}'; const request = spec.buildRequests(bidRequests, { auctionStart: Date.now() }); expect(request.data.capCounts).to.equal(''); }); it('does not have capped ids if local storage variable is expired', function 
() {
      localStorage.ivvcap = '{"9731":[1,1574330064104]}';
      const request = spec.buildRequests(bidRequests, { auctionStart: Date.now() });
      expect(request.data.capCounts).to.equal('');
    });

    it('sends query string params from localstorage 1', function () {
      localStorage.ivbs = JSON.stringify({ bvci: 1 });
      const request = spec.buildRequests(bidRequests, { auctionStart: Date.now() });
      expect(request.data.bvci).to.equal(1);
    });

    it('sends query string params from localstorage 2', function () {
      localStorage.ivbs = JSON.stringify({ invibbvlog: true });
      const request = spec.buildRequests(bidRequests, { auctionStart: Date.now() });
      expect(request.data.invibbvlog).to.equal(true);
    });

    it('does not send query string params from localstorage if unknown', function () {
      localStorage.ivbs = JSON.stringify({ someparam: true });
      const request = spec.buildRequests(bidRequests, { auctionStart: Date.now() });
      expect(request.data.someparam).to.be.undefined;
    });

    it('sends all Placement Ids', function () {
      const request = spec.buildRequests(bidRequests);
      expect(JSON.parse(request.data.bidParamsJson).placementIds).to.contain(bidRequests[0].params.placementId);
      expect(JSON.parse(request.data.bidParamsJson).placementIds).to.contain(bidRequests[1].params.placementId);
    });

    it('uses cookies', function () {
      global.document.cookie = 'ivNoCookie=1';
      let request = spec.buildRequests(bidRequests);
      expect(request.data.lId).to.be.undefined;
    });

    it('doesnt send the domain id if not graduated', function () {
      global.document.cookie = 'ivbsdid={"id":"dvdjkams6nkq","cr":1522929537626,"hc":1}';
      let request = spec.buildRequests(bidRequests);
      expect(request.data.lId).to.not.exist;
    });

    it('try to graduate but not enough count - doesnt send the domain id', function () {
      global.document.cookie = 'ivbsdid={"id":"dvdjkams6nkq","cr":1521818537626,"hc":0}';
      let bidderRequest = { gdprConsent: { vendorData: { vendorConsents: { 436: true } } } };
      let request = spec.buildRequests(bidRequests, bidderRequest);
      expect(request.data.lId).to.not.exist;
    });

    it('try to graduate but not old enough - doesnt send the domain id', function () {
      let bidderRequest = { gdprConsent: { vendorData: { vendorConsents: { 436: true } } } };
      global.document.cookie = 'ivbsdid={"id":"dvdjkams6nkq","cr":' + Date.now() + ',"hc":5}';
      let request = spec.buildRequests(bidRequests, bidderRequest);
      expect(request.data.lId).to.not.exist;
    });

    it('graduate and send the domain id', function () {
      let bidderRequest = { gdprConsent: { vendorData: { vendorConsents: { 436: true } } } };
      stubDomainOptions(new StubbedPersistence('{"id":"dvdjkams6nkq","cr":1521818537626,"hc":7}'));
      let request = spec.buildRequests(bidRequests, bidderRequest);
      expect(request.data.lId).to.exist;
    });

    it('send the domain id if already graduated', function () {
      let bidderRequest = { gdprConsent: { vendorData: { vendorConsents: { 436: true } } } };
      stubDomainOptions(new StubbedPersistence('{"id":"f8zoh044p9oi"}'));
      let request = spec.buildRequests(bidRequests, bidderRequest);
      expect(request.data.lId).to.exist;
      expect(top.window.invibes.dom.tempId).to.exist;
    });

    it('send the domain id after replacing it with new format', function () {
      let bidderRequest = { gdprConsent: { vendorData: { vendorConsents: { 436: true } } } };
      stubDomainOptions(new StubbedPersistence('{"id":"f8zoh044p9oi.8537626"}'));
      let request = spec.buildRequests(bidRequests, bidderRequest);
      expect(request.data.lId).to.exist;
      expect(top.window.invibes.dom.tempId).to.exist;
    });

    it('dont send the domain id if consent declined', function () {
      let
bidderRequest = { gdprConsent: { vendorData: { vendorConsents: { 436: false } } } }; stubDomainOptions(new StubbedPersistence('{"id":"f8zoh044p9oi.8537626"}')); let request = spec.buildRequests(bidRequests, bidderRequest); expect(request.data.lId).to.not.exist; expect(top.window.invibes.dom.tempId).to.not.exist; }); it('dont send the domain id if no consent', function () { let bidderRequest = { }; stubDomainOptions(new StubbedPersistence('{"id":"f8zoh044p9oi.8537626"}')); let request = spec.buildRequests(bidRequests, bidderRequest); expect(request.data.lId).to.not.exist; expect(top.window.invibes.dom.tempId).to.not.exist; }); it('try to init id but was already loaded on page - does not increment the id again', function () { let bidderRequest = { gdprConsent: { vendorData: { vendorConsents: { 436: true } } } }; global.document.cookie = 'ivbsdid={"id":"dvdjkams6nkq","cr":1521818537626,"hc":0}'; let request = spec.buildRequests(bidRequests, bidderRequest); request = spec.buildRequests(bidRequests, bidderRequest); expect(request.data.lId).to.not.exist; expect(top.window.invibes.dom.tempId).to.exist; }); }); describe('interpretResponse', function () { let response = { Ads: [{ BidPrice: 0.5, VideoExposedId: 123 }], BidModel: { BidVersion: 1, PlacementId: '12345', AuctionStartTime: Date.now(), CreativeHtml: '<!-- Creative -->' } }; let expectedResponse = [{ requestId: bidRequests[0].bidId, cpm: 0.5, width: 400, height: 300, creativeId: 123, currency: 'EUR', netRevenue: true, ttl: 300, ad: `<html> <head><script type='text/javascript'>inDapIF=true;</script></head> <body style='margin : 0; padding: 0;'> <!-- Creative --> </body> </html>` }]; context('when the response is not valid', function () { it('handles response with no bids requested', function () { let emptyResult = spec.interpretResponse({ body: response }); expect(emptyResult).to.be.empty; }); it('handles empty response', function () { let emptyResult = spec.interpretResponse(null, { bidRequests }); expect(emptyResult).to.be.empty; }); it('handles response with bidding is not configured', function () { let emptyResult = spec.interpretResponse({ body: { Ads: [{ BidPrice: 1 }] } }, { bidRequests }); expect(emptyResult).to.be.empty; }); it('handles response with no ads are received', function () { let emptyResult = spec.interpretResponse({ body: { BidModel: { PlacementId: '12345' }, AdReason: 'No ads' } }, { bidRequests }); expect(emptyResult).to.be.empty; }); it('handles response with no ads are received - no ad reason', function () { let emptyResult = spec.interpretResponse({ body: { BidModel: { PlacementId: '12345' } } }, { bidRequests }); expect(emptyResult).to.be.empty; }); it('handles response when no placement Id matches', function () { let emptyResult = spec.interpretResponse({ body: { BidModel: { PlacementId: '123456' }, Ads: [{ BidPrice: 1 }] } }, { bidRequests }); expect(emptyResult).to.be.empty; }); it('handles response when placement Id is not present', function () {<|fim▁hole|> }); context('when the response is valid', function () { it('responds with a valid bid', function () { top.window.invibes.setCookie('a', 'b', 370); top.window.invibes.setCookie('c', 'd', 0); let result = spec.interpretResponse({ body: response }, { bidRequests }); expect(Object.keys(result[0])).to.have.members(Object.keys(expectedResponse[0])); }); it('responds with a valid bid and uses logger', function () { localStorage.InvibesDEBUG = true; let result = spec.interpretResponse({ body: response }, { bidRequests }); 
expect(Object.keys(result[0])).to.have.members(Object.keys(expectedResponse[0])); }); it('does not make multiple bids', function () { localStorage.InvibesDEBUG = false; let result = spec.interpretResponse({ body: response }, { bidRequests }); let secondResult = spec.interpretResponse({ body: response }, { bidRequests }); expect(secondResult).to.be.empty; }); }); }); describe('getUserSyncs', function () { it('returns an iframe if enabled', function () { let response = spec.getUserSyncs({iframeEnabled: true}); expect(response.type).to.equal('iframe'); expect(response.url).to.include(SYNC_ENDPOINT); }); it('returns an iframe with params if enabled', function () { top.window.invibes.optIn = 1; global.document.cookie = 'ivvbks=17639.0,1,2'; let response = spec.getUserSyncs({ iframeEnabled: true }); expect(response.type).to.equal('iframe'); expect(response.url).to.include(SYNC_ENDPOINT); expect(response.url).to.include('optIn'); expect(response.url).to.include('ivvbks'); expect(response.url).to.include('ivbsdid'); }); it('returns undefined if iframe not enabled ', function () { let response = spec.getUserSyncs({ iframeEnabled: false }); expect(response).to.equal(undefined); }); }); });<|fim▁end|>
let emptyResult = spec.interpretResponse({ BidModel: { }, Ads: [{ BidPrice: 1 }] }, { bidRequests }); expect(emptyResult).to.be.empty; });
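The frequency-capping tests above pin down a small parsing contract: ivvcap holds a JSON map of id -> [count, expiry-ms], which serializes to id=count pairs, while malformed or expired entries yield the empty string. A Python re-statement of that contract for illustration only; the real implementation lives in invibesBidAdapter.js and may differ in detail:

import json
import time

def cap_counts(raw, now_ms=None):
    now_ms = int(time.time() * 1000) if now_ms is None else now_ms
    try:
        caps = json.loads(raw)
    except (TypeError, ValueError):
        return ""                          # incorrectly formatted value
    if not isinstance(caps, dict):
        return ""
    # Keep only entries whose expiry timestamp is still in the future.
    parts = [f"{k}={v[0]}" for k, v in caps.items() if v[1] > now_ms]
    return ",".join(parts)

assert cap_counts('{"9731":[1,1768600800000]}', now_ms=1574334216992) == "9731=1"
assert cap_counts(':[1,1574334216992]}') == ""                                 # malformed
assert cap_counts('{"9731":[1,1574330064104]}', now_ms=1574334216992) == ""   # expired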
<|file_name|>activation.js<|end_file_name|><|fim▁begin|>'use strict'; <|fim▁hole|><|fim▁end|>
exports.name = '/activation';
<|file_name|>edalize_common.py<|end_file_name|><|fim▁begin|>from collections import OrderedDict import os.path import shutil import pytest from edalize import get_edatool tests_dir = os.path.dirname(__file__) class TestFixture: """A fixture that makes an edalize backend with work_root directory Create this object using the make_edalize_test factory fixture. This passes through its `tool_name` and sets up a temporary directory for `work_root`, then passes its keyword arguments through to the TestFixture initializer. Args: tool_name: The name of the tool work_root: The directory to treat as a work root test_name: The name to call the backend. Defaults to `'test_<tool_name>_0'` param_types: A list of parameter types. Defaults to `['plusarg', 'vlogdefine', 'vlogparam']` (the parameter types supported by most simulators). files: A list of files to use. Defaults to `None`, which means to use :py:data:`FILES`. tool_options: Dictionary passed to _setup_backend. Defaults to `{}`. ref_dir: A reference directory relative to `test_<tool_name>`. Defaults to `'.'` use_vpi: If true, set up backend with definitions from :attr:`VPI`. Defaults to `False`. """ def __init__( self, tool_name, work_root, test_name=None, param_types=["plusarg", "vlogdefine", "vlogparam"], files=None, tool_options={}, ref_dir=".", use_vpi=False, toplevel="top_module", ): raw_ref_dir = os.path.join(tests_dir, "test_" + tool_name, ref_dir) self.test_name = ( "test_{}_0".format(tool_name) if test_name is None else test_name ) self.ref_dir = os.path.normpath(raw_ref_dir) self.work_root = work_root self.backend = _setup_backend( self.test_name, tool_name, param_types, files, tool_options, work_root, use_vpi, toplevel, ) def compare_files(self, files, ref_subdir="."): """Check some files in the work root match those in the ref directory The files argument gives the list of files to check. These are interpreted as paths relative to the work directory and relative to self.ref_dir / ref_subdir. This is a wrapper around edalize_common.compare_files: see its documentation for how to use the :envvar:`GOLDEN_RUN` environment variable to copy across a golden reference. """ ref_dir = os.path.normpath(os.path.join(self.ref_dir, ref_subdir)) return compare_files(ref_dir, self.work_root, files) def copy_to_work_root(self, path): shutil.copy( os.path.join(self.ref_dir, path), os.path.join(self.work_root, path) ) @pytest.fixture def make_edalize_test(monkeypatch, tmpdir): """A factory fixture to make an edalize backend with work_root directory The returned factory method takes a `tool_name` (the name of the tool) and the keyword arguments supported by :class:`TestFixture`. It returns a :class:`TestFixture` object, whose `work_root` is a temporary directory. """ # Prepend directory `mock_commands` to PATH environment variable monkeypatch.setenv("PATH", os.path.join(tests_dir, "mock_commands"), ":") created = [] def _fun(tool_name, **kwargs): work_root = tmpdir / str(len(created)) work_root.mkdir() fixture = TestFixture(tool_name, str(work_root), **kwargs) created.append(fixture) return fixture return _fun def compare_files(ref_dir, work_root, files): """Check that all *files* in *work_root* match those in *ref_dir*. If the environment variable :envvar:`GOLDEN_RUN` is set, the *files* in *work_root* are copied to *ref_dir* to become the new reference. 
""" for f in files: reference_file = os.path.join(ref_dir, f) generated_file = os.path.join(work_root, f) assert os.path.exists(generated_file) if "GOLDEN_RUN" in os.environ: shutil.copy(generated_file, reference_file) with open(reference_file) as fref, open(generated_file) as fgen: assert fref.read() == fgen.read(), f def param_gen(paramtypes): """Generate dictionary of definitions in *paramtypes* list.""" defs = OrderedDict() for paramtype in paramtypes: for datatype in ["bool", "int", "str"]: if datatype == "int": default = 42 elif datatype == "str": default = "hello" else: default = True defs[paramtype + "_" + datatype] = { "datatype": datatype, "default": default, "description": "", "paramtype": paramtype, } return defs def _setup_backend( name, tool, paramtypes, files, tool_options, work_root, use_vpi, toplevel ): """Set up a backend. The backend is called *name*, is set up for *tool* with *tool_options*, *paramtypes*, and, if *use_vpi* is ``True``, definitions from :attr:`VPI`. If *files* is None, files are taken from :attr:`FILES`. """ parameters = param_gen(paramtypes) _vpi = [] if use_vpi: _vpi = VPI for v in VPI: for f in v["src_files"]: _f = os.path.join(work_root, f) if not os.path.exists(os.path.dirname(_f)): os.makedirs(os.path.dirname(_f)) with open(_f, "a"): os.utime(_f, None) edam = { "name": name, "files": FILES if files is None else files, "parameters": parameters, "tool_options": {tool: tool_options}, "toplevel": toplevel, "vpi": _vpi, } return get_edatool(tool)(edam=edam, work_root=work_root) FILES = [ {"name": "qip_file.qip", "file_type": "QIP"}, {"name": "qsys_file", "file_type": "QSYS"}, {"name": "sdc_file", "file_type": "SDC"}, {"name": "bmm_file", "file_type": "BMM"}, {"name": "sv_file.sv", "file_type": "systemVerilogSource"}, {"name": "pcf_file.pcf", "file_type": "PCF"}, {"name": "ucf_file.ucf", "file_type": "UCF"}, {"name": "user_file", "file_type": "user"}, {"name": "tcl_file.tcl", "file_type": "tclSource"}, {"name": "waiver_file.waiver", "file_type": "waiver"}, {"name": "vlog_file.v", "file_type": "verilogSource"}, {"name": "vlog05_file.v", "file_type": "verilogSource-2005"}, {"name": "vlog_incfile", "file_type": "verilogSource", "is_include_file": True}, {"name": "vhdl_file.vhd", "file_type": "vhdlSource"}, {"name": "vhdl_lfile", "file_type": "vhdlSource", "logical_name": "libx"}, {"name": "vhdl2008_file", "file_type": "vhdlSource-2008"}, {"name": "xci_file.xci", "file_type": "xci"}, {"name": "xdc_file.xdc", "file_type": "xdc"}, {"name": "bootrom.mem", "file_type": "mem"}, {"name": "c_file.c", "file_type": "cSource"}, {"name": "cpp_file.cpp", "file_type": "cppSource"}, {"name": "c_header.h", "file_type": "cSource", "is_include_file": True}, {"name": "c_header.h", "file_type": "cppSource", "is_include_file": True}, {"name": "config.vbl", "file_type": "veribleLintRules"}, {"name": "verible_waiver.vbw", "file_type": "veribleLintWaiver"}, {"name": "verible_waiver2.vbw", "file_type": "veribleLintWaiver"}, {"name": "config.sby.j2", "file_type": "sbyConfigTemplate"}, {"name": "another_sv_file.sv", "file_type": "systemVerilogSource"}, {"name": "pdc_constraint_file.pdc", "file_type": "PDC"}, {"name": "pdc_floorplan_constraint_file.pdc", "file_type": "FPPDC"}, {"name": "lpf_file.lpf", "file_type": "LPF"},<|fim▁hole|>] """Files of all supported file types.""" VPI = [ { "src_files": ["src/vpi_1/f1", "src/vpi_1/f3"], "include_dirs": ["src/vpi_1/"], "libs": ["some_lib"], "name": "vpi1", }, {"src_files": ["src/vpi_2/f4"], "include_dirs": [], "libs": [], "name": "vpi2"}, ] 
"""Predefined VPI modules to build."""<|fim▁end|>
<|file_name|>AssociationListTest.java<|end_file_name|><|fim▁begin|><|fim▁hole|> * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.workbench.common.stunner.bpmn.client.marshall.converters.customproperties; import org.junit.Before; import org.junit.Test; import static org.junit.Assert.assertEquals; public class AssociationListTest { private AssociationList tested; public static final String VALUE = "[din]var1->input1,[din]var2->input2,[dout]var3->output1," + "[dout]var5->output2"; public static final String VALUE_WITH_COMMA = "[din]var1->input1,[din]var2->input2,input22,input33," + "[dout]var3->output1,[dout]var5->output2,output22,ouput23"; @Before public void setUp() { tested = new AssociationList(); } @Test public void fromString() { AssociationList list = tested.fromString(VALUE); assertEquals(2, list.getInputs().size()); assertEquals(2, list.getOutputs().size()); } @Test public void fromStringWithComma() { AssociationList list = tested.fromString(VALUE_WITH_COMMA); assertEquals(2, list.getInputs().size()); assertEquals(2, list.getOutputs().size()); } }<|fim▁end|>
/* * Copyright 2019 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License");
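A note on the behaviour the row above pins down: `fromStringWithComma` expects that comma-separated tokens lacking a `[din]`/`[dout]` prefix are folded into the preceding association rather than starting a new one. A rough Python sketch of that grouping rule (illustrative only, not the actual `AssociationList` implementation):

import re

def split_associations(value: str):
    # Split only on commas that are immediately followed by a [din]/[dout]
    # marker, so embedded commas stay attached to the previous association.
    parts = re.split(r",(?=\[d(?:in|out)\])", value)
    inputs = [p for p in parts if p.startswith("[din]")]
    outputs = [p for p in parts if p.startswith("[dout]")]
    return inputs, outputs

inputs, outputs = split_associations(
    "[din]var1->input1,[din]var2->input2,input22,input33,"
    "[dout]var3->output1,[dout]var5->output2,output22,ouput23")
assert len(inputs) == 2 and len(outputs) == 2  # the same counts the test expects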
<|file_name|>_cryptic_nodes.py<|end_file_name|><|fim▁begin|>from typing import NamedTuple, List from data import crossword class Clue(str): def __init__(self, value) -> None: super(Clue, self).__init__()  # str is immutable; the value is consumed by __new__, so __init__ takes no extra argument<|fim▁hole|> self._tokens = crossword.tokenize_clue(value) class _Node(object): _clue: Clue _occupied: int def __init__(self, clue: Clue, occupied: int) -> None: self._clue = clue self._occupied = occupied class Parsed(List): pass # A list of nodes, initially Nulls<|fim▁end|>
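For context on the `Clue` constructor fix above: a `str` subclass receives its value through `__new__`, and the inherited `__init__` raises a `TypeError` on extra positional arguments once a subclass overrides `__init__`. A minimal sketch of the safe pattern (the `Token` class is made up for illustration):

class Token(str):
    """str subclass carrying extra state, initialized the safe way."""
    def __new__(cls, value: str) -> "Token":
        return super().__new__(cls, value)  # the string value is consumed here

    def __init__(self, value: str) -> None:
        super().__init__()          # no args: object.__init__ rejects extras
        self.length = len(value)    # derived state lives on the instance

t = Token("anagram")
assert t == "anagram" and t.length == 7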
<|file_name|>webdriver.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # # Copyright 2011-2013 Software freedom conservancy # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import base64 from selenium.webdriver.remote.command import Command from selenium.webdriver.remote.webdriver import WebDriver as RemoteWebDriver from selenium.common.exceptions import WebDriverException from .service import Service from .options import Options class WebDriver(RemoteWebDriver): """ Controls the ChromeDriver and allows you to drive the browser. You will need to download the ChromeDriver executable from http://chromedriver.storage.googleapis.com/index.html """ def __init__(self, executable_path="chromedriver", port=0, chrome_options=None, service_args=None, desired_capabilities=None, service_log_path=None): """ Creates a new instance of the chrome driver. Starts the service and then creates new instance of chrome driver. :Args: - executable_path - path to the executable. If the default is used it assumes the executable is in the $PATH - port - port you would like the service to run, if left as 0, a free port will be found. - desired_capabilities: Dictionary object with non-browser specific capabilities only, such as "proxy" or "loggingPref". - chrome_options: this takes an instance of ChromeOptions """ if chrome_options is None:<|fim▁hole|> desired_capabilities = Options().to_capabilities() else: if desired_capabilities is None: desired_capabilities = chrome_options.to_capabilities() else: desired_capabilities.update(chrome_options.to_capabilities()) self.service = Service(executable_path, port=port, service_args=service_args, log_path=service_log_path) self.service.start() try: RemoteWebDriver.__init__(self, command_executor=self.service.service_url, desired_capabilities=desired_capabilities, keep_alive=True) except: self.quit() raise self._is_remote = False def quit(self): """ Closes the browser and shuts down the ChromeDriver executable that is started when starting the ChromeDriver """ try: RemoteWebDriver.quit(self) except: # We don't care about the message because something probably has gone wrong pass finally: self.service.stop()<|fim▁end|>
# desired_capabilities stays as passed in if desired_capabilities is None:
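The branch completed above gives `chrome_options` precedence over any overlapping keys in `desired_capabilities`. A standalone sketch of that merge rule (the capability keys shown are illustrative and not tied to a particular selenium release):

def merge_capabilities(desired_capabilities, options_caps):
    # Mirrors the constructor logic: the options-derived capabilities are
    # the whole result when nothing was passed in; otherwise they override
    # overlapping keys in the user-supplied dict.
    if desired_capabilities is None:
        return dict(options_caps)
    merged = dict(desired_capabilities)
    merged.update(options_caps)
    return merged

base = {"browserName": "chrome", "proxy": {"httpProxy": "proxy.example:8080"}}
opts = {"chromeOptions": {"args": ["--headless"]}}
merged = merge_capabilities(base, opts)
assert merged["browserName"] == "chrome" and "chromeOptions" in merged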
<|file_name|>builtin.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Lints in the Rust compiler. //! //! This contains lints which can feasibly be implemented as their own //! AST visitor. Also see `rustc::lint::builtin`, which contains the //! definitions of lints that are emitted directly inside the main //! compiler. //! //! To add a new lint to rustc, declare it here using `declare_lint!()`. //! Then add code to emit the new lint in the appropriate circumstances. //! You can do that in an existing `LintPass` if it makes sense, or in a //! new `LintPass`, or using `Session::add_lint` elsewhere in the //! compiler. Only do the latter if the check can't be written cleanly as a //! `LintPass` (also, note that such lints will need to be defined in //! `rustc::lint::builtin`, not here). //! //! If you define a new `LintPass`, you will also need to add it to the //! `add_builtin!` or `add_builtin_with_new!` invocation in `lib.rs`. //! Use the former for unit-like structs and the latter for structs with //! a `pub fn new()`. use metadata::{csearch, decoder}; use middle::def::*; use middle::subst::Substs; use middle::ty::{self, Ty}; use middle::{def, pat_util, stability}; use middle::const_eval::{eval_const_expr_partial, const_int, const_uint}; use middle::cfg; use util::ppaux::ty_to_string; use util::nodemap::{FnvHashMap, NodeSet}; use lint::{Level, Context, LintPass, LintArray, Lint}; use std::collections::{HashSet, BitSet}; use std::collections::hash_map::Entry::{Occupied, Vacant}; use std::{cmp, slice}; use std::{i8, i16, i32, i64, u8, u16, u32, u64, f32, f64}; use syntax::{abi, ast, ast_map}; use syntax::ast_util::{self, is_shift_binop, local_def}; use syntax::attr::{self, AttrMetaMethods}; use syntax::codemap::{self, Span}; use syntax::feature_gate::{KNOWN_ATTRIBUTES, AttributeType}; use syntax::parse::token; use syntax::ast::{TyIs, TyUs, TyI8, TyU8, TyI16, TyU16, TyI32, TyU32, TyI64, TyU64}; use syntax::ptr::P; use syntax::visit::{self, Visitor}; // hardwired lints from librustc pub use lint::builtin::*; declare_lint! { WHILE_TRUE, Warn, "suggest using `loop { }` instead of `while true { }`" } #[derive(Copy, Clone)] pub struct WhileTrue; impl LintPass for WhileTrue { fn get_lints(&self) -> LintArray { lint_array!(WHILE_TRUE) } fn check_expr(&mut self, cx: &Context, e: &ast::Expr) { if let ast::ExprWhile(ref cond, _, _) = e.node { if let ast::ExprLit(ref lit) = cond.node { if let ast::LitBool(true) = lit.node { cx.span_lint(WHILE_TRUE, e.span, "denote infinite loops with loop { ... }"); } } } } } declare_lint! { UNSIGNED_NEGATION, Warn, "using an unary minus operator on unsigned type" } declare_lint! { UNUSED_COMPARISONS, Warn, "comparisons made useless by limits of the types involved" } declare_lint! { OVERFLOWING_LITERALS, Warn, "literal out of range for its type" } declare_lint! 
{ EXCEEDING_BITSHIFTS, Deny, "shift exceeds the type's number of bits" } #[derive(Copy, Clone)] pub struct TypeLimits { /// Id of the last visited negated expression negated_expr_id: ast::NodeId, } impl TypeLimits { pub fn new() -> TypeLimits { TypeLimits { negated_expr_id: !0, } } } impl LintPass for TypeLimits { fn get_lints(&self) -> LintArray { lint_array!(UNSIGNED_NEGATION, UNUSED_COMPARISONS, OVERFLOWING_LITERALS, EXCEEDING_BITSHIFTS) } fn check_expr(&mut self, cx: &Context, e: &ast::Expr) { match e.node { ast::ExprUnary(ast::UnNeg, ref expr) => { match expr.node { ast::ExprLit(ref lit) => { match lit.node { ast::LitInt(_, ast::UnsignedIntLit(_)) => { cx.span_lint(UNSIGNED_NEGATION, e.span, "negation of unsigned int literal may \ be unintentional"); }, _ => () } }, _ => { let t = ty::expr_ty(cx.tcx, &**expr); match t.sty { ty::ty_uint(_) => { cx.span_lint(UNSIGNED_NEGATION, e.span, "negation of unsigned int variable may \ be unintentional"); }, _ => () } } }; // propagate negation, if the negation itself isn't negated if self.negated_expr_id != e.id { self.negated_expr_id = expr.id; } }, ast::ExprParen(ref expr) if self.negated_expr_id == e.id => { self.negated_expr_id = expr.id; }, ast::ExprBinary(binop, ref l, ref r) => { if is_comparison(binop) && !check_limits(cx.tcx, binop, &**l, &**r) { cx.span_lint(UNUSED_COMPARISONS, e.span, "comparison is useless due to type limits"); } if is_shift_binop(binop.node) { let opt_ty_bits = match ty::expr_ty(cx.tcx, &**l).sty { ty::ty_int(t) => Some(int_ty_bits(t, cx.sess().target.int_type)), ty::ty_uint(t) => Some(uint_ty_bits(t, cx.sess().target.uint_type)), _ => None }; if let Some(bits) = opt_ty_bits { let exceeding = if let ast::ExprLit(ref lit) = r.node { if let ast::LitInt(shift, _) = lit.node { shift >= bits } else { false } } else { match eval_const_expr_partial(cx.tcx, &**r, Some(cx.tcx.types.usize)) { Ok(const_int(shift)) => { shift as u64 >= bits }, Ok(const_uint(shift)) => { shift >= bits }, _ => { false } } }; if exceeding { cx.span_lint(EXCEEDING_BITSHIFTS, e.span, "bitshift exceeds the type's number of bits"); } }; } }, ast::ExprLit(ref lit) => { match ty::expr_ty(cx.tcx, e).sty { ty::ty_int(t) => { match lit.node { ast::LitInt(v, ast::SignedIntLit(_, ast::Plus)) | ast::LitInt(v, ast::UnsuffixedIntLit(ast::Plus)) => { let int_type = if let ast::TyIs = t { cx.sess().target.int_type } else { t }; let (_, max) = int_ty_range(int_type); let negative = self.negated_expr_id == e.id; // Detect literal value out of range [min, max] inclusive // avoiding use of -min to prevent overflow/panic if (negative && v > max as u64 + 1) || (!negative && v > max as u64) { cx.span_lint(OVERFLOWING_LITERALS, e.span, &*format!("literal out of range for {:?}", t)); return; } } _ => panic!() }; }, ty::ty_uint(t) => { let uint_type = if let ast::TyUs = t { cx.sess().target.uint_type } else { t }; let (min, max) = uint_ty_range(uint_type); let lit_val: u64 = match lit.node { ast::LitByte(_v) => return, // _v is u8, within range by definition ast::LitInt(v, _) => v, _ => panic!() }; if lit_val < min || lit_val > max { cx.span_lint(OVERFLOWING_LITERALS, e.span, &*format!("literal out of range for {:?}", t)); } }, ty::ty_float(t) => { let (min, max) = float_ty_range(t); let lit_val: f64 = match lit.node { ast::LitFloat(ref v, _) | ast::LitFloatUnsuffixed(ref v) => { match v.parse() { Ok(f) => f, Err(_) => return } } _ => panic!() }; if lit_val < min || lit_val > max { cx.span_lint(OVERFLOWING_LITERALS, e.span, &*format!("literal out of range for {:?}", t)); } 
}, _ => () }; }, _ => () }; fn is_valid<T:cmp::PartialOrd>(binop: ast::BinOp, v: T, min: T, max: T) -> bool { match binop.node { ast::BiLt => v > min && v <= max, ast::BiLe => v >= min && v < max, ast::BiGt => v >= min && v < max, ast::BiGe => v > min && v <= max, ast::BiEq | ast::BiNe => v >= min && v <= max, _ => panic!() } } fn rev_binop(binop: ast::BinOp) -> ast::BinOp { codemap::respan(binop.span, match binop.node { ast::BiLt => ast::BiGt, ast::BiLe => ast::BiGe, ast::BiGt => ast::BiLt, ast::BiGe => ast::BiLe, _ => return binop }) } // for isize & usize, be conservative with the warnings, so that the // warnings are consistent between 32- and 64-bit platforms fn int_ty_range(int_ty: ast::IntTy) -> (i64, i64) { match int_ty { ast::TyIs => (i64::MIN, i64::MAX), ast::TyI8 => (i8::MIN as i64, i8::MAX as i64), ast::TyI16 => (i16::MIN as i64, i16::MAX as i64), ast::TyI32 => (i32::MIN as i64, i32::MAX as i64), ast::TyI64 => (i64::MIN, i64::MAX) } } fn uint_ty_range(uint_ty: ast::UintTy) -> (u64, u64) { match uint_ty { ast::TyUs => (u64::MIN, u64::MAX), ast::TyU8 => (u8::MIN as u64, u8::MAX as u64), ast::TyU16 => (u16::MIN as u64, u16::MAX as u64), ast::TyU32 => (u32::MIN as u64, u32::MAX as u64), ast::TyU64 => (u64::MIN, u64::MAX) } } fn float_ty_range(float_ty: ast::FloatTy) -> (f64, f64) { match float_ty { ast::TyF32 => (f32::MIN as f64, f32::MAX as f64), ast::TyF64 => (f64::MIN, f64::MAX) } } fn int_ty_bits(int_ty: ast::IntTy, target_int_ty: ast::IntTy) -> u64 { match int_ty { ast::TyIs => int_ty_bits(target_int_ty, target_int_ty), ast::TyI8 => i8::BITS as u64, ast::TyI16 => i16::BITS as u64, ast::TyI32 => i32::BITS as u64, ast::TyI64 => i64::BITS as u64 } } fn uint_ty_bits(uint_ty: ast::UintTy, target_uint_ty: ast::UintTy) -> u64 { match uint_ty { ast::TyUs => uint_ty_bits(target_uint_ty, target_uint_ty), ast::TyU8 => u8::BITS as u64, ast::TyU16 => u16::BITS as u64, ast::TyU32 => u32::BITS as u64, ast::TyU64 => u64::BITS as u64 } } fn check_limits(tcx: &ty::ctxt, binop: ast::BinOp, l: &ast::Expr, r: &ast::Expr) -> bool { let (lit, expr, swap) = match (&l.node, &r.node) { (&ast::ExprLit(_), _) => (l, r, true), (_, &ast::ExprLit(_)) => (r, l, false), _ => return true }; // Normalize the binop so that the literal is always on the RHS in // the comparison let norm_binop = if swap { rev_binop(binop) } else { binop }; match ty::expr_ty(tcx, expr).sty { ty::ty_int(int_ty) => { let (min, max) = int_ty_range(int_ty); let lit_val: i64 = match lit.node { ast::ExprLit(ref li) => match li.node { ast::LitInt(v, ast::SignedIntLit(_, ast::Plus)) | ast::LitInt(v, ast::UnsuffixedIntLit(ast::Plus)) => v as i64, ast::LitInt(v, ast::SignedIntLit(_, ast::Minus)) | ast::LitInt(v, ast::UnsuffixedIntLit(ast::Minus)) => -(v as i64), _ => return true }, _ => panic!() }; is_valid(norm_binop, lit_val, min, max) } ty::ty_uint(uint_ty) => { let (min, max): (u64, u64) = uint_ty_range(uint_ty); let lit_val: u64 = match lit.node { ast::ExprLit(ref li) => match li.node { ast::LitInt(v, _) => v, _ => return true }, _ => panic!() }; is_valid(norm_binop, lit_val, min, max) } _ => true } } fn is_comparison(binop: ast::BinOp) -> bool { match binop.node { ast::BiEq | ast::BiLt | ast::BiLe | ast::BiNe | ast::BiGe | ast::BiGt => true, _ => false } } } } declare_lint! 
{ IMPROPER_CTYPES, Warn, "proper use of libc types in foreign modules" } struct ImproperCTypesVisitor<'a, 'tcx: 'a> { cx: &'a Context<'a, 'tcx> } impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { fn check_def(&mut self, sp: Span, id: ast::NodeId) { match self.cx.tcx.def_map.borrow().get(&id).unwrap().full_def() { def::DefPrimTy(ast::TyInt(ast::TyIs)) => { self.cx.span_lint(IMPROPER_CTYPES, sp, "found rust type `isize` in foreign module, while \ libc::c_int or libc::c_long should be used"); } def::DefPrimTy(ast::TyUint(ast::TyUs)) => { self.cx.span_lint(IMPROPER_CTYPES, sp, "found rust type `usize` in foreign module, while \ libc::c_uint or libc::c_ulong should be used"); } def::DefTy(..) => { let tty = match self.cx.tcx.ast_ty_to_ty_cache.borrow().get(&id) { Some(&t) => t, None => panic!("ast_ty_to_ty_cache was incomplete after typeck!") }; if !ty::is_ffi_safe(self.cx.tcx, tty) { self.cx.span_lint(IMPROPER_CTYPES, sp, "found type without foreign-function-safe \ representation annotation in foreign module, consider \ adding a #[repr(...)] attribute to the type"); } } _ => () } } } impl<'a, 'tcx, 'v> Visitor<'v> for ImproperCTypesVisitor<'a, 'tcx> { fn visit_ty(&mut self, ty: &ast::Ty) { if let ast::TyPath(..) = ty.node { self.check_def(ty.span, ty.id); } visit::walk_ty(self, ty); } } #[derive(Copy, Clone)] pub struct ImproperCTypes; impl LintPass for ImproperCTypes { fn get_lints(&self) -> LintArray { lint_array!(IMPROPER_CTYPES) } fn check_item(&mut self, cx: &Context, it: &ast::Item) { fn check_ty(cx: &Context, ty: &ast::Ty) { let mut vis = ImproperCTypesVisitor { cx: cx }; vis.visit_ty(ty); } fn check_foreign_fn(cx: &Context, decl: &ast::FnDecl) { for input in &decl.inputs { check_ty(cx, &*input.ty); } if let ast::Return(ref ret_ty) = decl.output { check_ty(cx, &**ret_ty); } } match it.node { ast::ItemForeignMod(ref nmod) if nmod.abi != abi::RustIntrinsic => { for ni in &nmod.items { match ni.node { ast::ForeignItemFn(ref decl, _) => check_foreign_fn(cx, &**decl), ast::ForeignItemStatic(ref t, _) => check_ty(cx, &**t) } } } _ => (), } } } declare_lint! { BOX_POINTERS, Allow, "use of owned (Box type) heap memory" } #[derive(Copy, Clone)] pub struct BoxPointers; impl BoxPointers { fn check_heap_type<'a, 'tcx>(&self, cx: &Context<'a, 'tcx>, span: Span, ty: Ty<'tcx>) { let mut n_uniq: usize = 0; ty::fold_ty(cx.tcx, ty, |t| { match t.sty { ty::ty_uniq(_) => { n_uniq += 1; } _ => () }; t }); if n_uniq > 0 { let s = ty_to_string(cx.tcx, ty); let m = format!("type uses owned (Box type) pointers: {}", s); cx.span_lint(BOX_POINTERS, span, &m[..]); } } } impl LintPass for BoxPointers { fn get_lints(&self) -> LintArray { lint_array!(BOX_POINTERS) } fn check_item(&mut self, cx: &Context, it: &ast::Item) { match it.node { ast::ItemFn(..) | ast::ItemTy(..) | ast::ItemEnum(..) | ast::ItemStruct(..) => self.check_heap_type(cx, it.span, ty::node_id_to_type(cx.tcx, it.id)), _ => () } // If it's a struct, we also have to check the fields' types match it.node { ast::ItemStruct(ref struct_def, _) => { for struct_field in &struct_def.fields { self.check_heap_type(cx, struct_field.span, ty::node_id_to_type(cx.tcx, struct_field.node.id)); } } _ => () } } fn check_expr(&mut self, cx: &Context, e: &ast::Expr) { let ty = ty::expr_ty(cx.tcx, e); self.check_heap_type(cx, e.span, ty); } } declare_lint! 
{ RAW_POINTER_DERIVE, Warn, "uses of #[derive] with raw pointers are rarely correct" } struct RawPtrDeriveVisitor<'a, 'tcx: 'a> { cx: &'a Context<'a, 'tcx> } impl<'a, 'tcx, 'v> Visitor<'v> for RawPtrDeriveVisitor<'a, 'tcx> { fn visit_ty(&mut self, ty: &ast::Ty) { const MSG: &'static str = "use of `#[derive]` with a raw pointer"; if let ast::TyPtr(..) = ty.node { self.cx.span_lint(RAW_POINTER_DERIVE, ty.span, MSG); } visit::walk_ty(self, ty); } // explicit override to a no-op to reduce code bloat fn visit_expr(&mut self, _: &ast::Expr) {} fn visit_block(&mut self, _: &ast::Block) {} } pub struct RawPointerDerive { checked_raw_pointers: NodeSet, } impl RawPointerDerive { pub fn new() -> RawPointerDerive { RawPointerDerive { checked_raw_pointers: NodeSet(), } } } impl LintPass for RawPointerDerive { fn get_lints(&self) -> LintArray { lint_array!(RAW_POINTER_DERIVE) } fn check_item(&mut self, cx: &Context, item: &ast::Item) { if !attr::contains_name(&item.attrs, "automatically_derived") { return; } let did = match item.node { ast::ItemImpl(_, _, _, ref t_ref_opt, _, _) => { // Deriving the Copy trait does not cause a warning if let &Some(ref trait_ref) = t_ref_opt { let def_id = ty::trait_ref_to_def_id(cx.tcx, trait_ref); if Some(def_id) == cx.tcx.lang_items.copy_trait() { return; } } match ty::node_id_to_type(cx.tcx, item.id).sty { ty::ty_enum(did, _) => did, ty::ty_struct(did, _) => did, _ => return, } } _ => return, }; if !ast_util::is_local(did) { return; } let item = match cx.tcx.map.find(did.node) { Some(ast_map::NodeItem(item)) => item, _ => return, }; if !self.checked_raw_pointers.insert(item.id) { return; } match item.node { ast::ItemStruct(..) | ast::ItemEnum(..) => { let mut visitor = RawPtrDeriveVisitor { cx: cx }; visit::walk_item(&mut visitor, &*item); } _ => {} } } } declare_lint! { UNUSED_ATTRIBUTES, Warn, "detects attributes that were not used by the compiler" } #[derive(Copy, Clone)] pub struct UnusedAttributes; impl LintPass for UnusedAttributes { fn get_lints(&self) -> LintArray { lint_array!(UNUSED_ATTRIBUTES) } fn check_attribute(&mut self, cx: &Context, attr: &ast::Attribute) { // Note that check_name() marks the attribute as used if it matches. for &(ref name, ty) in KNOWN_ATTRIBUTES { match ty { AttributeType::Whitelisted | AttributeType::Gated(_, _) if attr.check_name(name) => { break; }, _ => () } } let plugin_attributes = cx.sess().plugin_attributes.borrow_mut(); for &(ref name, ty) in plugin_attributes.iter() { if ty == AttributeType::Whitelisted && attr.check_name(&*name) { break; } } if !attr::is_used(attr) { cx.span_lint(UNUSED_ATTRIBUTES, attr.span, "unused attribute"); // Is it a builtin attribute that must be used at the crate level? let known_crate = KNOWN_ATTRIBUTES.contains(&(&attr.name(), AttributeType::CrateLevel)); // Has a plugin registered this attribute as one which must be used at // the crate level? let plugin_crate = plugin_attributes.iter() .find(|&&(ref x, t)| { &*attr.name() == &*x && AttributeType::CrateLevel == t }).is_some(); if known_crate || plugin_crate { let msg = match attr.node.style { ast::AttrOuter => "crate-level attribute should be an inner \ attribute: add an exclamation mark: #![foo]", ast::AttrInner => "crate-level attribute should be in the \ root module", }; cx.span_lint(UNUSED_ATTRIBUTES, attr.span, msg); } } } } declare_lint! 
{ pub PATH_STATEMENTS, Warn, "path statements with no effect" } #[derive(Copy, Clone)] pub struct PathStatements; impl LintPass for PathStatements { fn get_lints(&self) -> LintArray { lint_array!(PATH_STATEMENTS) } fn check_stmt(&mut self, cx: &Context, s: &ast::Stmt) { match s.node { ast::StmtSemi(ref expr, _) => { match expr.node { ast::ExprPath(..) => cx.span_lint(PATH_STATEMENTS, s.span, "path statement with no effect"), _ => () } } _ => () } } } declare_lint! { pub UNUSED_MUST_USE, Warn, "unused result of a type flagged as #[must_use]" } declare_lint! { pub UNUSED_RESULTS, Allow, "unused result of an expression in a statement" } #[derive(Copy, Clone)] pub struct UnusedResults; impl LintPass for UnusedResults { fn get_lints(&self) -> LintArray { lint_array!(UNUSED_MUST_USE, UNUSED_RESULTS) } fn check_stmt(&mut self, cx: &Context, s: &ast::Stmt) { let expr = match s.node { ast::StmtSemi(ref expr, _) => &**expr, _ => return }; if let ast::ExprRet(..) = expr.node { return; } let t = ty::expr_ty(cx.tcx, expr); let warned = match t.sty { ty::ty_tup(ref tys) if tys.is_empty() => return, ty::ty_bool => return, ty::ty_struct(did, _) | ty::ty_enum(did, _) => { if ast_util::is_local(did) { if let ast_map::NodeItem(it) = cx.tcx.map.get(did.node) { check_must_use(cx, &it.attrs, s.span) } else { false } } else { let attrs = csearch::get_item_attrs(&cx.sess().cstore, did); check_must_use(cx, &attrs[..], s.span) } } _ => false, }; if !warned { cx.span_lint(UNUSED_RESULTS, s.span, "unused result"); } fn check_must_use(cx: &Context, attrs: &[ast::Attribute], sp: Span) -> bool { for attr in attrs { if attr.check_name("must_use") { let mut msg = "unused result which must be used".to_string(); // check for #[must_use="..."] match attr.value_str() { None => {} Some(s) => { msg.push_str(": "); msg.push_str(&s); } } cx.span_lint(UNUSED_MUST_USE, sp, &msg); return true; } } false } } } declare_lint! { pub NON_CAMEL_CASE_TYPES, Warn, "types, variants, traits and type parameters should have camel case names" } #[derive(Copy, Clone)] pub struct NonCamelCaseTypes; impl NonCamelCaseTypes { fn check_case(&self, cx: &Context, sort: &str, ident: ast::Ident, span: Span) { fn is_camel_case(ident: ast::Ident) -> bool { let ident = token::get_ident(ident); if ident.is_empty() { return true; } let ident = ident.trim_matches('_'); // start with a non-lowercase letter rather than non-uppercase // ones (some scripts don't have a concept of upper/lowercase) !ident.is_empty() && !ident.char_at(0).is_lowercase() && !ident.contains('_') } fn to_camel_case(s: &str) -> String { s.split('_').flat_map(|word| word.chars().enumerate().map(|(i, c)| if i == 0 { c.to_uppercase().collect::<String>() } else { c.to_lowercase().collect() } )).collect::<Vec<_>>().concat() } let s = token::get_ident(ident); if !is_camel_case(ident) { let c = to_camel_case(&s); let m = if c.is_empty() { format!("{} `{}` should have a camel case name such as `CamelCase`", sort, s) } else { format!("{} `{}` should have a camel case name such as `{}`", sort, s, c) }; cx.span_lint(NON_CAMEL_CASE_TYPES, span, &m[..]); } } } impl LintPass for NonCamelCaseTypes { fn get_lints(&self) -> LintArray { lint_array!(NON_CAMEL_CASE_TYPES) } fn check_item(&mut self, cx: &Context, it: &ast::Item) { let has_extern_repr = it.attrs.iter().any(|attr| { attr::find_repr_attrs(cx.tcx.sess.diagnostic(), attr).iter() .any(|r| r == &attr::ReprExtern) }); if has_extern_repr { return; } match it.node { ast::ItemTy(..) | ast::ItemStruct(..) 
=> { self.check_case(cx, "type", it.ident, it.span) } ast::ItemTrait(..) => { self.check_case(cx, "trait", it.ident, it.span) } ast::ItemEnum(ref enum_definition, _) => { if has_extern_repr { return; } self.check_case(cx, "type", it.ident, it.span); for variant in &enum_definition.variants { self.check_case(cx, "variant", variant.node.name, variant.span); } } _ => () } } fn check_generics(&mut self, cx: &Context, it: &ast::Generics) { for gen in &*it.ty_params { self.check_case(cx, "type parameter", gen.ident, gen.span); } } } #[derive(PartialEq)] enum MethodContext { TraitDefaultImpl, TraitImpl, PlainImpl } fn method_context(cx: &Context, id: ast::NodeId, span: Span) -> MethodContext { match cx.tcx.impl_or_trait_items.borrow().get(&local_def(id)) { None => cx.sess().span_bug(span, "missing method descriptor?!"), Some(item) => match item.container() { ty::TraitContainer(..) => MethodContext::TraitDefaultImpl, ty::ImplContainer(cid) => { match ty::impl_trait_ref(cx.tcx, cid) { Some(_) => MethodContext::TraitImpl, None => MethodContext::PlainImpl } } } } } declare_lint! { pub NON_SNAKE_CASE, Warn, "methods, functions, lifetime parameters and modules should have snake case names" } #[derive(Copy, Clone)] pub struct NonSnakeCase; impl NonSnakeCase { fn to_snake_case(mut str: &str) -> String { let mut words = vec![]; // Preserve leading underscores str = str.trim_left_matches(|c: char| { if c == '_' { words.push(String::new()); true } else { false } }); for s in str.split('_') { let mut last_upper = false; let mut buf = String::new(); if s.is_empty() { continue; } for ch in s.chars() { if !buf.is_empty() && buf != "'" && ch.is_uppercase() && !last_upper { words.push(buf); buf = String::new(); } last_upper = ch.is_uppercase(); buf.extend(ch.to_lowercase()); } words.push(buf); } words.connect("_") } fn check_snake_case(&self, cx: &Context, sort: &str, name: &str, span: Option<Span>) { fn is_snake_case(ident: &str) -> bool { if ident.is_empty() { return true; } let ident = ident.trim_left_matches('\''); let ident = ident.trim_matches('_'); let mut allow_underscore = true; ident.chars().all(|c| { allow_underscore = match c { '_' if !allow_underscore => return false, '_' => false, // It would be more obvious to use `c.is_lowercase()`, // but some characters do not have a lowercase form c if !c.is_uppercase() => true, _ => return false, }; true }) } if !is_snake_case(name) { let sc = NonSnakeCase::to_snake_case(name); let msg = if sc != name { format!("{} `{}` should have a snake case name such as `{}`", sort, name, sc) } else { format!("{} `{}` should have a snake case name", sort, name) }; match span { Some(span) => cx.span_lint(NON_SNAKE_CASE, span, &msg), None => cx.lint(NON_SNAKE_CASE, &msg), } } } } impl LintPass for NonSnakeCase { fn get_lints(&self) -> LintArray { lint_array!(NON_SNAKE_CASE) } fn check_crate(&mut self, cx: &Context, cr: &ast::Crate) { let attr_crate_name = cr.attrs.iter().find(|at| at.check_name("crate_name")) .and_then(|at| at.value_str().map(|s| (at, s))); if let Some(ref name) = cx.tcx.sess.opts.crate_name { self.check_snake_case(cx, "crate", name, None); } else if let Some((attr, ref name)) = attr_crate_name { self.check_snake_case(cx, "crate", name, Some(attr.span)); } } fn check_fn(&mut self, cx: &Context, fk: visit::FnKind, _: &ast::FnDecl, _: &ast::Block, span: Span, id: ast::NodeId) { match fk { visit::FkMethod(ident, _, _) => match method_context(cx, id, span) { MethodContext::PlainImpl => { self.check_snake_case(cx, "method", &token::get_ident(ident), Some(span)) 
}, MethodContext::TraitDefaultImpl => { self.check_snake_case(cx, "trait method", &token::get_ident(ident), Some(span)) }, _ => (), }, visit::FkItemFn(ident, _, _, _, _, _) => { self.check_snake_case(cx, "function", &token::get_ident(ident), Some(span)) }, _ => (), } } fn check_item(&mut self, cx: &Context, it: &ast::Item) { if let ast::ItemMod(_) = it.node { self.check_snake_case(cx, "module", &token::get_ident(it.ident), Some(it.span)); } } fn check_trait_item(&mut self, cx: &Context, trait_item: &ast::TraitItem) { if let ast::MethodTraitItem(_, None) = trait_item.node { self.check_snake_case(cx, "trait method", &token::get_ident(trait_item.ident), Some(trait_item.span)); } } fn check_lifetime_def(&mut self, cx: &Context, t: &ast::LifetimeDef) { self.check_snake_case(cx, "lifetime", &token::get_ident(t.lifetime.name.ident()), Some(t.lifetime.span)); } fn check_pat(&mut self, cx: &Context, p: &ast::Pat) { if let &ast::PatIdent(_, ref path1, _) = &p.node { let def = cx.tcx.def_map.borrow().get(&p.id).map(|d| d.full_def()); if let Some(def::DefLocal(_)) = def { self.check_snake_case(cx, "variable", &token::get_ident(path1.node), Some(p.span)); } } } fn check_struct_def(&mut self, cx: &Context, s: &ast::StructDef, _: ast::Ident, _: &ast::Generics, _: ast::NodeId) { for sf in &s.fields { if let ast::StructField_ { kind: ast::NamedField(ident, _), .. } = sf.node { self.check_snake_case(cx, "structure field", &token::get_ident(ident), Some(sf.span)); } } } } declare_lint! { pub NON_UPPER_CASE_GLOBALS, Warn, "static constants should have uppercase identifiers" } #[derive(Copy, Clone)] pub struct NonUpperCaseGlobals; impl NonUpperCaseGlobals { fn check_upper_case(cx: &Context, sort: &str, ident: ast::Ident, span: Span) { let s = token::get_ident(ident); if s.chars().any(|c| c.is_lowercase()) { let uc = NonSnakeCase::to_snake_case(&s).to_uppercase(); if uc != &s[..] { cx.span_lint(NON_UPPER_CASE_GLOBALS, span, &format!("{} `{}` should have an upper case name such as `{}`", sort, s, uc)); } else { cx.span_lint(NON_UPPER_CASE_GLOBALS, span, &format!("{} `{}` should have an upper case name", sort, s)); } } } } impl LintPass for NonUpperCaseGlobals { fn get_lints(&self) -> LintArray { lint_array!(NON_UPPER_CASE_GLOBALS) } fn check_item(&mut self, cx: &Context, it: &ast::Item) { match it.node { // only check static constants ast::ItemStatic(_, ast::MutImmutable, _) => { NonUpperCaseGlobals::check_upper_case(cx, "static constant", it.ident, it.span); } ast::ItemConst(..) => { NonUpperCaseGlobals::check_upper_case(cx, "constant", it.ident, it.span); } _ => {} } } fn check_trait_item(&mut self, cx: &Context, ti: &ast::TraitItem) { match ti.node { ast::ConstTraitItem(..) => { NonUpperCaseGlobals::check_upper_case(cx, "associated constant", ti.ident, ti.span); } _ => {} } } fn check_impl_item(&mut self, cx: &Context, ii: &ast::ImplItem) { match ii.node { ast::ConstImplItem(..) => { NonUpperCaseGlobals::check_upper_case(cx, "associated constant", ii.ident, ii.span); } _ => {} } } fn check_pat(&mut self, cx: &Context, p: &ast::Pat) { // Lint for constants that look like binding identifiers (#7526) match (&p.node, cx.tcx.def_map.borrow().get(&p.id).map(|d| d.full_def())) { (&ast::PatIdent(_, ref path1, _), Some(def::DefConst(..))) => { NonUpperCaseGlobals::check_upper_case(cx, "constant in pattern", path1.node, p.span); } _ => {} } } } declare_lint! 
{ UNUSED_PARENS, Warn, "`if`, `match`, `while` and `return` do not need parentheses" } #[derive(Copy, Clone)] pub struct UnusedParens; impl UnusedParens { fn check_unused_parens_core(&self, cx: &Context, value: &ast::Expr, msg: &str, struct_lit_needs_parens: bool) { if let ast::ExprParen(ref inner) = value.node { let necessary = struct_lit_needs_parens && contains_exterior_struct_lit(&**inner); if !necessary { cx.span_lint(UNUSED_PARENS, value.span, &format!("unnecessary parentheses around {}", msg)) } } /// Expressions that syntactically contain an "exterior" struct /// literal i.e. not surrounded by any parens or other /// delimiters, e.g. `X { y: 1 }`, `X { y: 1 }.method()`, `foo /// == X { y: 1 }` and `X { y: 1 } == foo` all do, but `(X { /// y: 1 }) == foo` does not. fn contains_exterior_struct_lit(value: &ast::Expr) -> bool { match value.node { ast::ExprStruct(..) => true, ast::ExprAssign(ref lhs, ref rhs) | ast::ExprAssignOp(_, ref lhs, ref rhs) | ast::ExprBinary(_, ref lhs, ref rhs) => { // X { y: 1 } + X { y: 2 } contains_exterior_struct_lit(&**lhs) || contains_exterior_struct_lit(&**rhs) } ast::ExprUnary(_, ref x) | ast::ExprCast(ref x, _) | ast::ExprField(ref x, _) | ast::ExprTupField(ref x, _) | ast::ExprIndex(ref x, _) => { // &X { y: 1 }, X { y: 1 }.y contains_exterior_struct_lit(&**x) } ast::ExprMethodCall(_, _, ref exprs) => { // X { y: 1 }.bar(...) contains_exterior_struct_lit(&*exprs[0]) } _ => false } } } } impl LintPass for UnusedParens { fn get_lints(&self) -> LintArray { lint_array!(UNUSED_PARENS) } fn check_expr(&mut self, cx: &Context, e: &ast::Expr) { let (value, msg, struct_lit_needs_parens) = match e.node { ast::ExprIf(ref cond, _, _) => (cond, "`if` condition", true), ast::ExprWhile(ref cond, _, _) => (cond, "`while` condition", true), ast::ExprMatch(ref head, _, source) => match source { ast::MatchSource::Normal => (head, "`match` head expression", true), ast::MatchSource::IfLetDesugar { .. } => (head, "`if let` head expression", true), ast::MatchSource::WhileLetDesugar => (head, "`while let` head expression", true), ast::MatchSource::ForLoopDesugar => (head, "`for` head expression", true), }, ast::ExprRet(Some(ref value)) => (value, "`return` value", false), ast::ExprAssign(_, ref value) => (value, "assigned value", false), ast::ExprAssignOp(_, _, ref value) => (value, "assigned value", false), _ => return }; self.check_unused_parens_core(cx, &**value, msg, struct_lit_needs_parens); } fn check_stmt(&mut self, cx: &Context, s: &ast::Stmt) { let (value, msg) = match s.node { ast::StmtDecl(ref decl, _) => match decl.node { ast::DeclLocal(ref local) => match local.init { Some(ref value) => (value, "assigned value"), None => return }, _ => return }, _ => return }; self.check_unused_parens_core(cx, &**value, msg, false); } } declare_lint! { UNUSED_IMPORT_BRACES, Allow, "unnecessary braces around an imported item" } #[derive(Copy, Clone)] pub struct UnusedImportBraces; impl LintPass for UnusedImportBraces { fn get_lints(&self) -> LintArray { lint_array!(UNUSED_IMPORT_BRACES) } fn check_item(&mut self, cx: &Context, item: &ast::Item) { if let ast::ItemUse(ref view_path) = item.node { if let ast::ViewPathList(_, ref items) = view_path.node { if items.len() == 1 { if let ast::PathListIdent {ref name, ..} = items[0].node { let m = format!("braces around {} are unnecessary", &token::get_ident(*name)); cx.span_lint(UNUSED_IMPORT_BRACES, item.span, &m[..]); } } } } } } declare_lint!
{ NON_SHORTHAND_FIELD_PATTERNS, Warn, "using `Struct { x: x }` instead of `Struct { x }`" } #[derive(Copy, Clone)] pub struct NonShorthandFieldPatterns; impl LintPass for NonShorthandFieldPatterns { fn get_lints(&self) -> LintArray { lint_array!(NON_SHORTHAND_FIELD_PATTERNS) } fn check_pat(&mut self, cx: &Context, pat: &ast::Pat) { let def_map = cx.tcx.def_map.borrow(); if let ast::PatStruct(_, ref v, _) = pat.node { let field_pats = v.iter().filter(|fieldpat| { if fieldpat.node.is_shorthand { return false; } let def = def_map.get(&fieldpat.node.pat.id).map(|d| d.full_def()); def == Some(def::DefLocal(fieldpat.node.pat.id)) }); for fieldpat in field_pats { if let ast::PatIdent(_, ident, None) = fieldpat.node.pat.node { if ident.node.as_str() == fieldpat.node.ident.as_str() { cx.span_lint(NON_SHORTHAND_FIELD_PATTERNS, fieldpat.span, &format!("the `{}:` in this pattern is redundant and can \ be removed", ident.node.as_str())) } } } } } } declare_lint! { pub UNUSED_UNSAFE, Warn, "unnecessary use of an `unsafe` block" } #[derive(Copy, Clone)] pub struct UnusedUnsafe; impl LintPass for UnusedUnsafe { fn get_lints(&self) -> LintArray { lint_array!(UNUSED_UNSAFE) } fn check_expr(&mut self, cx: &Context, e: &ast::Expr) { if let ast::ExprBlock(ref blk) = e.node { // Don't warn about generated blocks, that'll just pollute the output. if blk.rules == ast::UnsafeBlock(ast::UserProvided) && !cx.tcx.used_unsafe.borrow().contains(&blk.id) { cx.span_lint(UNUSED_UNSAFE, blk.span, "unnecessary `unsafe` block"); } } } } declare_lint! { UNSAFE_CODE, Allow, "usage of `unsafe` code" } #[derive(Copy, Clone)] pub struct UnsafeCode; impl LintPass for UnsafeCode { fn get_lints(&self) -> LintArray { lint_array!(UNSAFE_CODE) } fn check_expr(&mut self, cx: &Context, e: &ast::Expr) { if let ast::ExprBlock(ref blk) = e.node { // Don't warn about generated blocks, that'll just pollute the output. if blk.rules == ast::UnsafeBlock(ast::UserProvided) { cx.span_lint(UNSAFE_CODE, blk.span, "usage of an `unsafe` block"); } } } fn check_item(&mut self, cx: &Context, it: &ast::Item) { match it.node { ast::ItemTrait(ast::Unsafety::Unsafe, _, _, _) => cx.span_lint(UNSAFE_CODE, it.span, "declaration of an `unsafe` trait"), ast::ItemImpl(ast::Unsafety::Unsafe, _, _, _, _, _) => cx.span_lint(UNSAFE_CODE, it.span, "implementation of an `unsafe` trait"), _ => return, } } fn check_fn(&mut self, cx: &Context, fk: visit::FnKind, _: &ast::FnDecl, _: &ast::Block, span: Span, _: ast::NodeId) { match fk { visit::FkItemFn(_, _, ast::Unsafety::Unsafe, _, _, _) => cx.span_lint(UNSAFE_CODE, span, "declaration of an `unsafe` function"), visit::FkMethod(_, sig, _) => { if sig.unsafety == ast::Unsafety::Unsafe { cx.span_lint(UNSAFE_CODE, span, "implementation of an `unsafe` method") } }, _ => (), } } fn check_trait_item(&mut self, cx: &Context, trait_item: &ast::TraitItem) { if let ast::MethodTraitItem(ref sig, None) = trait_item.node { if sig.unsafety == ast::Unsafety::Unsafe { cx.span_lint(UNSAFE_CODE, trait_item.span, "declaration of an `unsafe` method") } } } } declare_lint! 
{ pub UNUSED_MUT, Warn, "detect mut variables which don't need to be mutable" } #[derive(Copy, Clone)] pub struct UnusedMut; impl UnusedMut { fn check_unused_mut_pat(&self, cx: &Context, pats: &[P<ast::Pat>]) { // collect all mutable pattern and group their NodeIDs by their Identifier to // avoid false warnings in match arms with multiple patterns let mut mutables = FnvHashMap(); for p in pats { pat_util::pat_bindings(&cx.tcx.def_map, &**p, |mode, id, _, path1| { let ident = path1.node; if let ast::BindByValue(ast::MutMutable) = mode { if !token::get_ident(ident).starts_with("_") { match mutables.entry(ident.name.usize()) { Vacant(entry) => { entry.insert(vec![id]); }, Occupied(mut entry) => { entry.get_mut().push(id); }, } } } }); } let used_mutables = cx.tcx.used_mut_nodes.borrow(); for (_, v) in &mutables { if !v.iter().any(|e| used_mutables.contains(e)) { cx.span_lint(UNUSED_MUT, cx.tcx.map.span(v[0]), "variable does not need to be mutable"); } } } } impl LintPass for UnusedMut { fn get_lints(&self) -> LintArray { lint_array!(UNUSED_MUT) } fn check_expr(&mut self, cx: &Context, e: &ast::Expr) { if let ast::ExprMatch(_, ref arms, _) = e.node { for a in arms { self.check_unused_mut_pat(cx, &a.pats) } } } fn check_stmt(&mut self, cx: &Context, s: &ast::Stmt) { if let ast::StmtDecl(ref d, _) = s.node { if let ast::DeclLocal(ref l) = d.node { self.check_unused_mut_pat(cx, slice::ref_slice(&l.pat)); } } } fn check_fn(&mut self, cx: &Context, _: visit::FnKind, decl: &ast::FnDecl, _: &ast::Block, _: Span, _: ast::NodeId) { for a in &decl.inputs { self.check_unused_mut_pat(cx, slice::ref_slice(&a.pat)); } } } declare_lint! { UNUSED_ALLOCATION, Warn, "detects unnecessary allocations that can be eliminated" } #[derive(Copy, Clone)] pub struct UnusedAllocation; impl LintPass for UnusedAllocation { fn get_lints(&self) -> LintArray { lint_array!(UNUSED_ALLOCATION) } fn check_expr(&mut self, cx: &Context, e: &ast::Expr) { match e.node { ast::ExprUnary(ast::UnUniq, _) => (), _ => return } if let Some(adjustment) = cx.tcx.adjustments.borrow().get(&e.id) { if let ty::AdjustDerefRef(ty::AutoDerefRef { ref autoref, .. }) = *adjustment { match autoref { &Some(ty::AutoPtr(_, ast::MutImmutable)) => { cx.span_lint(UNUSED_ALLOCATION, e.span, "unnecessary allocation, use & instead"); } &Some(ty::AutoPtr(_, ast::MutMutable)) => { cx.span_lint(UNUSED_ALLOCATION, e.span, "unnecessary allocation, use &mut instead"); } _ => () } } } } } declare_lint! { MISSING_DOCS, Allow, "detects missing documentation for public members" } pub struct MissingDoc { /// Stack of IDs of struct definitions. struct_def_stack: Vec<ast::NodeId>, /// True if inside variant definition in_variant: bool, /// Stack of whether #[doc(hidden)] is set /// at each level which has lint attributes. doc_hidden_stack: Vec<bool>, /// Private traits or trait items that leaked through. Don't check their methods. private_traits: HashSet<ast::NodeId>, } impl MissingDoc { pub fn new() -> MissingDoc { MissingDoc { struct_def_stack: vec!(), in_variant: false, doc_hidden_stack: vec!(false), private_traits: HashSet::new(), } } fn doc_hidden(&self) -> bool { *self.doc_hidden_stack.last().expect("empty doc_hidden_stack") } fn check_missing_docs_attrs(&self, cx: &Context, id: Option<ast::NodeId>, attrs: &[ast::Attribute], sp: Span, desc: &'static str) { // If we're building a test harness, then warning about // documentation is probably not really relevant right now. if cx.sess().opts.test { return; } // `#[doc(hidden)]` disables missing_docs check. 
if self.doc_hidden() { return; } // Only check publicly-visible items, using the result from the privacy pass. // It's an option so the crate root can also use this function (it doesn't // have a NodeId). if let Some(ref id) = id { if !cx.exported_items.contains(id) { return; } } let has_doc = attrs.iter().any(|a| { match a.node.value.node { ast::MetaNameValue(ref name, _) if *name == "doc" => true, _ => false } }); if !has_doc { cx.span_lint(MISSING_DOCS, sp, &format!("missing documentation for {}", desc)); } } } impl LintPass for MissingDoc { fn get_lints(&self) -> LintArray { lint_array!(MISSING_DOCS) } fn enter_lint_attrs(&mut self, _: &Context, attrs: &[ast::Attribute]) { let doc_hidden = self.doc_hidden() || attrs.iter().any(|attr| { attr.check_name("doc") && match attr.meta_item_list() { None => false, Some(l) => attr::contains_name(&l[..], "hidden"), } }); self.doc_hidden_stack.push(doc_hidden); } fn exit_lint_attrs(&mut self, _: &Context, _: &[ast::Attribute]) { self.doc_hidden_stack.pop().expect("empty doc_hidden_stack"); } fn check_struct_def(&mut self, _: &Context, _: &ast::StructDef, _: ast::Ident, _: &ast::Generics, id: ast::NodeId) { self.struct_def_stack.push(id); } fn check_struct_def_post(&mut self, _: &Context, _: &ast::StructDef, _: ast::Ident, _: &ast::Generics, id: ast::NodeId) { let popped = self.struct_def_stack.pop().expect("empty struct_def_stack"); assert!(popped == id); } fn check_crate(&mut self, cx: &Context, krate: &ast::Crate) { self.check_missing_docs_attrs(cx, None, &krate.attrs, krate.span, "crate"); } fn check_item(&mut self, cx: &Context, it: &ast::Item) { let desc = match it.node { ast::ItemFn(..) => "a function", ast::ItemMod(..) => "a module", ast::ItemEnum(..) => "an enum", ast::ItemStruct(..) => "a struct", ast::ItemTrait(_, _, _, ref items) => { // Issue #11592, traits are always considered exported, even when private. if it.vis == ast::Visibility::Inherited { self.private_traits.insert(it.id); for itm in items { self.private_traits.insert(itm.id); } return } "a trait" }, ast::ItemTy(..) => "a type alias", ast::ItemImpl(_, _, _, Some(ref trait_ref), _, ref impl_items) => { // If the trait is private, add the impl items to private_traits so they don't get // reported for missing docs. let real_trait = ty::trait_ref_to_def_id(cx.tcx, trait_ref); match cx.tcx.map.find(real_trait.node) { Some(ast_map::NodeItem(item)) => if item.vis == ast::Visibility::Inherited { for itm in impl_items { self.private_traits.insert(itm.id); } }, _ => { } } return }, _ => return }; self.check_missing_docs_attrs(cx, Some(it.id), &it.attrs, it.span, desc); } fn check_trait_item(&mut self, cx: &Context, trait_item: &ast::TraitItem) { if self.private_traits.contains(&trait_item.id) { return } let desc = match trait_item.node { ast::ConstTraitItem(..) => "an associated constant", ast::MethodTraitItem(..) => "a trait method", ast::TypeTraitItem(..) => "an associated type", }; self.check_missing_docs_attrs(cx, Some(trait_item.id), &trait_item.attrs, trait_item.span, desc); } fn check_impl_item(&mut self, cx: &Context, impl_item: &ast::ImplItem) { // If the method is an impl for a trait, don't doc. if method_context(cx, impl_item.id, impl_item.span) == MethodContext::TraitImpl { return; } let desc = match impl_item.node { ast::ConstImplItem(..) => "an associated constant", ast::MethodImplItem(..) 
=> "a method", ast::TypeImplItem(_) => "an associated type", ast::MacImplItem(_) => "an impl item macro", }; self.check_missing_docs_attrs(cx, Some(impl_item.id), &impl_item.attrs, impl_item.span, desc); } fn check_struct_field(&mut self, cx: &Context, sf: &ast::StructField) { if let ast::NamedField(_, vis) = sf.node.kind { if vis == ast::Public || self.in_variant { let cur_struct_def = *self.struct_def_stack.last() .expect("empty struct_def_stack"); self.check_missing_docs_attrs(cx, Some(cur_struct_def), &sf.node.attrs, sf.span, "a struct field") } } } fn check_variant(&mut self, cx: &Context, v: &ast::Variant, _: &ast::Generics) { self.check_missing_docs_attrs(cx, Some(v.node.id), &v.node.attrs, v.span, "a variant"); assert!(!self.in_variant); self.in_variant = true; } fn check_variant_post(&mut self, _: &Context, _: &ast::Variant, _: &ast::Generics) { assert!(self.in_variant); self.in_variant = false; } } declare_lint! { pub MISSING_COPY_IMPLEMENTATIONS, Allow, "detects potentially-forgotten implementations of `Copy`" } #[derive(Copy, Clone)] pub struct MissingCopyImplementations; impl LintPass for MissingCopyImplementations { fn get_lints(&self) -> LintArray { lint_array!(MISSING_COPY_IMPLEMENTATIONS) } fn check_item(&mut self, cx: &Context, item: &ast::Item) { if !cx.exported_items.contains(&item.id) { return; } if cx.tcx.destructor_for_type.borrow().contains_key(&local_def(item.id)) { return; } let ty = match item.node { ast::ItemStruct(_, ref ast_generics) => { if ast_generics.is_parameterized() { return; } ty::mk_struct(cx.tcx, local_def(item.id), cx.tcx.mk_substs(Substs::empty())) } ast::ItemEnum(_, ref ast_generics) => { if ast_generics.is_parameterized() { return; } ty::mk_enum(cx.tcx, local_def(item.id), cx.tcx.mk_substs(Substs::empty())) } _ => return, }; let parameter_environment = ty::empty_parameter_environment(cx.tcx); if !ty::type_moves_by_default(&parameter_environment, item.span, ty) { return; } if ty::can_type_implement_copy(&parameter_environment, item.span, ty).is_ok() { cx.span_lint(MISSING_COPY_IMPLEMENTATIONS, item.span, "type could implement `Copy`; consider adding `impl \ Copy`") } } } declare_lint! { MISSING_DEBUG_IMPLEMENTATIONS, Allow, "detects missing implementations of fmt::Debug" } pub struct MissingDebugImplementations { impling_types: Option<NodeSet>, } impl MissingDebugImplementations { pub fn new() -> MissingDebugImplementations { MissingDebugImplementations { impling_types: None, } } } impl LintPass for MissingDebugImplementations { fn get_lints(&self) -> LintArray { lint_array!(MISSING_DEBUG_IMPLEMENTATIONS) } fn check_item(&mut self, cx: &Context, item: &ast::Item) { if !cx.exported_items.contains(&item.id) { return; } match item.node { ast::ItemStruct(..) | ast::ItemEnum(..) => {}, _ => return, } let debug = match cx.tcx.lang_items.debug_trait() { Some(debug) => debug, None => return, }; if self.impling_types.is_none() { let debug_def = ty::lookup_trait_def(cx.tcx, debug); let mut impls = NodeSet(); debug_def.for_each_impl(cx.tcx, |d| { if d.krate == ast::LOCAL_CRATE { if let Some(ty_def) = ty::ty_to_def_id(ty::node_id_to_type(cx.tcx, d.node)) { impls.insert(ty_def.node); } } }); self.impling_types = Some(impls); debug!("{:?}", self.impling_types); } if !self.impling_types.as_ref().unwrap().contains(&item.id) { cx.span_lint(MISSING_DEBUG_IMPLEMENTATIONS, item.span, "type does not implement `fmt::Debug`; consider adding #[derive(Debug)] \ or a manual implementation") } } } declare_lint! 
{ DEPRECATED, Warn, "detects use of #[deprecated] items" } /// Checks for use of items with `#[deprecated]` attributes #[derive(Copy, Clone)] pub struct Stability; impl Stability { fn lint(&self, cx: &Context, _id: ast::DefId, span: Span, stability: &Option<&attr::Stability>) { // Deprecated attributes apply in-crate and cross-crate. let (lint, label) = match *stability { Some(&attr::Stability { deprecated_since: Some(_), .. }) => (DEPRECATED, "deprecated"), _ => return }; output(cx, span, stability, lint, label); fn output(cx: &Context, span: Span, stability: &Option<&attr::Stability>, lint: &'static Lint, label: &'static str) { let msg = match *stability { Some(&attr::Stability { reason: Some(ref s), .. }) => { format!("use of {} item: {}", label, *s) } _ => format!("use of {} item", label) }; cx.span_lint(lint, span, &msg[..]); } } } impl LintPass for Stability { fn get_lints(&self) -> LintArray { lint_array!(DEPRECATED) } fn check_item(&mut self, cx: &Context, item: &ast::Item) { stability::check_item(cx.tcx, item, false, &mut |id, sp, stab| self.lint(cx, id, sp, stab)); } fn check_expr(&mut self, cx: &Context, e: &ast::Expr) { stability::check_expr(cx.tcx, e, &mut |id, sp, stab| self.lint(cx, id, sp, stab)); } fn check_path(&mut self, cx: &Context, path: &ast::Path, id: ast::NodeId) { stability::check_path(cx.tcx, path, id, &mut |id, sp, stab| self.lint(cx, id, sp, stab)); } fn check_pat(&mut self, cx: &Context, pat: &ast::Pat) { stability::check_pat(cx.tcx, pat, &mut |id, sp, stab| self.lint(cx, id, sp, stab)) } } declare_lint! { pub UNCONDITIONAL_RECURSION, Warn, "functions that cannot return without calling themselves" } #[derive(Copy, Clone)] pub struct UnconditionalRecursion; impl LintPass for UnconditionalRecursion { fn get_lints(&self) -> LintArray { lint_array![UNCONDITIONAL_RECURSION] } fn check_fn(&mut self, cx: &Context, fn_kind: visit::FnKind, _: &ast::FnDecl, blk: &ast::Block, sp: Span, id: ast::NodeId) { // FIXME(#23542) Replace with type ascription. #![allow(trivial_casts)] type F = for<'tcx> fn(&ty::ctxt<'tcx>, ast::NodeId, ast::NodeId, ast::Ident, ast::NodeId) -> bool; let (name, checker) = match fn_kind { visit::FkItemFn(name, _, _, _, _, _) => (name, id_refers_to_this_fn as F), visit::FkMethod(name, _, _) => (name, id_refers_to_this_method as F), // closures can't recur, so they don't matter. visit::FkFnBlock => return }; let impl_def_id = ty::impl_of_method(cx.tcx, local_def(id)) .unwrap_or(local_def(ast::DUMMY_NODE_ID)); assert!(ast_util::is_local(impl_def_id)); let impl_node_id = impl_def_id.node; // Walk through this function (say `f`) looking to see if // every possible path references itself, i.e. the function is // called recursively unconditionally. This is done by trying // to find a path from the entry node to the exit node that // *doesn't* call `f` by traversing from the entry while // pretending that calls of `f` are sinks (i.e. ignoring any // exit edges from them). // // NB. this has an edge case with non-returning statements, // like `loop {}` or `panic!()`: control flow never reaches // the exit node through these, so one can have a function // that never actually calls itself but is still picked up by // this lint: // // fn f(cond: bool) { // if !cond { panic!() } // could come from `assert!(cond)` // f(false) // } // // In general, functions of that form may be able to call // themselves a finite number of times and then diverge.
The lint // considers this to be an error for two reasons, (a) it is // easier to implement, and (b) it seems rare to actually want // to have behaviour like the above, rather than // e.g. accidentally recurring after an assert. let cfg = cfg::CFG::new(cx.tcx, blk); let mut work_queue = vec![cfg.entry]; let mut reached_exit_without_self_call = false; let mut self_call_spans = vec![]; let mut visited = BitSet::new(); while let Some(idx) = work_queue.pop() { if idx == cfg.exit { // found a path! reached_exit_without_self_call = true; break; } let cfg_id = idx.node_id(); if visited.contains(&cfg_id) { // already done continue; } visited.insert(cfg_id); let node_id = cfg.graph.node_data(idx).id(); // is this a recursive call? if node_id != ast::DUMMY_NODE_ID && checker(cx.tcx, impl_node_id, id, name, node_id) { self_call_spans.push(cx.tcx.map.span(node_id)); // this is a self call, so we shouldn't explore past // this node in the CFG. continue; } // add the successors of this node to explore the graph further. for (_, edge) in cfg.graph.outgoing_edges(idx) { let target_idx = edge.target(); let target_cfg_id = target_idx.node_id(); if !visited.contains(&target_cfg_id) { work_queue.push(target_idx) } } } // Check the number of self calls because a function that // doesn't return (e.g. calls a `-> !` function or `loop { /* // no break */ }`) shouldn't be linted unless it actually // recurs. if !reached_exit_without_self_call && !self_call_spans.is_empty() { cx.span_lint(UNCONDITIONAL_RECURSION, sp, "function cannot return without recurring"); // FIXME #19668: these could be span_lint_note's instead of this manual guard. if cx.current_level(UNCONDITIONAL_RECURSION) != Level::Allow { let sess = cx.sess(); // offer some help to the programmer. for call in &self_call_spans { sess.span_note(*call, "recursive call site") } sess.fileline_help(sp, "a `loop` may express intention \ better if this is on purpose") } } // all done return; // Functions for identifying if the given NodeId `id` // represents a call to the function `fn_id`/method // `method_id`. fn id_refers_to_this_fn<'tcx>(tcx: &ty::ctxt<'tcx>, _: ast::NodeId, fn_id: ast::NodeId, _: ast::Ident, id: ast::NodeId) -> bool { tcx.def_map.borrow().get(&id) .map_or(false, |def| def.def_id() == local_def(fn_id)) } // check if the method call `id` refers to method `method_id` // (with name `method_name` contained in impl `impl_id`). fn id_refers_to_this_method<'tcx>(tcx: &ty::ctxt<'tcx>, impl_id: ast::NodeId, method_id: ast::NodeId, method_name: ast::Ident, id: ast::NodeId) -> bool { let did = match tcx.method_map.borrow().get(&ty::MethodCall::expr(id)) { None => return false, Some(m) => match m.origin { // There's no way to know if a method call via a // vtable is recursion, so we assume it's not. ty::MethodTraitObject(_) => return false, // This `did` refers directly to the method definition. ty::MethodStatic(did) | ty::MethodStaticClosure(did) => did, // MethodTypeParam are methods from traits: // The `impl ... for ...` of this method call // isn't known, e.g. it might be a default method // in a trait, so we get the def-id of the trait // method instead. ty::MethodTypeParam( ty::MethodParam { ref trait_ref, method_num, impl_def_id: None, }) => { ty::trait_item(tcx, trait_ref.def_id, method_num).def_id() } // The `impl` is known, so we check that with a // special case: ty::MethodTypeParam( ty::MethodParam { impl_def_id: Some(impl_def_id), .. 
}) => { let name = match tcx.map.expect_expr(id).node { ast::ExprMethodCall(ref sp_ident, _, _) => sp_ident.node, _ => tcx.sess.span_bug( tcx.map.span(id),<|fim▁hole|> // It matches if it comes from the same impl, // and has the same method name. return ast_util::is_local(impl_def_id) && impl_def_id.node == impl_id && method_name.name == name.name } } }; ast_util::is_local(did) && did.node == method_id } } } declare_lint! { PLUGIN_AS_LIBRARY, Warn, "compiler plugin used as ordinary library in non-plugin crate" } #[derive(Copy, Clone)] pub struct PluginAsLibrary; impl LintPass for PluginAsLibrary { fn get_lints(&self) -> LintArray { lint_array![PLUGIN_AS_LIBRARY] } fn check_item(&mut self, cx: &Context, it: &ast::Item) { if cx.sess().plugin_registrar_fn.get().is_some() { // We're compiling a plugin; it's fine to link other plugins. return; } match it.node { ast::ItemExternCrate(..) => (), _ => return, }; let md = match cx.sess().cstore.find_extern_mod_stmt_cnum(it.id) { Some(cnum) => cx.sess().cstore.get_crate_data(cnum), None => { // Probably means we aren't linking the crate for some reason. // // Not sure if / when this could happen. return; } }; if decoder::get_plugin_registrar_fn(md.data()).is_some() { cx.span_lint(PLUGIN_AS_LIBRARY, it.span, "compiler plugin used as an ordinary library"); } } } declare_lint! { PRIVATE_NO_MANGLE_FNS, Warn, "functions marked #[no_mangle] should be exported" } declare_lint! { PRIVATE_NO_MANGLE_STATICS, Warn, "statics marked #[no_mangle] should be exported" } declare_lint! { NO_MANGLE_CONST_ITEMS, Deny, "const items will not have their symbols exported" } #[derive(Copy, Clone)] pub struct InvalidNoMangleItems; impl LintPass for InvalidNoMangleItems { fn get_lints(&self) -> LintArray { lint_array!(PRIVATE_NO_MANGLE_FNS, PRIVATE_NO_MANGLE_STATICS, NO_MANGLE_CONST_ITEMS) } fn check_item(&mut self, cx: &Context, it: &ast::Item) { match it.node { ast::ItemFn(..) => { if attr::contains_name(&it.attrs, "no_mangle") && !cx.exported_items.contains(&it.id) { let msg = format!("function {} is marked #[no_mangle], but not exported", it.ident); cx.span_lint(PRIVATE_NO_MANGLE_FNS, it.span, &msg); } }, ast::ItemStatic(..) => { if attr::contains_name(&it.attrs, "no_mangle") && !cx.exported_items.contains(&it.id) { let msg = format!("static {} is marked #[no_mangle], but not exported", it.ident); cx.span_lint(PRIVATE_NO_MANGLE_STATICS, it.span, &msg); } }, ast::ItemConst(..) => { if attr::contains_name(&it.attrs, "no_mangle") { // Const items do not refer to a particular location in memory, and therefore // don't have anything to attach a symbol to let msg = "const items should never be #[no_mangle], consider instead using \ `pub static`"; cx.span_lint(NO_MANGLE_CONST_ITEMS, it.span, msg); } } _ => {}, } } } #[derive(Clone, Copy)] pub struct MutableTransmutes; declare_lint! 
{ MUTABLE_TRANSMUTES, Deny, "mutating transmuted &mut T from &T may cause undefined behavior" } impl LintPass for MutableTransmutes { fn get_lints(&self) -> LintArray { lint_array!(MUTABLE_TRANSMUTES) } fn check_expr(&mut self, cx: &Context, expr: &ast::Expr) { use syntax::ast::DefId; use syntax::abi::RustIntrinsic; let msg = "mutating transmuted &mut T from &T may cause undefined behavior, consider instead using an UnsafeCell"; match get_transmute_from_to(cx, expr) { Some((&ty::ty_rptr(_, from_mt), &ty::ty_rptr(_, to_mt))) => { if to_mt.mutbl == ast::Mutability::MutMutable && from_mt.mutbl == ast::Mutability::MutImmutable { cx.span_lint(MUTABLE_TRANSMUTES, expr.span, msg); } } _ => () } fn get_transmute_from_to<'a, 'tcx>(cx: &Context<'a, 'tcx>, expr: &ast::Expr) -> Option<(&'tcx ty::sty<'tcx>, &'tcx ty::sty<'tcx>)> { match expr.node { ast::ExprPath(..) => (), _ => return None } if let DefFn(did, _) = ty::resolve_expr(cx.tcx, expr) { if !def_id_is_transmute(cx, did) { return None; } let typ = ty::node_id_to_type(cx.tcx, expr.id); match typ.sty { ty::ty_bare_fn(_, ref bare_fn) if bare_fn.abi == RustIntrinsic => { if let ty::FnConverging(to) = bare_fn.sig.0.output { let from = bare_fn.sig.0.inputs[0]; return Some((&from.sty, &to.sty)); } }, _ => () } } None } fn def_id_is_transmute(cx: &Context, def_id: DefId) -> bool { match ty::lookup_item_type(cx.tcx, def_id).ty.sty { ty::ty_bare_fn(_, ref bfty) if bfty.abi == RustIntrinsic => (), _ => return false } ty::with_path(cx.tcx, def_id, |path| match path.last() { Some(ref last) => last.name().as_str() == "transmute", _ => false }) } } } /// Forbids using the `#[feature(...)]` attribute #[derive(Copy, Clone)] pub struct UnstableFeatures; declare_lint! { UNSTABLE_FEATURES, Allow, "enabling unstable features" } impl LintPass for UnstableFeatures { fn get_lints(&self) -> LintArray { lint_array!(UNSTABLE_FEATURES) } fn check_attribute(&mut self, ctx: &Context, attr: &ast::Attribute) { if attr::contains_name(&[attr.node.value.clone()], "feature") { if let Some(items) = attr.node.value.meta_item_list() { for item in items { ctx.span_lint(UNSTABLE_FEATURES, item.span, "unstable feature"); } } } } } /// Lints for attempts to impl Drop on types that have `#[repr(C)]` /// attribute (see issue #24585). #[derive(Copy, Clone)] pub struct DropWithReprExtern; declare_lint! { DROP_WITH_REPR_EXTERN, Warn, "use of #[repr(C)] on a type that implements Drop" } impl LintPass for DropWithReprExtern { fn get_lints(&self) -> LintArray { lint_array!(DROP_WITH_REPR_EXTERN) } fn check_crate(&mut self, ctx: &Context, _: &ast::Crate) { for dtor_did in ctx.tcx.destructors.borrow().iter() { let (drop_impl_did, dtor_self_type) = if dtor_did.krate == ast::LOCAL_CRATE { let impl_did = ctx.tcx.map.get_parent_did(dtor_did.node); let ty = ty::lookup_item_type(ctx.tcx, impl_did).ty; (impl_did, ty) } else { continue; }; match dtor_self_type.sty { ty::ty_enum(self_type_did, _) | ty::ty_struct(self_type_did, _) | ty::ty_closure(self_type_did, _) => { let hints = ty::lookup_repr_hints(ctx.tcx, self_type_did); if hints.iter().any(|attr| *attr == attr::ReprExtern) && ty::ty_dtor(ctx.tcx, self_type_did).has_drop_flag() { let drop_impl_span = ctx.tcx.map.def_id_span(drop_impl_did, codemap::DUMMY_SP); let self_defn_span = ctx.tcx.map.def_id_span(self_type_did, codemap::DUMMY_SP); ctx.span_lint(DROP_WITH_REPR_EXTERN, drop_impl_span, "implementing Drop adds hidden state to types, possibly conflicting with `#[repr(C)]`"); // FIXME #19668: could be span_lint_note instead of manual guard. 
if ctx.current_level(DROP_WITH_REPR_EXTERN) != Level::Allow { ctx.sess().span_note(self_defn_span, "the `#[repr(C)]` attribute is attached here"); } } } _ => {} } } } }<|fim▁end|>
"non-method call expr behaving like a method call?") };
<|file_name|>RandomizedQueue.java<|end_file_name|><|fim▁begin|>import java.util.Iterator; import java.util.NoSuchElementException; @SuppressWarnings("unchecked") public class RandomizedQueue<Item> implements Iterable<Item> { private Item[] _arr; private int _length = 0; private void resize(int newLength) { if (newLength > _arr.length) newLength = 2 * _arr.length; else if (newLength < _arr.length / 4) newLength = _arr.length / 2; else return; Item[] newArr = (Item[])(new Object[newLength]); for (int i = 0; i < _length; ++i) { newArr[i] = _arr[i]; } _arr = newArr; } public RandomizedQueue() { _arr = (Item[])(new Object[1]); } public boolean isEmpty() { return _length == 0; } public int size() { return _length; } public void enqueue(Item item) { if (item == null) throw new NullPointerException(); resize(_length + 1); _arr[_length] = item; ++_length; } public Item dequeue() { if (_length == 0) throw new NoSuchElementException(); int idx = StdRandom.uniform(_length); Item ret = _arr[idx]; _arr[idx] = _arr[_length - 1]; _arr[_length - 1] = null; --_length; resize(_length); return ret; } public Item sample() { if (_length == 0) throw new NoSuchElementException(); return _arr[StdRandom.uniform(_length)]; } private class RandomizedQueueIterator implements Iterator<Item> { Item[] _state; int _current = 0; public RandomizedQueueIterator() { _state = (Item[])(new Object[_length]); for (int i = 0; i < _length; ++i) { _state[i] = _arr[i]; } StdRandom.shuffle(_state); } <|fim▁hole|> } public Item next() { if (!hasNext()) throw new NoSuchElementException(); return _state[_current++]; } public void remove() { throw new UnsupportedOperationException(); } } public Iterator<Item> iterator() { return new RandomizedQueueIterator(); } public static void main(String[] args) { RandomizedQueue<Integer> queue = new RandomizedQueue<Integer>(); for (int i = 0; i < 10; ++i) { queue.enqueue(i); } for (int e: queue) { StdOut.println(e); } } }<|fim▁end|>
public boolean hasNext() { return _current != _state.length;
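Editor's note: dequeue() in the RandomizedQueue row above uses the swap-with-last trick: because the queue is deliberately unordered, a uniformly random element can be removed in O(1) by overwriting it with the last element and shrinking the array. A standalone sketch of just that step (illustrative Python, not part of the Java file):

import random

# Remove and return a uniformly random element in O(1); order is not preserved.
def random_dequeue(items):
    i = random.randrange(len(items))           # pick a uniformly random victim
    items[i], items[-1] = items[-1], items[i]  # move it to the end
    return items.pop()                         # popping the last slot is O(1)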
<|file_name|>config_toml_test.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # Copyright 2018 Vote inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from luigi.configuration import LuigiTomlParser, get_config, add_config_path from helpers import LuigiTestCase class TomlConfigParserTest(LuigiTestCase): @classmethod def setUpClass(cls): add_config_path('test/testconfig/luigi.toml') add_config_path('test/testconfig/luigi_local.toml') def setUp(self): LuigiTomlParser._instance = None super(TomlConfigParserTest, self).setUp() def test_get_config(self): config = get_config('toml') self.assertIsInstance(config, LuigiTomlParser) def test_file_reading(self): config = get_config('toml') self.assertIn('hdfs', config.data) def test_get(self): config = get_config('toml') # test getting self.assertEqual(config.get('hdfs', 'client'), 'hadoopcli') self.assertEqual(config.get('hdfs', 'client', 'test'), 'hadoopcli') # test default self.assertEqual(config.get('hdfs', 'test', 'check'), 'check') with self.assertRaises(KeyError): config.get('hdfs', 'test') # test override self.assertEqual(config.get('hdfs', 'namenode_host'), 'localhost') # test non-string values<|fim▁hole|> self.assertEqual(config.get('hdfs', 'client'), 'hadoopcli') config.set('hdfs', 'client', 'test') self.assertEqual(config.get('hdfs', 'client'), 'test') config.set('hdfs', 'check', 'test me') self.assertEqual(config.get('hdfs', 'check'), 'test me') def test_has_option(self): config = get_config('toml') self.assertTrue(config.has_option('hdfs', 'client')) self.assertFalse(config.has_option('hdfs', 'nope')) self.assertFalse(config.has_option('nope', 'client')) class HelpersTest(LuigiTestCase): def test_add_without_install(self): enabled = LuigiTomlParser.enabled LuigiTomlParser.enabled = False with self.assertRaises(ImportError): add_config_path('test/testconfig/luigi.toml') LuigiTomlParser.enabled = enabled def test_get_without_install(self): enabled = LuigiTomlParser.enabled LuigiTomlParser.enabled = False with self.assertRaises(ImportError): get_config('toml') LuigiTomlParser.enabled = enabled<|fim▁end|>
self.assertEqual(config.get('hdfs', 'namenode_port'), 50030) def test_set(self): config = get_config('toml')
<|file_name|>new-age.js<|end_file_name|><|fim▁begin|>(function($) { "use strict"; // Start of use strict // jQuery for page scrolling feature - requires jQuery Easing plugin $('a.page-scroll').bind('click', function(event) { var $anchor = $(this); $('html, body').stop().animate({ scrollTop: ($($anchor.attr('href')).offset().top - 50) }, 1250, 'easeInOutExpo'); event.preventDefault(); }); // Highlight the top nav as scrolling occurs $('body').scrollspy({ target: '.navbar-fixed-top', offset: 100 }); // Closes the Responsive Menu on Menu Item Click $('.navbar-collapse ul li a').click(function() { $('.navbar-toggle:visible').click(); }); // Offset for Main Navigation $('#mainNav').affix({ offset: { top: 50 } }) <|fim▁hole|>};<|fim▁end|>
})(jQuery); // End of use strict ((window.gitter = {}).chat = {}).options = { room: 'FMeat/Lobby'
<|file_name|>set_algorithm.hpp<|end_file_name|><|fim▁begin|>/// \file // Range v3 library // // Copyright Eric Niebler 2013-present // Copyright Tomislav Ivek 2015-2016 // // Use, modification and distribution is subject to the // Boost Software License, Version 1.0. (See accompanying // file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) // // Project home: https://github.com/ericniebler/range-v3 // #ifndef RANGES_V3_VIEW_SET_ALGORITHM_HPP #define RANGES_V3_VIEW_SET_ALGORITHM_HPP #include <algorithm> #include <iterator> #include <type_traits> #include <utility> #include <meta/meta.hpp> #include <range/v3/range_fwd.hpp> #include <range/v3/functional/comparisons.hpp> #include <range/v3/functional/identity.hpp> #include <range/v3/functional/invoke.hpp> #include <range/v3/iterator/default_sentinel.hpp> #include <range/v3/range/access.hpp> #include <range/v3/range/primitives.hpp> #include <range/v3/range/traits.hpp> #include <range/v3/utility/move.hpp> #include <range/v3/utility/semiregular_box.hpp><|fim▁hole|>#include <range/v3/utility/static_const.hpp> #include <range/v3/view/all.hpp> #include <range/v3/view/facade.hpp> #include <range/v3/view/view.hpp> #include <range/v3/detail/disable_warnings.hpp> namespace ranges { /// \cond namespace detail { template<typename Rng1, typename Rng2, typename C, typename P1, typename P2, template<bool, typename...> class Cursor, cardinality Cardinality> struct set_algorithm_view : view_facade<set_algorithm_view<Rng1, Rng2, C, P1, P2, Cursor, Cardinality>, Cardinality> { private: friend range_access; semiregular_box_t<C> pred_; semiregular_box_t<P1> proj1_; semiregular_box_t<P2> proj2_; Rng1 rng1_; Rng2 rng2_; template<bool IsConst> using cursor = Cursor<IsConst, Rng1, Rng2, C, P1, P2>; cursor<simple_view<Rng1>() && simple_view<Rng2>()> begin_cursor() { return {pred_, proj1_, proj2_, ranges::begin(rng1_), ranges::end(rng1_), ranges::begin(rng2_), ranges::end(rng2_)}; } CPP_member auto begin_cursor() const -> CPP_ret(cursor<true>)( // requires range<Rng1 const> && range<Rng2 const>) { return {pred_, proj1_, proj2_, ranges::begin(rng1_), ranges::end(rng1_), ranges::begin(rng2_), ranges::end(rng2_)}; } public: set_algorithm_view() = default; set_algorithm_view(Rng1 rng1, Rng2 rng2, C pred, P1 proj1, P2 proj2) : pred_(std::move(pred)) , proj1_(std::move(proj1)) , proj2_(std::move(proj2)) , rng1_(std::move(rng1)) , rng2_(std::move(rng2)) {} }; template<bool IsConst, typename Rng1, typename Rng2, typename C, typename P1, typename P2> struct set_difference_cursor { private: friend struct set_difference_cursor<!IsConst, Rng1, Rng2, C, P1, P2>; using pred_ref_ = semiregular_box_ref_or_val_t<C, IsConst>; using proj1_ref_ = semiregular_box_ref_or_val_t<P1, IsConst>; using proj2_ref_ = semiregular_box_ref_or_val_t<P2, IsConst>; pred_ref_ pred_; proj1_ref_ proj1_; proj2_ref_ proj2_; template<typename T> using constify_if = meta::const_if_c<IsConst, T>; using R1 = constify_if<Rng1>; using R2 = constify_if<Rng2>; iterator_t<R1> it1_; sentinel_t<R1> end1_; iterator_t<R2> it2_; sentinel_t<R2> end2_; void satisfy() { while(it1_ != end1_) { if(it2_ == end2_) return; if(invoke(pred_, invoke(proj1_, *it1_), invoke(proj2_, *it2_))) return; if(!invoke(pred_, invoke(proj2_, *it2_), invoke(proj1_, *it1_))) ++it1_; ++it2_; } } public: using value_type = range_value_t<constify_if<Rng1>>; using single_pass = meta::or_c<single_pass_iterator_<iterator_t<R1>>, single_pass_iterator_<iterator_t<R2>>>; set_difference_cursor() = default; set_difference_cursor(pred_ref_ 
pred, proj1_ref_ proj1, proj2_ref_ proj2, iterator_t<R1> it1, sentinel_t<R1> end1, iterator_t<R2> it2, sentinel_t<R2> end2) : pred_(std::move(pred)) , proj1_(std::move(proj1)) , proj2_(std::move(proj2)) , it1_(std::move(it1)) , end1_(std::move(end1)) , it2_(std::move(it2)) , end2_(std::move(end2)) { satisfy(); } CPP_template(bool Other)( // requires IsConst && (!Other)) // set_difference_cursor( set_difference_cursor<Other, Rng1, Rng2, C, P1, P2> that) : pred_(std::move(that.pred_)) , proj1_(std::move(that.proj1_)) , proj2_(std::move(that.proj2_)) , it1_(std::move(that.it1_)) , end1_(std::move(that.end1_)) , it2_(std::move(that.it2_)) , end2_(std::move(that.end2_)) {} // clang-format off auto CPP_auto_fun(read)()(const) ( return *it1_ ) // clang-format on void next() { ++it1_; satisfy(); } CPP_member auto equal(set_difference_cursor const & that) const -> CPP_ret(bool)( // requires forward_range<Rng1>) { // does not support comparing iterators from different ranges return it1_ == that.it1_; } bool equal(default_sentinel_t) const { return it1_ == end1_; } // clang-format off auto CPP_auto_fun(move)()(const) ( return iter_move(it1_) ) // clang-format on }; constexpr cardinality set_difference_cardinality(cardinality c1, cardinality c2) { return (c1 == unknown) ? unknown : (c1 >= 0) || (c1 == finite) ? finite : // else, c1 == infinite (c2 >= 0) || (c2 == finite) ? infinite : unknown; } } // namespace detail /// \endcond template<typename Rng1, typename Rng2, typename C, typename P1, typename P2> using set_difference_view = detail::set_algorithm_view<Rng1, Rng2, C, P1, P2, detail::set_difference_cursor, detail::set_difference_cardinality( range_cardinality<Rng1>::value, range_cardinality<Rng2>::value)>; namespace views { struct set_difference_base_fn { template<typename Rng1, typename Rng2, typename C = less, typename P1 = identity, typename P2 = identity> auto operator()(Rng1 && rng1, Rng2 && rng2, C pred = C{}, P1 proj1 = P1{}, P2 proj2 = P2{}) const -> CPP_ret(set_difference_view<all_t<Rng1>, all_t<Rng2>, C, P1, P2>)( // requires viewable_range<Rng1> && input_range<Rng1> && viewable_range<Rng2> && input_range<Rng2> && indirect_relation<C, projected<iterator_t<Rng1>, P1>, projected<iterator_t<Rng2>, P2>>) { return {all(static_cast<Rng1 &&>(rng1)), all(static_cast<Rng2 &&>(rng2)), std::move(pred), std::move(proj1), std::move(proj2)}; } }; struct set_difference_fn : set_difference_base_fn { using set_difference_base_fn::operator(); template<typename Rng2, typename C = less, typename P1 = identity, typename P2 = identity> constexpr auto CPP_fun(operator())(Rng2 && rng2, C && pred = C{}, P1 proj1 = P1{}, P2 proj2 = P2{})( const // requires viewable_range<Rng2> && input_range<Rng2> && (!range<C>)) { return make_view_closure(bind_back(set_difference_base_fn{}, all(rng2), static_cast<C &&>(pred), std::move(proj1), std::move(proj2))); } }; /// \relates set_difference_fn /// \ingroup group-views RANGES_INLINE_VARIABLE(set_difference_fn, set_difference) } // namespace views /// @} /// \cond namespace detail { template<bool IsConst, typename Rng1, typename Rng2, typename C, typename P1, typename P2> struct set_intersection_cursor { private: friend struct set_intersection_cursor<!IsConst, Rng1, Rng2, C, P1, P2>; using pred_ref_ = semiregular_box_ref_or_val_t<C, IsConst>; using proj1_ref_ = semiregular_box_ref_or_val_t<P1, IsConst>; using proj2_ref_ = semiregular_box_ref_or_val_t<P2, IsConst>; pred_ref_ pred_; proj1_ref_ proj1_; proj2_ref_ proj2_; template<typename T> using constify_if = 
meta::const_if_c<IsConst, T>; using R1 = constify_if<Rng1>; using R2 = constify_if<Rng2>; iterator_t<R1> it1_; sentinel_t<R1> end1_; iterator_t<R2> it2_; sentinel_t<R2> end2_; void satisfy() { while(it1_ != end1_ && it2_ != end2_) { if(invoke(pred_, invoke(proj1_, *it1_), invoke(proj2_, *it2_))) ++it1_; else { if(!invoke(pred_, invoke(proj2_, *it2_), invoke(proj1_, *it1_))) return; ++it2_; } } } public: using value_type = range_value_t<R1>; using single_pass = meta::or_c<single_pass_iterator_<iterator_t<R1>>, single_pass_iterator_<iterator_t<R2>>>; set_intersection_cursor() = default; set_intersection_cursor(pred_ref_ pred, proj1_ref_ proj1, proj2_ref_ proj2, iterator_t<R1> it1, sentinel_t<R1> end1, iterator_t<R2> it2, sentinel_t<R2> end2) : pred_(std::move(pred)) , proj1_(std::move(proj1)) , proj2_(std::move(proj2)) , it1_(std::move(it1)) , end1_(std::move(end1)) , it2_(std::move(it2)) , end2_(std::move(end2)) { satisfy(); } CPP_template(bool Other)( // requires IsConst && (!Other)) // set_intersection_cursor( set_intersection_cursor<Other, Rng1, Rng2, C, P1, P2> that) : pred_(std::move(that.pred_)) , proj1_(std::move(that.proj1_)) , proj2_(std::move(that.proj2_)) , it1_(std::move(that.it1_)) , end1_(std::move(that.end1_)) , it2_(std::move(that.it2_)) , end2_(std::move(that.end2_)) {} // clang-format off auto CPP_auto_fun(read)()(const) ( return *it1_ ) // clang-format on void next() { ++it1_; ++it2_; satisfy(); } CPP_member auto equal(set_intersection_cursor const & that) const -> CPP_ret(bool)( // requires forward_range<Rng1>) { // does not support comparing iterators from different ranges return it1_ == that.it1_; } bool equal(default_sentinel_t) const { return (it1_ == end1_) || (it2_ == end2_); } // clang-format off auto CPP_auto_fun(move)()(const) ( return iter_move(it1_) ) // clang-format on }; constexpr cardinality set_intersection_cardinality(cardinality c1, cardinality c2) { return (c1 == unknown) || (c2 == unknown) ? unknown : (c1 >= 0 || c1 == finite) || (c2 >= 0 || c2 == finite) ? 
finite : unknown; } } // namespace detail /// \endcond template<typename Rng1, typename Rng2, typename C, typename P1, typename P2> using set_intersection_view = detail::set_algorithm_view<Rng1, Rng2, C, P1, P2, detail::set_intersection_cursor, detail::set_intersection_cardinality( range_cardinality<Rng1>::value, range_cardinality<Rng2>::value)>; namespace views { struct set_intersection_base_fn { template<typename Rng1, typename Rng2, typename C = less, typename P1 = identity, typename P2 = identity> auto operator()(Rng1 && rng1, Rng2 && rng2, C pred = C{}, P1 proj1 = P1{}, P2 proj2 = P2{}) const -> CPP_ret(set_intersection_view<all_t<Rng1>, all_t<Rng2>, C, P1, P2>)( // requires viewable_range<Rng1> && input_range<Rng1> && viewable_range<Rng2> && input_range<Rng2> && indirect_relation<C, projected<iterator_t<Rng1>, P1>, projected<iterator_t<Rng2>, P2>>) { return {all(static_cast<Rng1 &&>(rng1)), all(static_cast<Rng2 &&>(rng2)), std::move(pred), std::move(proj1), std::move(proj2)}; } }; struct set_intersection_fn : set_intersection_base_fn { using set_intersection_base_fn::operator(); template<typename Rng2, typename C = less, typename P1 = identity, typename P2 = identity> constexpr auto CPP_fun(operator())(Rng2 && rng2, C && pred = C{}, P1 proj1 = P1{}, P2 proj2 = P2{})( const // requires viewable_range<Rng2> && input_range<Rng2> && (!range<C>)) { return make_view_closure(bind_back(set_intersection_base_fn{}, all(rng2), static_cast<C &&>(pred), std::move(proj1), std::move(proj2))); } }; /// \relates set_intersection_fn /// \ingroup group-views RANGES_INLINE_VARIABLE(set_intersection_fn, set_intersection) } // namespace views /// @} /// \cond namespace detail { template<bool IsConst, typename Rng1, typename Rng2, typename C, typename P1, typename P2> struct set_union_cursor { private: friend struct set_union_cursor<!IsConst, Rng1, Rng2, C, P1, P2>; using pred_ref_ = semiregular_box_ref_or_val_t<C, IsConst>; using proj1_ref_ = semiregular_box_ref_or_val_t<P1, IsConst>; using proj2_ref_ = semiregular_box_ref_or_val_t<P2, IsConst>; pred_ref_ pred_; proj1_ref_ proj1_; proj2_ref_ proj2_; template<typename T> using constify_if = meta::const_if_c<IsConst, T>; using R1 = constify_if<Rng1>; using R2 = constify_if<Rng2>; iterator_t<R1> it1_; sentinel_t<R1> end1_; iterator_t<R2> it2_; sentinel_t<R2> end2_; enum class state_t { FIRST, SECOND } state; void satisfy() { if(it1_ == end1_) { state = state_t::SECOND; return; } if(it2_ == end2_) { state = state_t::FIRST; return; } if(invoke(pred_, invoke(proj2_, *it2_), invoke(proj1_, *it1_))) { state = state_t::SECOND; return; } if(!invoke(pred_, invoke(proj1_, *it1_), invoke(proj2_, *it2_))) ++it2_; state = state_t::FIRST; } public: using value_type = common_type_t<range_value_t<R1>, range_value_t<R2>>; using reference_type = common_reference_t<range_reference_t<R1>, range_reference_t<R2>>; using rvalue_reference_type = common_reference_t<range_rvalue_reference_t<R1>, range_rvalue_reference_t<R2>>; using single_pass = meta::or_c<single_pass_iterator_<iterator_t<R1>>, single_pass_iterator_<iterator_t<R2>>>; set_union_cursor() = default; set_union_cursor(pred_ref_ pred, proj1_ref_ proj1, proj2_ref_ proj2, iterator_t<R1> it1, sentinel_t<R1> end1, iterator_t<R2> it2, sentinel_t<R2> end2) : pred_(std::move(pred)) , proj1_(std::move(proj1)) , proj2_(std::move(proj2)) , it1_(std::move(it1)) , end1_(std::move(end1)) , it2_(std::move(it2)) , end2_(std::move(end2)) { satisfy(); } CPP_template(bool Other)( // requires IsConst && (!Other)) 
set_union_cursor(set_union_cursor<Other, Rng1, Rng2, C, P1, P2> that) : pred_(std::move(that.pred_)) , proj1_(std::move(that.proj1_)) , proj2_(std::move(that.proj2_)) , it1_(std::move(that.it1_)) , end1_(std::move(that.end1_)) , it2_(std::move(that.it2_)) , end2_(std::move(that.end2_)) {} reference_type read() const noexcept(noexcept(*it1_) && noexcept(*it2_)) { if(state == state_t::SECOND) return *it2_; else return *it1_; } void next() { if(state == state_t::FIRST) ++it1_; else ++it2_; satisfy(); } CPP_member auto equal(set_union_cursor const & that) const -> CPP_ret(bool)( // requires forward_range<Rng1> && forward_range<Rng2>) { // does not support comparing iterators from different ranges return (it1_ == that.it1_) && (it2_ == that.it2_); } bool equal(default_sentinel_t) const { return (it1_ == end1_) && (it2_ == end2_); } rvalue_reference_type move() const noexcept(noexcept(iter_move(it1_)) && noexcept(iter_move(it2_))) { if(state == state_t::SECOND) return iter_move(it2_); else return iter_move(it1_); } }; constexpr cardinality set_union_cardinality(cardinality c1, cardinality c2) { return (c1 == infinite) || (c2 == infinite) ? infinite : (c1 == unknown) || (c2 == unknown) ? unknown : finite; } } // namespace detail /// \endcond template<typename Rng1, typename Rng2, typename C, typename P1, typename P2> using set_union_view = detail::set_algorithm_view<Rng1, Rng2, C, P1, P2, detail::set_union_cursor, detail::set_union_cardinality( range_cardinality<Rng1>::value, range_cardinality<Rng2>::value)>; namespace views { struct set_union_base_fn { public: template<typename Rng1, typename Rng2, typename C = less, typename P1 = identity, typename P2 = identity> auto operator()(Rng1 && rng1, Rng2 && rng2, C pred = C{}, P1 proj1 = P1{}, P2 proj2 = P2{}) const -> CPP_ret(set_union_view<all_t<Rng1>, all_t<Rng2>, C, P1, P2>)( // requires viewable_range<Rng1> && input_range<Rng1> && viewable_range<Rng2> && input_range<Rng2> && common_with< range_value_t<Rng1>, range_value_t<Rng2>> && common_reference_with<range_reference_t<Rng1>, range_reference_t<Rng2>> && common_reference_with<range_rvalue_reference_t<Rng1>, range_rvalue_reference_t<Rng2>> && indirect_relation<C, projected<iterator_t<Rng1>, P1>, projected<iterator_t<Rng2>, P2>>) { return {all(static_cast<Rng1 &&>(rng1)), all(static_cast<Rng2 &&>(rng2)), std::move(pred), std::move(proj1), std::move(proj2)}; } }; struct set_union_fn : set_union_base_fn { using set_union_base_fn::operator(); template<typename Rng2, typename C = less, typename P1 = identity, typename P2 = identity> constexpr auto CPP_fun(operator())(Rng2 && rng2, C && pred = C{}, P1 proj1 = P1{}, P2 proj2 = P2{})( const // requires viewable_range<Rng2> && input_range<Rng2> && (!range<C>)) { return make_view_closure(bind_back(set_union_base_fn{}, all(rng2), static_cast<C &&>(pred), std::move(proj1), std::move(proj2))); } }; /// \relates set_union_fn /// \ingroup group-views RANGES_INLINE_VARIABLE(set_union_fn, set_union) } // namespace views /// @} /// \cond namespace detail { template<bool IsConst, typename Rng1, typename Rng2, typename C, typename P1, typename P2> struct set_symmetric_difference_cursor { private: friend struct set_symmetric_difference_cursor<!IsConst, Rng1, Rng2, C, P1, P2>; using pred_ref_ = semiregular_box_ref_or_val_t<C, IsConst>; using proj1_ref_ = semiregular_box_ref_or_val_t<P1, IsConst>; using proj2_ref_ = semiregular_box_ref_or_val_t<P2, IsConst>; pred_ref_ pred_; proj1_ref_ proj1_; proj2_ref_ proj2_; template<typename T> using constify_if = 
meta::const_if_c<IsConst, T>; using R1 = constify_if<Rng1>; using R2 = constify_if<Rng2>; iterator_t<R1> it1_; sentinel_t<R1> end1_; iterator_t<R2> it2_; sentinel_t<R2> end2_; enum class state_t { FIRST, SECOND, ONLY_FIRST, ONLY_SECOND } state; void satisfy() { while(it1_ != end1_) { if(it2_ == end2_) { state = state_t::ONLY_FIRST; return; } if(invoke(pred_, invoke(proj1_, *it1_), invoke(proj2_, *it2_))) { state = state_t::FIRST; return; } else { if(invoke(pred_, invoke(proj2_, *it2_), invoke(proj1_, *it1_))) { state = state_t::SECOND; return; } else { ++it1_; ++it2_; } } } state = state_t::ONLY_SECOND; } public: using value_type = common_type_t<range_value_t<R1>, range_value_t<R2>>; using reference_type = common_reference_t<range_reference_t<R1>, range_reference_t<R2>>; using rvalue_reference_type = common_reference_t<range_rvalue_reference_t<R1>, range_rvalue_reference_t<R2>>; using single_pass = meta::or_c<single_pass_iterator_<iterator_t<R1>>, single_pass_iterator_<iterator_t<R2>>>; set_symmetric_difference_cursor() = default; set_symmetric_difference_cursor(pred_ref_ pred, proj1_ref_ proj1, proj2_ref_ proj2, iterator_t<R1> it1, sentinel_t<R1> end1, iterator_t<R2> it2, sentinel_t<R2> end2) : pred_(std::move(pred)) , proj1_(std::move(proj1)) , proj2_(std::move(proj2)) , it1_(std::move(it1)) , end1_(std::move(end1)) , it2_(std::move(it2)) , end2_(std::move(end2)) , state() { satisfy(); } CPP_template(bool Other)( // requires IsConst && (!Other)) // set_symmetric_difference_cursor( set_symmetric_difference_cursor<Other, Rng1, Rng2, C, P1, P2> that) : pred_(std::move(that.pred_)) , proj1_(std::move(that.proj1_)) , proj2_(std::move(that.proj2_)) , it1_(std::move(that.it1_)) , end1_(std::move(that.end1_)) , it2_(std::move(that.it2_)) , end2_(std::move(that.end2_)) , state(that.state) {} reference_type read() const noexcept(noexcept(*it1_) && noexcept(*it2_)) { if(state == state_t::SECOND || state == state_t::ONLY_SECOND) return *it2_; else return *it1_; } void next() { switch(state) { case state_t::FIRST: ++it1_; satisfy(); break; case state_t::ONLY_FIRST: ++it1_; break; case state_t::SECOND: ++it2_; satisfy(); break; case state_t::ONLY_SECOND: ++it2_; break; } } CPP_member auto equal(set_symmetric_difference_cursor const & that) const -> CPP_ret(bool)( // requires forward_range<R1> && forward_range<R2>) { // does not support comparing iterators from different ranges: return (it1_ == that.it1_) && (it2_ == that.it2_); } bool equal(default_sentinel_t) const { return (it1_ == end1_) && (it2_ == end2_); } rvalue_reference_type move() const noexcept(noexcept(iter_move(it1_)) && noexcept(iter_move(it2_))) { if(state == state_t::SECOND || state == state_t::ONLY_SECOND) return iter_move(it2_); else return iter_move(it1_); } }; constexpr cardinality set_symmetric_difference_cardinality(cardinality c1, cardinality c2) { return (c1 == unknown) || (c2 == unknown) ? unknown : (c1 == infinite) != (c2 == infinite) ? infinite : (c1 == infinite) && (c2 == infinite) ? 
unknown : finite; } } // namespace detail /// \endcond template<typename Rng1, typename Rng2, typename C, typename P1, typename P2> using set_symmetric_difference_view = detail::set_algorithm_view< Rng1, Rng2, C, P1, P2, detail::set_symmetric_difference_cursor, detail::set_symmetric_difference_cardinality(range_cardinality<Rng1>::value, range_cardinality<Rng2>::value)>; namespace views { struct set_symmetric_difference_base_fn { template<typename Rng1, typename Rng2, typename C = less, typename P1 = identity, typename P2 = identity> auto operator()(Rng1 && rng1, Rng2 && rng2, C pred = C{}, P1 proj1 = P1{}, P2 proj2 = P2{}) const -> CPP_ret(set_symmetric_difference_view<all_t<Rng1>, all_t<Rng2>, C, P1, P2>)( // requires viewable_range<Rng1> && input_range<Rng1> && viewable_range<Rng2> && input_range<Rng2> && common_with< range_value_t<Rng1>, range_value_t<Rng2>> && common_reference_with<range_reference_t<Rng1>, range_reference_t<Rng2>> && common_reference_with<range_rvalue_reference_t<Rng1>, range_rvalue_reference_t<Rng2>> && indirect_relation<C, projected<iterator_t<Rng1>, P1>, projected<iterator_t<Rng2>, P2>>) { return {all(static_cast<Rng1 &&>(rng1)), all(static_cast<Rng2 &&>(rng2)), std::move(pred), std::move(proj1), std::move(proj2)}; } }; struct set_symmetric_difference_fn : set_symmetric_difference_base_fn { using set_symmetric_difference_base_fn::operator(); template<typename Rng2, typename C = less, typename P1 = identity, typename P2 = identity> constexpr auto CPP_fun(operator())(Rng2 && rng2, C && pred = C{}, P1 proj1 = P1{}, P2 proj2 = P2{})( const // requires viewable_range<Rng2> && input_range<Rng2> && (!range<C>)) { return make_view_closure(bind_back(set_symmetric_difference_base_fn{}, all(rng2), static_cast<C &&>(pred), std::move(proj1), std::move(proj2))); } }; /// \relates set_symmetric_difference_fn /// \ingroup group-views RANGES_INLINE_VARIABLE(set_symmetric_difference_fn, set_symmetric_difference) } // namespace views /// @} } // namespace ranges #include <range/v3/detail/reenable_warnings.hpp> #endif<|fim▁end|>
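Editor's note: the four cursors in the set_algorithm.hpp row above (difference, intersection, union, symmetric difference) are variations of one linear merge walk over two sequences sorted under the comparator; each satisfy() method only decides which iterator to advance. A compact Python rendering of the set_difference step logic, assuming inputs sorted under pred (a sketch, not part of range-v3):

# Lazily yield elements of xs that have no equivalent in ys; both inputs
# must be sorted under pred. Mirrors set_difference_cursor::satisfy() above.
def lazy_set_difference(xs, ys, pred=lambda a, b: a < b):
    it1, it2 = iter(xs), iter(ys)
    x, y = next(it1, None), next(it2, None)
    while x is not None:
        if y is None or pred(x, y):      # x precedes every remaining y: keep it
            yield x
            x = next(it1, None)
        elif pred(y, x):                 # y is smaller: skip it
            y = next(it2, None)
        else:                            # equivalent elements: drop one of each
            x, y = next(it1, None), next(it2, None)

# list(lazy_set_difference([1, 2, 4, 6], [2, 3, 6])) -> [1, 4]

(The None sentinel assumes the sequences themselves contain no None values.)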
<|file_name|>utils.js<|end_file_name|><|fim▁begin|>export function allDaysDisabledBefore (day, unit, { minDate, includeDates } = {}) { const dateBefore = day.clone().subtract(1, unit) return (minDate && dateBefore.isBefore(minDate, unit)) || (includeDates && includeDates.every(includeDate => dateBefore.isBefore(includeDate, unit))) ||<|fim▁hole|> false }<|fim▁end|>
<|file_name|>oracle.py<|end_file_name|><|fim▁begin|>import os.path import sys import re import warnings import cx_Oracle from django.db import connection, models from django.db.backends.util import truncate_name from django.core.management.color import no_style from django.db.models.fields import NOT_PROVIDED from django.db.utils import DatabaseError # In revision r16016 function get_sequence_name has been transformed into # method of DatabaseOperations class. To make code backward-compatible we # need to handle both situations. try: from django.db.backends.oracle.base import get_sequence_name\ as original_get_sequence_name except ImportError: original_get_sequence_name = None from south.db import generic warnings.warn("! WARNING: South's Oracle support is still alpha. " "Be wary of possible bugs.") class DatabaseOperations(generic.DatabaseOperations): """ Oracle implementation of database operations. """ backend_name = 'oracle' alter_string_set_type = 'ALTER TABLE %(table_name)s MODIFY %(column)s %(type)s %(nullity)s;' alter_string_set_default = 'ALTER TABLE %(table_name)s MODIFY %(column)s DEFAULT %(default)s;' add_column_string = 'ALTER TABLE %s ADD %s;' delete_column_string = 'ALTER TABLE %s DROP COLUMN %s;' add_constraint_string = 'ALTER TABLE %(table_name)s ADD CONSTRAINT %(constraint)s %(clause)s' allows_combined_alters = False has_booleans = False constraints_dict = { 'P': 'PRIMARY KEY', 'U': 'UNIQUE', 'C': 'CHECK', 'R': 'FOREIGN KEY' } def get_sequence_name(self, table_name): if original_get_sequence_name is None: return self._get_connection().ops._get_sequence_name(table_name) else: return original_get_sequence_name(table_name) #TODO: This will cause very obscure bugs if anyone uses a column name or string value # that looks like a column definition (with 'CHECK', 'DEFAULT' and/or 'NULL' in it) # e.g. "CHECK MATE" varchar(10) DEFAULT 'NULL' def adj_column_sql(self, col): # Syntax fixes -- Oracle is picky about clause order col = re.sub('(?P<constr>CHECK \(.*\))(?P<any>.*)(?P<default>DEFAULT \d+)', lambda mo: '%s %s%s'%(mo.group('default'), mo.group('constr'), mo.group('any')), col) #syntax fix for boolean/integer field only col = re.sub('(?P<not_null>(NOT )?NULL) (?P<misc>(.* )?)(?P<default>DEFAULT.+)', lambda mo: '%s %s %s'%(mo.group('default'),mo.group('not_null'),mo.group('misc') or ''), col) #fix order of NULL/NOT NULL and DEFAULT return col def check_meta(self, table_name): return table_name in [ m._meta.db_table for m in models.get_models() ] #caching provided by Django def normalize_name(self, name): """ Get the properly shortened and uppercased identifier as returned by quote_name(), but without the actual quotes. 
""" nn = self.quote_name(name) if nn[0] == '"' and nn[-1] == '"': nn = nn[1:-1] return nn @generic.invalidate_table_constraints def create_table(self, table_name, fields): qn = self.quote_name(table_name) columns = [] autoinc_sql = '' for field_name, field in fields: col = self.column_sql(table_name, field_name, field) if not col: continue col = self.adj_column_sql(col) columns.append(col) if isinstance(field, models.AutoField): autoinc_sql = connection.ops.autoinc_sql(table_name, field_name) sql = 'CREATE TABLE %s (%s);' % (qn, ', '.join([col for col in columns])) self.execute(sql) if autoinc_sql: self.execute(autoinc_sql[0]) self.execute(autoinc_sql[1]) @generic.invalidate_table_constraints def delete_table(self, table_name, cascade=True): qn = self.quote_name(table_name) # Note: PURGE is not valid syntax for Oracle 9i (it was added in 10) if cascade: self.execute('DROP TABLE %s CASCADE CONSTRAINTS;' % qn) else: self.execute('DROP TABLE %s;' % qn) # If the table has an AutoField a sequence was created. sequence_sql = """ DECLARE i INTEGER; BEGIN SELECT COUNT(*) INTO i FROM USER_CATALOG WHERE TABLE_NAME = '%(sq_name)s' AND TABLE_TYPE = 'SEQUENCE'; IF i = 1 THEN EXECUTE IMMEDIATE 'DROP SEQUENCE "%(sq_name)s"'; END IF; END; /""" % {'sq_name': self.get_sequence_name(table_name)} self.execute(sequence_sql) @generic.invalidate_table_constraints def alter_column(self, table_name, name, field, explicit_name=True): if self.dry_run: if self.debug: print ' - no dry run output for alter_column() due to dynamic DDL, sorry' return qn = self.quote_name(table_name) # hook for the field to do any resolution prior to it's attributes being queried if hasattr(field, 'south_init'): field.south_init() field = self._field_sanity(field) # Add _id or whatever if we need to field.set_attributes_from_name(name) if not explicit_name: name = field.column qn_col = self.quote_name(name) # First, change the type # This will actually also add any CHECK constraints needed, # since e.g. 'type' for a BooleanField is 'NUMBER(1) CHECK (%(qn_column)s IN (0,1))' <|fim▁hole|> 'table_name':qn, 'column': qn_col, 'type': self._db_type_for_alter_column(field), 'nullity': 'NOT NULL', 'default': 'NULL' } if field.null: params['nullity'] = 'NULL' if not field.null and field.has_default(): params['default'] = self._default_value_workaround(field.get_default()) sql_templates = [ (self.alter_string_set_type, params), (self.alter_string_set_default, params.copy()), ] # drop CHECK constraints. Make sure this is executed before the ALTER TABLE statements # generated above, since those statements recreate the constraints we delete here. 
check_constraints = self._constraints_affecting_columns(table_name, [name], "CHECK") for constraint in check_constraints: self.execute(self.delete_check_sql % { 'table': self.quote_name(table_name), 'constraint': self.quote_name(constraint), }) for sql_template, params in sql_templates: try: self.execute(sql_template % params) except DatabaseError, exc: description = str(exc) # Oracle complains if a column is already NULL/NOT NULL if 'ORA-01442' in description or 'ORA-01451' in description: # so we just drop NULL/NOT NULL part from target sql and retry params['nullity'] = '' sql = sql_template % params self.execute(sql) # Oracle also has issues if we try to change a regular column # to a LOB or vice versa (also REF, object, VARRAY or nested # table, but these don't come up much in Django apps) elif 'ORA-22858' in description or 'ORA-22859' in description: self._alter_column_lob_workaround(table_name, name, field) else: raise def _alter_column_lob_workaround(self, table_name, name, field): """ Oracle refuses to change a column type from/to LOB to/from a regular column. In Django, this shows up when the field is changed from/to a TextField. What we need to do instead is: - Rename the original column - Add the desired field as new - Update the table to transfer values from old to new - Drop old column """ renamed = self._generate_temp_name(name) self.rename_column(table_name, name, renamed) self.add_column(table_name, name, field, keep_default=False) self.execute("UPDATE %s set %s=%s" % ( self.quote_name(table_name), self.quote_name(name), self.quote_name(renamed), )) self.delete_column(table_name, renamed) def _generate_temp_name(self, for_name): suffix = hex(hash(for_name)).upper()[1:] return self.normalize_name(for_name + "_" + suffix) @generic.copy_column_constraints #TODO: Appears to be nulled by the delete decorator below... @generic.delete_column_constraints def rename_column(self, table_name, old, new): if old == new: # Short-circuit out return [] self.execute('ALTER TABLE %s RENAME COLUMN %s TO %s;' % ( self.quote_name(table_name), self.quote_name(old), self.quote_name(new), )) @generic.invalidate_table_constraints def add_column(self, table_name, name, field, keep_default=True): sql = self.column_sql(table_name, name, field) sql = self.adj_column_sql(sql) if sql: params = ( self.quote_name(table_name), sql ) sql = self.add_column_string % params self.execute(sql) # Now, drop the default if we need to if not keep_default and field.default is not None: field.default = NOT_PROVIDED self.alter_column(table_name, name, field, explicit_name=False) def delete_column(self, table_name, name): return super(DatabaseOperations, self).delete_column(self.quote_name(table_name), name) def lookup_constraint(self, db_name, table_name, column_name=None): if column_name: # Column names in the constraint cache come from the database, # make sure we use the properly shortened/uppercased version # for lookup. column_name = self.normalize_name(column_name) return super(DatabaseOperations, self).lookup_constraint(db_name, table_name, column_name) def _constraints_affecting_columns(self, table_name, columns, type="UNIQUE"): if columns: columns = [self.normalize_name(c) for c in columns] return super(DatabaseOperations, self)._constraints_affecting_columns(table_name, columns, type) def _field_sanity(self, field): """ This particular override stops us sending DEFAULTs for BooleanField. 
""" if isinstance(field, models.BooleanField) and field.has_default(): field.default = int(field.to_python(field.get_default())) return field def _default_value_workaround(self, value): from datetime import date,time,datetime if isinstance(value, (date,time,datetime)): return "'%s'" % value else: return super(DatabaseOperations, self)._default_value_workaround(value) def _fill_constraint_cache(self, db_name, table_name): self._constraint_cache.setdefault(db_name, {}) self._constraint_cache[db_name][table_name] = {} rows = self.execute(""" SELECT user_cons_columns.constraint_name, user_cons_columns.column_name, user_constraints.constraint_type FROM user_constraints JOIN user_cons_columns ON user_constraints.table_name = user_cons_columns.table_name AND user_constraints.constraint_name = user_cons_columns.constraint_name WHERE user_constraints.table_name = '%s' """ % self.normalize_name(table_name)) for constraint, column, kind in rows: self._constraint_cache[db_name][table_name].setdefault(column, set()) self._constraint_cache[db_name][table_name][column].add((self.constraints_dict[kind], constraint)) return<|fim▁end|>
params = {
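Editor's note: _alter_column_lob_workaround() in the oracle.py row above documents a four-step dance (rename the old column, add the new one, copy the data across, drop the renamed original) because Oracle refuses to alter a column between LOB and scalar types. Roughly the statement sequence it issues, shown here with a hypothetical table and an invented temporary column name (illustrative only, not South's exact output):

# Assumed example: changing APP_NOTE.BODY to a CLOB.
lob_workaround_sql = [
    'ALTER TABLE "APP_NOTE" RENAME COLUMN "BODY" TO "BODY_7F3A";',  # park the original
    'ALTER TABLE "APP_NOTE" ADD "BODY" CLOB;',                      # add the target column
    'UPDATE "APP_NOTE" SET "BODY" = "BODY_7F3A";',                  # copy the values across
    'ALTER TABLE "APP_NOTE" DROP COLUMN "BODY_7F3A";',              # drop the parked column
]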
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>// Copyright 2018, Google, LLC. // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /*! THIS FILE IS AUTO-GENERATED */ import {getAPI, GoogleConfigurable} from 'googleapis-common'; import {androidmanagement_v1} from './v1'; export const VERSIONS = {<|fim▁hole|> androidmanagement_v1.Androidmanagement; export function androidmanagement(options: androidmanagement_v1.Options): androidmanagement_v1.Androidmanagement; export function androidmanagement<T = androidmanagement_v1.Androidmanagement>( this: GoogleConfigurable, versionOrOptions: 'v1'|androidmanagement_v1.Options) { return getAPI<T>('androidmanagement', versionOrOptions, VERSIONS, this); }<|fim▁end|>
'v1': androidmanagement_v1.Androidmanagement, }; export function androidmanagement(version: 'v1'):
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ Created on 17 Aug 2012 @author: Éric Piel Copyright © 2012 Éric Piel, Delmic This file is part of Odemis. Odemis is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License version 2 as published by the Free Software Foundation. Odemis is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR<|fim▁hole|>Odemis. If not, see http://www.gnu.org/licenses/. """ from __future__ import division # for listing all the types of file format supported import importlib import logging from odemis.dataio import tiff import os # The interface of a "format manager" is as follows: # * one module # * FORMAT (string): user friendly name of the format # * EXTENSIONS (list of strings): possible file-name extensions # * export (callable): write model.DataArray into a file # * read_data (callable): read a file into model.DataArray # * read_thumbnail (callable): read the thumbnail(s) of a file # if it doesn't support writing, then it has no .export(), and if it doesn't # support reading, then it has no read_data(). __all__ = ["tiff", "stiff", "hdf5", "png", "csv"] def get_available_formats(mode=os.O_RDWR, allowlossy=False): """ Find the available file formats mode (os.O_RDONLY, os.O_WRONLY, or os.O_RDWR): whether only list formats which can be read, which can be written, or all of them. allowlossy (bool): If True, will also return the formats that can lose some of the original information (when writing the data to a file) return (dict string -> list of strings): name of each format -> list of extensions """ formats = {} # Look dynamically which format is available for module_name in __all__: try: exporter = importlib.import_module("." + module_name, "odemis.dataio") except Exception: logging.info("Skipping exporter %s, which failed to load", module_name) continue # module cannot be loaded if not allowlossy and hasattr(exporter, "LOSSY") and exporter.LOSSY: logging.debug("Skipping exporter %s as it is lossy", module_name) continue if ((mode == os.O_RDONLY and not hasattr(exporter, "read_data")) or (mode == os.O_WRONLY and not hasattr(exporter, "export"))): continue formats[exporter.FORMAT] = exporter.EXTENSIONS if not formats: logging.error("No file converter found!") return formats def get_converter(fmt): """ Return the converter corresponding to a format name :param fmt: (string) the format name :returns: (module) the converter :raises ValueError: in case no exporter can be found """ # Look dynamically which format is available for module_name in __all__: try: converter = importlib.import_module("." + module_name, "odemis.dataio") except (ValueError, TypeError, ImportError): logging.info("Import of converter %s failed", module_name, exc_info=True) continue # module cannot be loaded if fmt == converter.FORMAT: return converter raise ValueError("No converter for format %s found" % fmt) def find_fittest_converter(filename, default=tiff, mode=os.O_WRONLY, allowlossy=False): """ Find the most fitting exporter according to a filename (actually, its extension) filename (string): (path +) filename with extension default (dataio. Module): default exporter to pick if no really fitting exporter is found mode: cf get_available_formats() allowlossy: cf get_available_formats() returns (dataio.
Module): the right exporter """ # Find the extension of the file basename = os.path.basename(filename).lower() if basename == "": raise ValueError("Filename should have at least one letter: '%s'" % filename) # make sure we pick the format with the longest fitting extension best_len = 0 best_fmt = None for fmt, exts in get_available_formats(mode, allowlossy).items(): for e in exts: if filename.endswith(e) and len(e) > best_len: best_len = len(e) best_fmt = fmt if best_fmt is not None: logging.debug("Determined that '%s' corresponds to %s format", basename, best_fmt) conv = get_converter(best_fmt) else: conv = default return conv<|fim▁end|>
PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with
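Editor's note: find_fittest_converter() in the __init__.py row above picks the converter whose registered extension is the longest suffix of the filename, so a compound extension such as ".ome.tiff" beats a plain ".tiff" registration. A small Python 3 illustration of that rule with made-up registrations (not odemis code):

# Hypothetical registrations; odemis builds the real dict from its exporters.
formats = {"TIFF": [".tiff", ".tif"], "OME-TIFF": [".ome.tiff"]}

def fittest_format(filename):
    candidates = [(fmt, ext) for fmt, exts in formats.items()
                  for ext in exts if filename.lower().endswith(ext)]
    return max(candidates, key=lambda c: len(c[1]))[0] if candidates else None

# fittest_format("scan.ome.tiff") -> "OME-TIFF"  (the longer suffix wins)
# fittest_format("scan.txt")      -> None        (the caller falls back to a default)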
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>## This file is part of conftron. ## ## Copyright (C) 2011 Matt Peddie <[email protected]> ## ## This program is free software; you can redistribute it and/or ## modify it under the terms of the GNU General Public License as ## published by the Free Software Foundation; either version 2 of the ## License, or (at your option) any later version. ## ## This program is distributed in the hope that it will be useful, but ## WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ## General Public License for more details. ## ## You should have received a copy of the GNU General Public License ## along with this program; if not, write to the Free Software ## Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA ## 02110-1301, USA. import genconfig, baseio from settings_templates import * class LCMSettingField(baseio.TagInheritance): required_tags = ['default', 'step', 'min', 'max'] def __init__(self, hsh, parent): self.__dict__.update(hsh) self._inherit(parent) if self.has_key('absmax'): self.min = -float(self.absmax) self.max = float(self.absmax) self.parent = parent self.parentname = parent.name self._musthave(parent, parse_settings_noval) self.classname = parent.classname parent.die += self._filter() def field_setting(self): return lcm_settings_field_template_mm % self def _filter(self): die = 0 die += self._are_defaults_sane() return die def _are_defaults_sane(self): ## Default values outside the range given by the bounds ## don't make sense either. die = 0 if (float(self['min']) > float(self['default']) or float(self['max']) < float(self['default'])): print parse_settings_badval % {"sp":'default', "f":self['name'], "s":self.parent['name'], "max":self['max'], "min":self['min'], "val":self['default']} die += 1 if float(self['step']) > (float(self['max']) - float(self['min'])): print parse_settings_badval % {"sp":'default', "f":self['name'], "s":self.parent['name'], "max":self['max'], "min":self['min'], "val":self['step']} die += 1 return die class LCMSetting(baseio.CHeader, baseio.LCMFile, baseio.CCode, baseio.TagInheritance, baseio.IncludePasting): def __init__(self, s, parent): self.__dict__.update(s.attrib) self.classname = parent.name self._inherit(parent) self.lcm_folder = genconfig.lcm_folder self.die = 0 self.make_fields(s.getchildren()) self.field_settings = "\n".join([f.field_setting() for f in self.fields]) def make_fields(self, fields): flattened = self.insert_includes(fields, ['member']) self.check_includes(flattened, ['member']) self.fields = [LCMSettingField(dict(f.attrib, **{'varname':self.varname}), self) for f in flattened] def to_settings_file(self): basename = "%(classname)s_%(type)s_%(varname)s" % self filename = genconfig.settings_folder + "/" + basename def sf(cf): cf.write("#include <lcm/lcm.h>\n" % self) cf.write("#include <math.h>\n" % self) cf.write("#include <%(classname)s_settings.h>\n" % self) if self.has_key('channel'): cf.write(lcm_settings_init_custom_chan_template % self) else: cf.write(lcm_settings_init_template % self) cf.write(lcm_settings_func_template % self) self.to_h(filename, sf) def to_settings_nop(self): filename = genconfig.stubs_folder + "/%(classname)s_%(type)s_%(varname)s_setting_stub" % self def stub_f(cf): cf.write("#include <lcm_settings_auto.h>\n\n") cf.write(lcm_settings_init_nop_template % self) cf.write(lcm_settings_set_nop_template % self) self.to_c_no_h(filename, stub_f) def to_settings_prototype(self, cf): 
cf.write(lcm_settings_prototype % self) class Settings(baseio.CHeader, baseio.LCMFile, baseio.CCode, baseio.TagInheritance, baseio.Searchable, baseio.IncludePasting): def __init__(self, name, children, class_structs, path, filename): self.name = name self.path = path self.file = filename self.classname = name self._filter_settings(children) self.class_struct_includes = self._class_struct_includes(class_structs) def merge(self, other): for k, v in other.__dict__.iteritems(): if not k in genconfig.reserved_tag_names: try: # Is it a method? getattr(getattr(self, k), "__call__") except AttributeError: # Nope. self.__dict__[k] = other.__dict__[k] self.settings.extend(other.settings) return self def search(self, searchname): return self._search(self.settings, searchname) def codegen(self): self.init_calls = "\n".join([lcm_settings_init_call_template % s for s in self.settings]) self.null_calls = "\n".join([lcm_settings_init_null_template % s for s in self.settings]) self.to_settings_h() self.settings_nops() def init_call(self): return " %(classname)s_settings_init(provider); \\\n" % self def check_call(self): return " %(classname)s_settings_check(); \\\n" % self def _filter_settings(self, structs): die = 0 flattened = self.insert_includes(structs, ['struct']) self.check_includes(flattened, ['struct']) outstructs = [LCMSetting(s, self) for s in flattened] die = sum([s.die for s in outstructs]) if die: print "Lots of settings errors detected; cannot continue code generation." sys.exit(1) self.settings = outstructs def settings_functions(self): for s in self.settings: s.to_settings_file() def settings_prototypes(self, cf): cf.write("/* Prototypes for all the functions defined in settings/ folder */\n") for s in self.settings: cf.write(lcm_settings_prototype % s) cf.write(lcm_settings_init_prototype % s) def settings_nops(self): for s in self.settings: s.to_settings_nop() def _class_struct_includes(self, structs): out = [] formatstr = "#include \"%(lcm_folder)s/%(classname)s_%(type)s.h\"" if (structs): out = [formatstr % s for s in structs] else: ## Orphaned settings module; include only types we know ## about out = [formatstr % s for s in self.settings] return "\n".join(out) def settings_includes(self, cf): cf.write(self.class_struct_includes) def to_settings_periodic(self): pass def to_settings_c(self): pass def to_settings_h(self): self.settings_functions() def settings_f(cf): cf.write("#include \"%(classname)s_types.h\"\n\n" % self) cf.write("#include \"%(classname)s_telemetry.h\"\n\n" % self) cf.write("#ifdef __cplusplus\n") cf.write("extern \"C\"{\n") cf.write("#endif\n\n") self.settings_prototypes(cf)<|fim▁hole|> cf.write("\n#ifdef __cplusplus\n") cf.write("}\n") cf.write("#endif\n") # Make initialization macro cf.write(lcm_settings_init_class_template % self) cf.write(lcm_check_call_template % self); self.to_h(self.name + "_settings", settings_f)<|fim▁end|>
<|file_name|>tag_test.cpp<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2004-2015 by Jakob Schröter <[email protected]> * This file is part of the gloox library. http://camaya.net/gloox * * This software is distributed under a license. The full license * agreement can be found in the file LICENSE in this distribution. * This software may not be copied, modified, sold or distributed * other than expressed in the named license agreement. * * This software is distributed without any warranty. */ #include "../../tag.h" #include "../../util.h" using namespace gloox; #include <stdio.h> #include <locale.h> #include <string> #include <cstdio> // [s]print[f] int main( int /*argc*/, char** /*argv*/ ) { int fail = 0; std::string name; Tag *t = new Tag( "toe" ); t->addAttribute( "foo", "bar" ); Tag *u = new Tag( t, "uni" ); u->addAttribute( "u3", "3u" ); Tag *v = new Tag( t, "vie" ); v->addAttribute( "v3", "3v" ); Tag *v2 = new Tag( t, "vie" ); v->addAttribute( "v32", "3v2" ); Tag *w = new Tag( u, "who" ); w->addAttribute( "w3", "3w" ); Tag *x = new Tag( v, "xep" ); x->addAttribute( "x3", "3x" ); Tag *y = new Tag( u, "yps" ); y->addAttribute( "y3", "3y" ); Tag *z = new Tag( w, "zoo" ); z->addAttribute( "z3", "3z" ); Tag *c = 0; Tag *d = 0; // ------- name = "simple ctor"; if( t->name() != "toe" ) { ++fail; fprintf( stderr, "test '%s' failed\n", name.c_str() ); } // ------- name = "cdata ctor"; c = new Tag( "cod", "foobar" ); if( c->name() != "cod" || c->cdata() != "foobar" ) { ++fail; fprintf( stderr, "test '%s' failed\n", name.c_str() ); } delete c; c = 0; //------- name = "clone test 1"; c = z->clone(); if( *z != *c ) { ++fail; fprintf( stderr, "test '%s' failed\n", name.c_str() ); } delete c; c = 0; //------- name = "clone test 2"; c = t->clone(); if( *t != *c ) { ++fail; fprintf( stderr, "test '%s' failed\n", name.c_str() ); } delete c; c = 0; //------- name = "operator== test 1"; c = new Tag( "name" ); if( *t == *c ) { ++fail; fprintf( stderr, "test '%s' failed\n", name.c_str() ); } delete c; c = 0; //------- name = "operator== test 2"; c = new Tag( "test" ); c->addAttribute( "me", "help" ); c->addChild( new Tag( "yes" ) ); if( *t == *c ) { ++fail; fprintf( stderr, "test '%s' failed\n", name.c_str() ); } delete c; c = 0; //------- name = "operator== test 3"; c = new Tag( "hello" ); c->addAttribute( "test", "bacd" ); c->addChild( new Tag( "hello" ) ); d = new Tag( "hello" ); d->addAttribute( "test", "bacd" ); d->addChild( new Tag( "helloo" ) ); if( *d == *c ) { ++fail; fprintf( stderr, "test '%s' failed\n", name.c_str() ); } delete c; delete d; c = 0; d = 0; //------- name = "operator!= test 1"; c = new Tag( "hello" ); c->addAttribute( "test", "bacd" ); c->addChild( new Tag( "hello" ) ); d = new Tag( "hello" ); d->addAttribute( "test", "bacd" ); d->addChild( new Tag( "hello" ) ); if( *d != *c ) { ++fail; fprintf( stderr, "test '%s' failed\n", name.c_str() ); } delete c; delete d; c = 0; d = 0; //------- name = "findChildren test"; TagList l = t->findChildren( "vie" ); TagList::const_iterator it = l.begin(); if( l.size() != 2 || (*it) != v || *(++it) != v2 ) { ++fail; fprintf( stderr, "test '%s' failed\n", name.c_str() ); } //------- name = "util::escape"; if ( util::escape( "&<>'\"" ) != "&amp;&lt;&gt;&apos;&quot;" ) { ++fail; fprintf( stderr, "test '%s' failed\n", name.c_str() ); } //------- name = "xml() 1"; if( t->xml() != "<toe foo='bar'><uni u3='3u'><who w3='3w'><zoo z3='3z'/></who><yps y3='3y'/>" "</uni><vie v3='3v' v32='3v2'><xep x3='3x'/></vie><vie/></toe>" ) { ++fail; fprintf( stderr, 
"test '%s' failed: %s\n", name.c_str(), t->xml().c_str() ); } //------- name = "xml() 2"; t->addAttribute( "test", "bacd" ); if( t->xml() != "<toe foo='bar' test='bacd'><uni u3='3u'><who w3='3w'><zoo z3='3z'/></who><yps y3='3y'/>" "</uni><vie v3='3v' v32='3v2'><xep x3='3x'/></vie><vie/></toe>" ) { ++fail; fprintf( stderr, "test '%s' failed: %s\n", name.c_str(), t->xml().c_str() ); } //------- name = "hasChild 1"; if( !t->hasChild( "uni" ) || !t->hasChild( "vie" ) || !u->hasChild( "who" ) || !w->hasChild( "zoo" ) || !u->hasChild( "yps" ) ) { ++fail; fprintf( stderr, "test '%s' failed: %s\n", name.c_str(), t->xml().c_str() ); } //------- name = "hasAttribute 1"; if( !t->hasAttribute( "test" ) || !t->hasAttribute( "test", "bacd" ) || !t->hasAttribute( "foo" ) || !t->hasAttribute( "foo", "bar" ) ) { ++fail; fprintf( stderr, "test '%s' failed: %s\n", name.c_str(), t->xml().c_str() ); } //------- name = "findAttribute 1"; if( t->findAttribute( "test" ) != "bacd" || t->findAttribute( "foo" ) != "bar" ) { ++fail; fprintf( stderr, "test '%s' failed: %s\n", name.c_str(), t->xml().c_str() ); } //------- name = "findChild 1"; c = t->findChild( "uni" ); if( c != u ) { ++fail; fprintf( stderr, "test '%s' failed: %s\n", name.c_str(), t->xml().c_str() ); } //------- name = "findChild 2"; c = t->findChild( "uni", "u3" ); if( c != u ) { ++fail; fprintf( stderr, "test '%s' failed: %s\n", name.c_str(), t->xml().c_str() ); } //------- name = "findChild 3"; c = t->findChild( "uni", "u3", "3u" ); if( c != u ) { ++fail; fprintf( stderr, "test '%s' failed: %s\n", name.c_str(), t->xml().c_str() ); } //------- name = "findChildWithAttrib 1"; c = t->findChildWithAttrib( "u3" ); if( c != u ) { ++fail; fprintf( stderr, "test '%s' failed: %s\n", name.c_str(), t->xml().c_str() ); } //------- name = "findChildWithAttrib 2"; c = t->findChildWithAttrib( "u3", "3u" ); if( c != u ) { ++fail; fprintf( stderr, "test '%s' failed: %s\n", name.c_str(), t->xml().c_str() ); } //------- name = "attribute order"; c = new Tag( "abc" ); c->addAttribute( "abc", "def" ); c->addAttribute( "xyz", "123" ); d = c->clone(); if( *c != *d ) { ++fail; fprintf( stderr, "test '%s' failed: %s\n", name.c_str(), d->xml().c_str() ); } delete c; c = 0; delete d; d = 0; //------- name = "mixed content 1"; c = new Tag( "abc" ); c->addCData( "cdata1" ); new Tag( c, "fgh" ); c->addCData( "cdata2" ); new Tag( c, "xyz" ); c->addCData( "cdata3" ); if( c->xml() != "<abc>cdata1<fgh/>cdata2<xyz/>cdata3</abc>" ) { ++fail; fprintf( stderr, "test '%s' failed: %s\n", name.c_str(), c->xml().c_str() ); } delete c; c = 0; //------- name = "operator bool()"; Tag tag1( "" ); if( tag1 ) { ++fail; fprintf( stderr, "test '%s' failed: %s\n", name.c_str(), tag1.xml().c_str() ); } //------- name = "bool operator!()"; Tag tag2( "abc" ); if( !tag2 ) { ++fail; fprintf( stderr, "test '%s' failed: %s\n", name.c_str(), d->xml().c_str() ); } //------- { name = "simple xmlns"; Tag t( "abc" ); t.setXmlns( "foo" ); if( t.xml() != "<abc xmlns='foo'/>" ) {<|fim▁hole|> ++fail; fprintf( stderr, "test '%s' failed: %s\n", name.c_str(), t.xml().c_str() ); } } //------- { name = "deep xmlns"; Tag t( "abc" ); Tag* f = new Tag( &t, "def" ); f = new Tag( f, "ghi" ); t.setXmlns( "foo" ); if( t.xml() != "<abc xmlns='foo'><def><ghi/></def></abc>" ) { ++fail; fprintf( stderr, "test '%s' failed: %s\n", name.c_str(), t.xml().c_str() ); } } //------- { name = "simple nested xmlns 2"; Tag t( "abc" ); t.setXmlns( "foo" ); Tag* d = new Tag( &t, "def" ); d->setXmlns( "foobar", "xyz" ); d->setPrefix( "xyz" ); 
if( t.xml() != "<abc xmlns='foo'><xyz:def xmlns:xyz='foobar'/></abc>" ) { ++fail; fprintf( stderr, "test '%s' failed: %s\n", name.c_str(), t.xml().c_str() ); } } //------- { name = "attribute with xmlns"; Tag t( "abc" ); t.setXmlns( "foo", "xyz" ); Tag::Attribute* a = new Tag::Attribute( "foo", "bar", "foo" ); a->setPrefix( "xyz" ); t.addAttribute( a ); if( t.xml() != "<abc xmlns:xyz='foo' xyz:foo='bar'/>" ) { ++fail; fprintf( stderr, "test '%s' failed: %s\n", name.c_str(), t.xml().c_str() ); } } //------- { name = "escape attribute value"; Tag t( "foo", "abc", "&amp;" ); if( t.xml() != "<foo abc='&amp;amp;'/>" ) { ++fail; fprintf( stderr, "test '%s' failed: %s\n", name.c_str(), t.xml().c_str() ); } } //------- { name = "remove child 1"; Tag t( "foo" ); t.addChild( new Tag( "test", "xmlns", "foo" ) ); t.addChild( new Tag( "abc", "xmlns", "foobar" ) ); t.addAttribute( "attr1", "value1" ); t.addAttribute( "attr2", "value2" ); t.removeChild( "test" ); if( t.hasChild( "test" ) ) { ++fail; fprintf( stderr, "test '%s' failed: %s\n", name.c_str(), t.xml().c_str() ); } name = "remove child 2"; t.removeChild( "abc", "foobar" ); if( t.hasChild( "abc", "xmlns", "foobar" ) ) { ++fail; fprintf( stderr, "test '%s' failed: %s\n", name.c_str(), t.xml().c_str() ); } name = "remove attrib 1"; t.removeAttribute( "attr1" ); if( t.hasAttribute( "attr1", "value1") ) { ++fail; fprintf( stderr, "test '%s' failed: %s\n", name.c_str(), t.xml().c_str() ); } name = "remove attrib 2"; t.removeAttribute( "attr2", "value2" ); if( t.hasAttribute( "attr2", "value2") ) { ++fail; fprintf( stderr, "test '%s' failed: %s\n", name.c_str(), t.xml().c_str() ); } } //------- { name = "invalid chars 1"; Tag t( "foo" ); bool check = t.addAttribute( "nul", std::string( 1, 0x00 ) ); if( check || t.hasAttribute( "nul" ) ) { ++fail; fprintf( stderr, "test '%s' failed:%s\n", name.c_str(), t.xml().c_str() ); } } //------- { name = "invalid chars 2"; for( int i = 0; i <= 0xff; ++i ) { Tag::Attribute a( "test", std::string( 1, i ) ); if( ( i < 0x09 || i == 0x0b || i == 0x0c || ( i > 0x0d && i < 0x20 ) || i == 0xc0 || i == 0xc1 || i >= 0xf5 ) && a ) { ++fail; fprintf( stderr, "test '%s' (branch 1) failed (i == %02X)\n", name.c_str(), i ); } else if( ( i == 0x09 || i == 0x0a || i == 0x0d || ( i >= 0x20 && i < 0xc0 ) || ( i > 0xc1 && i < 0xf5 ) ) && !a ) { ++fail; fprintf( stderr, "test '%s' (branch 2) failed (i == %02X)\n", name.c_str(), i ); } // printf( "i: 0x%02X, a: %d, value: %s\n", i, (bool)a, std::string( 1, i ).c_str() ); } } delete t; t = 0; if( fail == 0 ) { printf( "Tag: OK\n" ); return 0; } else { fprintf( stderr, "Tag: %d test(s) failed\n", fail ); return 1; } }<|fim▁end|>
<|file_name|>Fpage.java<|end_file_name|><|fim▁begin|>// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2011.09.09 at 01:22:27 PM CEST // package test;<|fim▁hole|>import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlSchemaType; import javax.xml.bind.annotation.XmlType; import javax.xml.bind.annotation.XmlValue; /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;attribute name="content-type" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" /> * &lt;attribute name="seq" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" /> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "content" }) @XmlRootElement(name = "fpage") public class Fpage { @XmlValue protected String content; @XmlAttribute(name = "content-type") @XmlSchemaType(name = "anySimpleType") protected String contentType; @XmlAttribute @XmlSchemaType(name = "anySimpleType") protected String seq; /** * Gets the value of the content property. * * @return * possible object is * {@link String } * */ public String getContent() { return content; } /** * Sets the value of the content property. * * @param value * allowed object is * {@link String } * */ public void setContent(String value) { this.content = value; } /** * Gets the value of the contentType property. * * @return * possible object is * {@link String } * */ public String getContentType() { return contentType; } /** * Sets the value of the contentType property. * * @param value * allowed object is * {@link String } * */ public void setContentType(String value) { this.contentType = value; } /** * Gets the value of the seq property. * * @return * possible object is * {@link String } * */ public String getSeq() { return seq; } /** * Sets the value of the seq property. * * @param value * allowed object is * {@link String } * */ public void setSeq(String value) { this.seq = value; } }<|fim▁end|>
import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType;
<|file_name|>query.service.ts<|end_file_name|><|fim▁begin|>import { Injectable } from '@angular/core'; import {Query} from './model/query'; import {QueryCategory} from './model/query-category'; import {QueryPart} from './model/query-part'; @Injectable() export class QueryService { // String to separate category-names an the values private categoryValueSeparator = ': '; /** * Creates a query-object from a query-string. The string can have the following syntax: * <CategoryName1>: <Value1> <CategoryName2>: <Value2> * * If the query-string starts with a string that is not in the list of categories, the query-object will have a part * with a null-category and the string as value. * * @param categories * @param queryString * @returns */ public getQueryFromString(categories: Array<QueryCategory>, queryString: string): Query { const queryParts: Array<QueryPart> = []; let remainingQueryString: string = queryString; while (true) { let lastPart: QueryPart; [lastPart, remainingQueryString] = this.popLastQueryPartFromString(categories, remainingQueryString); if (lastPart === null) { if (remainingQueryString.length > 0) { queryParts.unshift(new QueryPart(null, remainingQueryString)); } break; } queryParts.unshift(lastPart); } return new Query(queryParts); } /** * Extracts the last query-part and returns it and the shortened query-string * * @param categories * @param queryString * @returns */ private popLastQueryPartFromString(categories: Array<QueryCategory>, queryString: string): [QueryPart, string] { const lastPartRegexString = '([^\\s"\']*|("([^"]*)")|(\'([^\']*)\'))$'; // Try to match categories or the default category for (const category of categories.concat([null])) { const categoryPart = category ? category.name + this.categoryValueSeparator.trim() + '\\s*' : ''; const regexStr = categoryPart + lastPartRegexString; const regex = new RegExp(regexStr); const match = queryString.trim().match(regex); if (match && match[0].length > 0) { // Pick the correct match to not have quotes in result string const value = match[5] || match[3] || match[1] || ''; const queryPart = new QueryPart(category, value); const remainingQueryString = queryString.trim().replace(regex, '').trim(); return [queryPart, remainingQueryString]; } } return [null, queryString.trim()]; } /** * Appends the provided query-part to the query-string and returns the combined query-string. * * @param categories * @param queryString * @param appendPart */<|fim▁hole|> let newQuery; // If the current query has no last part it can be fully replaced if (!lastPart) { newQuery = ''; // If the category of the last part matches to one to be appended, it means that only the value should be updated } else if (lastPart.category === appendPart.category) { newQuery = remainingQueryString; // The category is different, so a new one will be added } else { newQuery = queryString; if (appendPart.category) { // Remove the beginning of the category-name if it was typed const categoryName = appendPart.category.name; for (let i = categoryName.length; i > 0 ; i--) { if (newQuery.toLowerCase().endsWith(categoryName.toLowerCase().substr(0, i))) { newQuery = newQuery.slice(0, -i); } } } } // Trim the query an add a whitespace only if the query is not empty newQuery = newQuery.trim(); newQuery += newQuery.length > 0 ? ' ' : ''; const value = appendPart.value.indexOf(' ') === -1 ? appendPart.value : '"' + appendPart.value + '"'; // Now that the current query is cleaned up, the actual append can start newQuery += (appendPart.category ? 
(appendPart.category.name + this.categoryValueSeparator) : '') + value; return newQuery; } }<|fim▁end|>
public appendQueryPartToQueryString(categories: Array<QueryCategory>, queryString: string, appendPart: QueryPart) { let lastPart: QueryPart, remainingQueryString: string; [lastPart, remainingQueryString] = this.popLastQueryPartFromString(categories, queryString);
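# Editor's sketch, not part of the dataset row above: a Python rendering of the
# "<CategoryName>: <value>" query syntax that query.service.ts parses, with
# quoted values allowed. The category/value separator comes from the row; the
# helper name and the sample inputs are assumptions.
import re

def pop_last_query_part(categories, query):
    """Return ((category, value), remaining) for the trailing query part."""
    query = query.strip()
    last_part = r'([^\s"\']*|"[^"]*"|\'[^\']*\')$'
    for category in categories + [None]:
        prefix = re.escape(category + ':') + r'\s*' if category else ''
        match = re.search(prefix + last_part, query)
        if match and match.group(0):
            value = match.group(1).strip('"\'')
            return (category, value), query[:match.start()].strip()
    return None, query

print(pop_last_query_part(['author'], 'fix bug author: "Jane Doe"'))
# (('author', 'Jane Doe'), 'fix bug')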
<|file_name|>ClickAndSeek.js<|end_file_name|><|fim▁begin|>// ga.addEventBehavior(ga.gameEvents.MouseDown, undefined, undefined, undefined, function (e) { // var spriteClick = ga.CheckEventPosition(e.offsetX, e.offsetY); // if (spriteClick != undefined) { // if (this.lastClick != undefined) { // this.lastClick.unHighLight(); // } // this.lastClick = spriteClick; // spriteClick.highLight(0, 0, 0, 0, 255, 0, 0, 0); // var parentObj = this; // setTimeout(function () { // var seeker = new Seeker("walking", "attacking", 100); // ga.addEventBehavior(ga.gameEvents.MouseDown, "", spriteClick, "walking", function (e, sprite, engine) { // if (sprite == parentObj.lastClick) { <|fim▁hole|>// }, -1); // seeker.setFoundCallback(function (sprite) { // sprite.unHighLight(); // ga.removeEventBehavior(ga.gameEvents.MouseDown, sprite); // }); // }, 100); // } // }, 1);<|fim▁end|>
// seeker.execute(e, sprite, engine); // }
<|file_name|>hitter.py<|end_file_name|><|fim▁begin|>from hashlib import sha256 from .etl import ETL from kombu.mixins import ConsumerMixin from kombu import Connection import traceback import Queue import json import time import pytz from datetime import datetime from tzlocal import get_localzone import socket import logging import os class KnownHosts(object): HOST_FILE = "/etc/hosts" def __init__(self, filename=HOST_FILE): self.filename = filename try: os.stat(self.filename) except: raise self.mapping = self.read_hosts_file(filename) @classmethod def read_hosts_file(cls, filename): mapping = {} for line in open(filename).readlines(): if line.strip() == '': continue elif line.strip().find('#') == 0: continue elif len(line.split()) < 2: continue l = line.strip() ip = l.split()[0] host_names = l.split()[1:] if len(host_names) == 0: continue # FIXME this means the expected mapping[ip] = host<|fim▁hole|> ip_host_mappings = [(ip, h) for h in host_names] for ip, host in ip_host_mappings: mapping[host.strip()] = ip.strip() mapping[ip.strip()] = host.strip() return mapping def is_ip(self, ip): # FIXME track down a regex and use that d = ip.split('.') if len(d) != 3: return False if not all([i.isdigit() for i in d]): return False if not all([int(i, 10) >= 0 for i in d]): return False if not all([int(i, 10) <= 255 for i in d]): return False return True def resolve_host(self, ip_host): if ip_host in self.mapping and \ not self.is_ip(ip_host): return self.mapping[ip_host] name = ip_host try: name, _, _ = socket.gethostbyname(ip_host) self.mapping[ip_host] = name self.mapping[name] = ip_host except: name = ip_host self.mapping[ip_host] = name return name class HitterService(ConsumerMixin): NAME = 'processor' BROKER_URI = "redis://127.0.0.1:6379" BROKER_QUEUE = "mystified-catcher" KNOWN_HOSTS = KnownHosts() LOGSTASH_QUEUE = "logstash-results" SYSLOG_MSG_TYPE = { 0: "EMERGENCY", 1: "ALERT", 2: "CRITICAL", 3: "ERROR", 4: "WARNING", 5: "NOTICE", 6: "INFORMATIONAL", 7: "DEBUG", } MY_TZ = os.environ.get('CATCHER_TZ', 'NOT_SET') TZ_INFO = pytz.timezone(MY_TZ) if MY_TZ != 'NOT_SET' else None def __init__(self, broker_uri=BROKER_URI, broker_queue=BROKER_QUEUE, hosts_file=None, mongo_backend=None, etl_backend=ETL, msg_limit=100, # leaving it open to use kombu to buffer messages store_uri=BROKER_URI, store_queue=LOGSTASH_QUEUE): if hosts_file is not None: self.KNOWN_HOSTS = KnownHosts(filename=hosts_file) self.broker_uri = broker_uri self.broker_queue = broker_queue self.store_uri = store_uri self.store_queue = store_queue self.mongo_backend = mongo_backend self.etl_backend = etl_backend self.keep_running = False self.msg_limit = msg_limit @classmethod def split_alert_message(cls, data): t = '' msg = data end = data.find('>') start = data.find('<') if len(data) < end+1: return '', msg if start == 0 and end > 0 and end < 10: t = data[start+1:end] if not t.isdigit(): return '', data else: msg = data[end+1:] return t, msg @classmethod def calculate_msg_type(cls, data): t, msg = cls.split_alert_message(data) if len(t) == 0: return "UNKNOWN" v = int(t, 10) if v > 7: v &= 0x7 return cls.SYSLOG_MSG_TYPE[v] @classmethod def format_timestamp(self, tstamp): if self.TZ_INFO is not None: local_tz = self.TZ_INFO.localize(tstamp, is_dst=None) utc_tz = local_tz.astimezone(pytz.utc) return str(utc_tz.strftime("%Y-%m-%dT%H:%M:%S") +\ ".%03d" % (tstamp.microsecond / 1000) + "Z") return str(tstamp.strftime("%Y-%m-%dT%H:%M:%S") +\ ".%03d" % (tstamp.microsecond / 1000)) @classmethod def get_base_json(cls, syslog_msg, 
syslog_server_ip, catcher_name, catcher_host, catcher_tz): r = {'source': "syslog", 'raw': syslog_msg, 'type': 'json', '_id': sha256(syslog_msg).hexdigest(), '@timestamp': cls.format_timestamp(datetime.now()), '@version': "1", 'message': "transformed syslog", 'path': '', 'tags': [], 'catcher_tz': catcher_tz, 'catcher_host': catcher_host, 'catcher_name': catcher_name } t, msg = cls.split_alert_message(syslog_msg) r['syslog_level'] = cls.calculate_msg_type(syslog_msg) r['syslog_msg'] = msg r['syslog_tag'] = t r['syslog_server'] = cls.resolve_host(syslog_server_ip) r['syslog_server_ip'] = syslog_server_ip r['syslog_catcher'] = catcher_name return r @classmethod def resolve_host(cls, ip_host): return cls.KNOWN_HOSTS.resolve_host(ip_host) def process_message(self, syslog_msg, syslog_server_ip, catcher_name, catcher_host, catcher_tz): m = "Extracting and converting msg from %s msg (syslog: %s)" % (syslog_server_ip, catcher_name) logging.debug(m) r = self.get_base_json(syslog_msg, syslog_server_ip, catcher_name, catcher_host, catcher_tz) sm = {} try: result = self.etl_backend.syslog_et(syslog_msg) sm.update(result.get('rule_results', result)) if 'rule_name' in result: sm['rule_name'] = result.get('rule_name') sm['tags'] = [] if sm.get('syslog_level', None) is not None: sm['tags'].append(sm['syslog_level']) if sm.get('rule_name', None) is not None: sm['tags'].append(sm['rule_name']) except: tb = traceback.format_exc() logging.debug("[XXX] Error: "+tb) r.update(sm) return r def extract_message_components(self, msg_dict): syslog_msg = msg_dict.get('syslog_msg', '') syslog_server_ip = msg_dict.get('syslog_server_ip', '') catcher_host = msg_dict.get('catcher_host', '') catcher_name = msg_dict.get('catcher_name', '') catcher_tz = msg_dict.get('catcher_tz', str(get_localzone())) return self.process_message(syslog_msg, syslog_server_ip, catcher_name, catcher_host, catcher_tz) def process_and_report(self, incoming_msg): logging.debug("Processing and report syslog_msg") message = incoming_msg if isinstance(incoming_msg, str): try: message = json.loads(incoming_msg) except: message = {} tb = traceback.format_exc() logging.debug("[XXX] Error: "+tb) raise etl_data = self.extract_message_components(message) syslog_msg = etl_data['raw'] self.store_results(syslog_msg, etl_data) return etl_data def _read_messages(self, uri, queue, callback=None, cnt=1): msgs = [] read_all = False if cnt < 1: read_all = True try: logging.debug("Reading the messages") with Connection(uri) as conn: q = conn.SimpleQueue(queue) while cnt > 0 or read_all: cnt += -1 try: message = q.get(block=False) if callback is not None: data = callback(message.payload) msgs.append(data) logging.debug("made it here 2") logging.debug(data) message.ack() except Queue.Empty: logging.debug("%s queue is empty" % queue) break except: tb = traceback.format_exc() logging.debug("[XXX] Error: "+tb) logging.debug("Successfully read %d messages" % len(msgs)) except: tb = traceback.format_exc() logging.debug("[XXX] Error: "+tb) logging.debug("Failed to read message") return msgs def store_mongo(self, syslog_msg, etl_data): if self.mongo_backend is not None: m = "Sending results to mongo" logging.debug(m) raw_insert, json_insert = self.mongo_backend.insert( syslog_msg, etl_data) if not raw_insert: logging.debug("Failed to insert the raw syslog information in mongo") if not json_insert: logging.debug("Failed to insert the processed syslog information in mongo") def store_kombu(self, etl_data): logging.debug("Storing message in logstash queue") try: with 
Connection(self.store_uri) as conn: q = conn.SimpleQueue(self.store_queue) q.put(etl_data) q.close() logging.debug("Storing message in logstash success") except: tb = traceback.format_exc() logging.debug("[XXX] Error: "+tb) logging.debug("Storing message in logstash queue failed") def store_results(self, syslog_msg, etl_data): self.store_mongo(syslog_msg, etl_data) self.store_kombu(etl_data) def read_messages(self): msgs = self._read_messages(self.broker_uri, self.broker_queue, cnt=self.msg_limit, callback=self.process_and_report) return msgs def serve_forever(self, poll_interval=1.0): self.keep_running = True while self.keep_running: try: self.read_messages() time.sleep(poll_interval) except KeyboardInterrupt: break<|fim▁end|>
# may not be right
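# Editor's sketch (assumed helper, not from the row above): the "<N>" prefix
# that hitter.py's split_alert_message/calculate_msg_type handle is the
# RFC 3164 syslog priority, where severity = N & 0x7 and facility = N >> 3.
SEVERITIES = ["EMERGENCY", "ALERT", "CRITICAL", "ERROR",
              "WARNING", "NOTICE", "INFORMATIONAL", "DEBUG"]

def decode_priority(msg):
    if msg.startswith("<") and ">" in msg[:6]:
        pri = msg[1:msg.index(">")]
        if pri.isdigit():
            n = int(pri)
            return n >> 3, SEVERITIES[n & 0x7], msg[msg.index(">") + 1:]
    return None, "UNKNOWN", msg

print(decode_priority("<34>su: auth failure"))
# (4, 'CRITICAL', 'su: auth failure')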
<|file_name|>length.js<|end_file_name|><|fim▁begin|>// Copyright (C) 2015 the V8 project authors. All rights reserved. // This code is governed by the BSD license found in the LICENSE file.<|fim▁hole|>info: > The length property of the @@split method is 2. ES6 Section 17: [...] Unless otherwise specified, the length property of a built-in Function object has the attributes { [[Writable]]: false, [[Enumerable]]: false, [[Configurable]]: true }. includes: [propertyHelper.js] ---*/ assert.sameValue(RegExp.prototype[Symbol.split].length, 2); verifyNotEnumerable(RegExp.prototype[Symbol.split], 'length'); verifyNotWritable(RegExp.prototype[Symbol.split], 'length'); verifyConfigurable(RegExp.prototype[Symbol.split], 'length');<|fim▁end|>
/*--- es6id: 21.2.5.11 description: RegExp.prototype[Symbol.split] `length` property
<|file_name|>node-debug.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1 oid sha256:49c54ee863855e8fa7d43bdb5142596122609269a2e98c9a92e10dffcda1376d<|fim▁hole|><|fim▁end|>
size 65177
<|file_name|>image.py<|end_file_name|><|fim▁begin|>import logging import six import warnings from ..auth import auth from ..constants import INSECURE_REGISTRY_DEPRECATION_WARNING from .. import utils from .. import errors log = logging.getLogger(__name__) class ImageApiMixin(object): @utils.check_resource def get_image(self, image): res = self._get(self._url("/images/{0}/get", image), stream=True) self._raise_for_status(res) return res.raw @utils.check_resource def history(self, image): res = self._get(self._url("/images/{0}/history", image)) return self._result(res, True) def images(self, name=None, quiet=False, all=False, viz=False, filters=None): if viz: if utils.compare_version('1.7', self._version) >= 0: raise Exception('Viz output is not supported in API >= 1.7!') return self._result(self._get(self._url("images/viz"))) params = { 'filter': name, 'only_ids': 1 if quiet else 0, 'all': 1 if all else 0, } if filters: params['filters'] = utils.convert_filters(filters) res = self._result(self._get(self._url("/images/json"), params=params), True) if quiet: return [x['Id'] for x in res] return res def import_image(self, src=None, repository=None, tag=None, image=None): if src: if isinstance(src, six.string_types): try: result = self.import_image_from_file( src, repository=repository, tag=tag) except IOError: result = self.import_image_from_url( src, repository=repository, tag=tag) else: result = self.import_image_from_data( src, repository=repository, tag=tag) elif image: result = self.import_image_from_image(<|fim▁hole|> raise Exception("Must specify a src or image") return result def import_image_from_data(self, data, repository=None, tag=None): u = self._url("/images/create") params = { 'fromSrc': '-', 'repo': repository, 'tag': tag } headers = { 'Content-Type': 'application/tar', } return self._result( self._post(u, data=data, params=params, headers=headers)) def import_image_from_file(self, filename, repository=None, tag=None): u = self._url("/images/create") params = { 'fromSrc': '-', 'repo': repository, 'tag': tag } headers = { 'Content-Type': 'application/tar', } with open(filename, 'rb') as f: return self._result( self._post(u, data=f, params=params, headers=headers, timeout=None)) def import_image_from_stream(self, stream, repository=None, tag=None): u = self._url("/images/create") params = { 'fromSrc': '-', 'repo': repository, 'tag': tag } headers = { 'Content-Type': 'application/tar', 'Transfer-Encoding': 'chunked', } return self._result( self._post(u, data=stream, params=params, headers=headers)) def import_image_from_url(self, url, repository=None, tag=None): u = self._url("/images/create") params = { 'fromSrc': url, 'repo': repository, 'tag': tag } return self._result( self._post(u, data=None, params=params)) def import_image_from_image(self, image, repository=None, tag=None): u = self._url("/images/create") params = { 'fromImage': image, 'repo': repository, 'tag': tag } return self._result( self._post(u, data=None, params=params)) @utils.check_resource def insert(self, image, url, path): if utils.compare_version('1.12', self._version) >= 0: raise errors.DeprecatedMethod( 'insert is not available for API version >=1.12' ) api_url = self._url("/images/{0}/insert", image) params = { 'url': url, 'path': path } return self._result(self._post(api_url, params=params)) @utils.check_resource def inspect_image(self, image): return self._result( self._get(self._url("/images/{0}/json", image)), True ) def load_image(self, data): res = self._post(self._url("/images/load"), data=data) 
self._raise_for_status(res) def pull(self, repository, tag=None, stream=False, insecure_registry=False, auth_config=None): if insecure_registry: warnings.warn( INSECURE_REGISTRY_DEPRECATION_WARNING.format('pull()'), DeprecationWarning ) if not tag: repository, tag = utils.parse_repository_tag(repository) registry, repo_name = auth.resolve_repository_name(repository) params = { 'tag': tag, 'fromImage': repository } headers = {} if utils.compare_version('1.5', self._version) >= 0: # If we don't have any auth data so far, try reloading the config # file one more time in case anything showed up in there. if auth_config is None: log.debug('Looking for auth config') if not self._auth_configs: log.debug( "No auth config in memory - loading from filesystem" ) self._auth_configs = auth.load_config() authcfg = auth.resolve_authconfig(self._auth_configs, registry) # Do not fail here if no authentication exists for this # specific registry as we can have a readonly pull. Just # put the header if we can. if authcfg: log.debug('Found auth config') # auth_config needs to be a dict in the format used by # auth.py username , password, serveraddress, email headers['X-Registry-Auth'] = auth.encode_header( authcfg ) else: log.debug('No auth config found') else: log.debug('Sending supplied auth config') headers['X-Registry-Auth'] = auth.encode_header(auth_config) response = self._post( self._url('/images/create'), params=params, headers=headers, stream=stream, timeout=None ) self._raise_for_status(response) if stream: return self._stream_helper(response) return self._result(response) def push(self, repository, tag=None, stream=False, insecure_registry=False): if insecure_registry: warnings.warn( INSECURE_REGISTRY_DEPRECATION_WARNING.format('push()'), DeprecationWarning ) if not tag: repository, tag = utils.parse_repository_tag(repository) registry, repo_name = auth.resolve_repository_name(repository) u = self._url("/images/{0}/push", repository) params = { 'tag': tag } headers = {} if utils.compare_version('1.5', self._version) >= 0: # If we don't have any auth data so far, try reloading the config # file one more time in case anything showed up in there. if not self._auth_configs: self._auth_configs = auth.load_config() authcfg = auth.resolve_authconfig(self._auth_configs, registry) # Do not fail here if no authentication exists for this specific # registry as we can have a readonly pull. Just put the header if # we can. if authcfg: headers['X-Registry-Auth'] = auth.encode_header(authcfg) response = self._post_json( u, None, headers=headers, stream=stream, params=params ) self._raise_for_status(response) if stream: return self._stream_helper(response) return self._result(response) @utils.check_resource def remove_image(self, image, force=False, noprune=False): params = {'force': force, 'noprune': noprune} res = self._delete(self._url("/images/{0}", image), params=params) self._raise_for_status(res) def search(self, term): return self._result( self._get(self._url("/images/search"), params={'term': term}), True ) @utils.check_resource def tag(self, image, repository, tag=None, force=False): params = { 'tag': tag, 'repo': repository, 'force': 1 if force else 0 } url = self._url("/images/{0}/tag", image) res = self._post(url, params=params) self._raise_for_status(res) return res.status_code == 201<|fim▁end|>
image, repository=repository, tag=tag) else:
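# Editor's illustration only (a simplified stand-in, not the docker-py API):
# import_image in the row above picks a source the same way -- try the string
# as a local file first, and fall back to treating it as a URL when open()
# raises IOError.
def classify_import_source(src):
    if isinstance(src, str):
        try:
            with open(src, 'rb'):
                return 'file'
        except IOError:
            return 'url'
    return 'data'

print(classify_import_source('http://example.com/image.tar'))  # 'url'
print(classify_import_source(b'\x1f\x8b'))                      # 'data'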
<|file_name|>position.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! CSS handling for the specified value of //! [`position`][position]s //! //! [position]: https://drafts.csswg.org/css-backgrounds-3/#position use app_units::Au; use cssparser::Parser; use parser::{Parse, ParserContext}; use properties::longhands::parse_origin; use std::mem; use values::Either; use values::computed::{CalcLengthOrPercentage, Context}; use values::computed::{LengthOrPercentage as ComputedLengthOrPercentage, ToComputedValue}; use values::computed::position as computed_position; use values::generics::position::{Position as GenericPosition, PositionValue, PositionWithKeyword}; use values::generics::position::HorizontalPosition as GenericHorizontalPosition; use values::generics::position::VerticalPosition as GenericVerticalPosition; use values::specified::{AllowQuirks, LengthOrPercentage, Percentage}; pub use values::generics::position::Keyword; /// The specified value of a CSS `<position>` pub type Position = PositionWithKeyword<PositionValue<LengthOrPercentage>>; /// The specified value for `<position>` values without a keyword. pub type OriginPosition = GenericPosition<LengthOrPercentage, LengthOrPercentage>; impl Parse for OriginPosition { fn parse(context: &ParserContext, input: &mut Parser) -> Result<Self, ()> { let result = parse_origin(context, input)?; match result.depth { Some(_) => Err(()), None => Ok(GenericPosition { horizontal: result.horizontal.unwrap_or(LengthOrPercentage::Percentage(Percentage(0.5))), vertical: result.vertical.unwrap_or(LengthOrPercentage::Percentage(Percentage(0.5))), }) } } } type PositionComponent = Either<LengthOrPercentage, Keyword>; impl Position { /// Create a new position value from either a length or a keyword. pub fn from_components(mut first_position: Option<PositionComponent>, mut second_position: Option<PositionComponent>, first_keyword: Option<PositionComponent>, second_keyword: Option<PositionComponent>) -> Result<Position, ()> { // Unwrap for checking if values are at right place. let first_key = first_keyword.clone().unwrap_or(Either::Second(Keyword::Left)); let second_key = second_keyword.clone().unwrap_or(Either::Second(Keyword::Top)); let (horiz_keyword, vert_keyword) = match (&first_key, &second_key) { // Check if a position is specified after center keyword. (&Either::Second(Keyword::Center), _) if first_position.is_some() => return Err(()), (_, &Either::Second(Keyword::Center)) if second_position.is_some() => return Err(()), // Check first and second keywords for both 2 and 4 value positions. // FIXME(canaltinova): Allow logical keywords for Position. They are not in current spec yet. (&Either::Second(k), _) if k.is_logical() => return Err(()), (_, &Either::Second(k)) if k.is_logical() => return Err(()), // Don't allow two vertical keywords or two horizontal keywords. (&Either::Second(k1), &Either::Second(k2)) if (k1.is_horizontal() && k2.is_horizontal()) || (k1.is_vertical() && k2.is_vertical()) => return Err(()), // Also don't allow <length-percentage> values in the wrong position (&Either::First(_), &Either::Second(k)) if k.is_horizontal() => return Err(()), (&Either::Second(k), &Either::First(_)) if k.is_vertical() => return Err(()), // Swap if both are keywords and vertical precedes horizontal. 
(&Either::Second(k1), &Either::Second(k2)) if (k1.is_vertical() && k2.is_horizontal()) || (k1.is_vertical() && k2 == Keyword::Center) || (k1 == Keyword::Center && k2.is_horizontal()) => { mem::swap(&mut first_position, &mut second_position); (second_keyword, first_keyword) }, // By default, horizontal is first. _ => (first_keyword, second_keyword), }; let (mut h_pos, mut h_key, mut v_pos, mut v_key) = (None, None, None, None); if let Some(Either::First(l)) = first_position { h_pos = Some(l); } if let Some(Either::First(l)) = second_position { v_pos = Some(l); } if let Some(Either::Second(k)) = horiz_keyword { h_key = Some(k); } if let Some(Either::Second(k)) = vert_keyword { v_key = Some(k); } Ok(Position { horizontal: GenericHorizontalPosition(PositionValue { keyword: h_key, position: h_pos, }), vertical: GenericVerticalPosition(PositionValue { keyword: v_key, position: v_pos, }), }) } /// Returns a "centered" position, as in "center center". pub fn center() -> Position { Position { horizontal: GenericHorizontalPosition(PositionValue { keyword: Some(Keyword::Center), position: None, }), vertical: GenericVerticalPosition(PositionValue { keyword: Some(Keyword::Center), position: None, }), } } } impl Parse for Position { fn parse(context: &ParserContext, input: &mut Parser) -> Result<Self, ()> { Position::parse_quirky(context, input, AllowQuirks::No) } } impl Position { /// Parses, with quirks. pub fn parse_quirky(context: &ParserContext, input: &mut Parser, allow_quirks: AllowQuirks) -> Result<Self, ()> { let first = input.try(|i| PositionComponent::parse_quirky(context, i, allow_quirks))?; let second = input.try(|i| PositionComponent::parse_quirky(context, i, allow_quirks)) .unwrap_or(Either::Second(Keyword::Center)); if let Ok(third) = input.try(|i| PositionComponent::parse_quirky(context, i, allow_quirks)) { // There's a 3rd value. if let Ok(fourth) = input.try(|i| PositionComponent::parse_quirky(context, i, allow_quirks)) { // There's a 4th value. Position::from_components(Some(second), Some(fourth), Some(first), Some(third)) } else { // For 3 value background position, there are several options. if let Either::First(_) = first { return Err(()) // <length-percentage> must be preceded by <keyword> } // only 3 values. match (&second, &third) { (&Either::First(_), &Either::First(_)) => Err(()), // "keyword length keyword" (&Either::First(_), _) => Position::from_components(Some(second), None, Some(first), Some(third)), // "keyword keyword length" _ => Position::from_components(None, Some(third), Some(first), Some(second)), } } } else { // only 2 values. match (&first, &second) { (&Either::First(_), &Either::First(_)) => Position::from_components(Some(first), Some(second), None, None), (&Either::First(_), &Either::Second(_)) => Position::from_components(Some(first), None, None, Some(second)),<|fim▁hole|> (&Either::Second(_), &Either::First(_)) => Position::from_components(None, Some(second), Some(first), None), (&Either::Second(_), &Either::Second(_)) => Position::from_components(None, None, Some(first), Some(second)), } } } } impl PositionComponent { /// Parses, with quirks. fn parse_quirky(context: &ParserContext, input: &mut Parser, allow_quirks: AllowQuirks) -> Result<Self, ()> { input.try(|i| LengthOrPercentage::parse_quirky(context, i, allow_quirks)) .map(Either::First) .or_else(|()| input.try(Keyword::parse).map(Either::Second)) } } impl PositionValue<LengthOrPercentage> { /// Generic function for the computed value of a position. 
fn computed_value(&self, context: &Context) -> ComputedLengthOrPercentage { match self.keyword { Some(Keyword::Center) => ComputedLengthOrPercentage::Percentage(0.5), Some(k) if k.is_other_side() => match self.position { Some(ref x) => { let (length, percentage) = match *x { LengthOrPercentage::Percentage(Percentage(y)) => (Au(0), Some(1.0 - y)), LengthOrPercentage::Length(ref y) => (-y.to_computed_value(context), Some(1.0)), _ => (Au(0), None), }; ComputedLengthOrPercentage::Calc(CalcLengthOrPercentage { length: length, percentage: percentage }) }, None => ComputedLengthOrPercentage::Percentage(1.0), }, _ => self.position.as_ref().map(|l| l.to_computed_value(context)) .unwrap_or(ComputedLengthOrPercentage::Percentage(0.0)), } } } /// The specified value of horizontal `<position>` pub type HorizontalPosition = GenericHorizontalPosition<PositionValue<LengthOrPercentage>>; impl ToComputedValue for HorizontalPosition { type ComputedValue = computed_position::HorizontalPosition; #[inline] fn to_computed_value(&self, context: &Context) -> computed_position::HorizontalPosition { GenericHorizontalPosition(self.0.computed_value(context)) } #[inline] fn from_computed_value(computed: &computed_position::HorizontalPosition) -> HorizontalPosition { GenericHorizontalPosition(PositionValue { keyword: None, position: Some(ToComputedValue::from_computed_value(&computed.0)), }) } } impl HorizontalPosition { #[inline] /// Initial specified value for vertical position (`top` keyword). pub fn left() -> HorizontalPosition { GenericHorizontalPosition(PositionValue { keyword: Some(Keyword::Left), position: None, }) } } /// The specified value of vertical `<position>` pub type VerticalPosition = GenericVerticalPosition<PositionValue<LengthOrPercentage>>; impl ToComputedValue for VerticalPosition { type ComputedValue = computed_position::VerticalPosition; #[inline] fn to_computed_value(&self, context: &Context) -> computed_position::VerticalPosition { GenericVerticalPosition(self.0.computed_value(context)) } #[inline] fn from_computed_value(computed: &computed_position::VerticalPosition) -> VerticalPosition { GenericVerticalPosition(PositionValue { keyword: None, position: Some(ToComputedValue::from_computed_value(&computed.0)), }) } } impl VerticalPosition { #[inline] /// Initial specified value for vertical position (`top` keyword). pub fn top() -> VerticalPosition { GenericVerticalPosition(PositionValue { keyword: Some(Keyword::Top), position: None, }) } }<|fim▁end|>
<|file_name|>OrCRIF.java<|end_file_name|><|fim▁begin|>/* * $RCSfile: OrCRIF.java,v $ * <|fim▁hole|> * * $Revision: 1.1 $ * $Date: 2005/02/11 04:56:38 $ * $State: Exp $ */ package com.sun.media.jai.opimage; import java.awt.RenderingHints; import java.awt.geom.Rectangle2D; import java.awt.image.RenderedImage; import java.awt.image.renderable.RenderContext; import java.awt.image.renderable.ParameterBlock; import java.awt.image.renderable.RenderableImage; import javax.media.jai.CRIFImpl; import javax.media.jai.ImageLayout; import java.util.Map; /** * A <code>CRIF</code> supporting the "Or" operation in the * rendered and renderable image layers. * * @since EA2 * @see javax.media.jai.operator.OrDescriptor * @see OrOpImage * */ public class OrCRIF extends CRIFImpl { /** Constructor. */ public OrCRIF() { super("or"); } /** * Creates a new instance of <code>OrOpImage</code> in the * rendered layer. This method satisifies the implementation of RIF. * * @param paramBlock The two source images to be "Ored" together. * @param renderHints Optionally contains destination image layout. */ public RenderedImage create(ParameterBlock paramBlock, RenderingHints renderHints) { // Get ImageLayout from renderHints if any. ImageLayout layout = RIFUtil.getImageLayoutHint(renderHints); return new OrOpImage(paramBlock.getRenderedSource(0), paramBlock.getRenderedSource(1), renderHints, layout); } }<|fim▁end|>
* Copyright (c) 2005 Sun Microsystems, Inc. All rights reserved. * * Use is subject to license terms.
<|file_name|>server.js<|end_file_name|><|fim▁begin|>var http = require('http'); var fs = require('fs'); var url = require('url'); http.createServer( function (req, res) { var pathname = url.parse(req.url).pathname; if (pathname == '/data' && req.method == "POST") { req.setEncoding('utf8'); var t = new Date(); var body = ''; req.on('data', function(chunk){ body += chunk; console.log('data!');}); req.on('end', function () { var entry = { ts: t, d: body }; fs.appendFile('data.json', JSON.stringify(entry)); console.log("Recorded entry to data.json: " + JSON.stringify(entry)); }); res.writeHead(200); res.end(); } else { var fn = pathname.substr(1); var extn = pathname.slice (pathname.lastIndexOf('.')); var mimetype = 'none'; switch(pathname.slice( pathname.lastIndexOf('.'))) { case ".js": mimetype = "application/javascript"; break; case ".png": mimetype = "image/png"; break; case ".jpg": mimetype = "image/jpeg"; break; case ".html": mimetype = "text/html"; break; case ".css": mimetype = "text/css"; break; } fs.readFile(fn, function (err,data) { if (err) { console.log(err); res.writeHead(404,{'Content-Type':'text/html'}); res.end(); } else { res.writeHead(200, {'Content-Type':mimetype}); res.write(data); res.end(); } });<|fim▁hole|><|fim▁end|>
} }).listen(80);
<|file_name|>RepositoryIssueList.js<|end_file_name|><|fim▁begin|>// LICENSE : MIT "use strict"; import React from "react" global.React = require('react'); var md2react = require("md2react"); var todoRegexp = /^-\s*\[[x ]\]\s*/; function isTODO(line) { return todoRegexp.test(line); } function flatten([first, ...rest]) { if (first === undefined) { return []; } else if (!Array.isArray(first)) { return [first, ...flatten(rest)]; } else { return [...flatten(first), ...flatten(rest)]; } } export default class RepositoryIssueList extends React.Component { static get propTypes() { return { issues: React.PropTypes.array } } <|fim▁hole|> // if(!this.markdownContainer) { // return; // } // var list = this.markdownContainer.querySelectorAll("li.checked, li.unchecked"); // console.log(list); //} render() { if (this.props.issue == null) { return <div className="RepositoryIssueList"> <div className="markdown" ref="markdown"> </div> </div>; } var ownerSubTasks = this.props.issue.body.split("\n").filter(isTODO); var commentSubTasks = this.props.comments.map(function (comment) { return comment.body.split("\n").filter(isTODO); }); var subTasks = ownerSubTasks.concat(...commentSubTasks); var subTasksList = subTasks.join("\n"); return <div className="RepositoryIssueList"> <div className="markdown" ref="markdown"> {md2react(subTasksList, { tasklist: true })} </div> </div> } }<|fim▁end|>
//componentDidUpdate() { // this.markdownContainer = React.findDOMNode(this.refs.markdown);
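# Editor's sketch in Python of what the React component above does in JS:
# gather GitHub task-list lines ("- [ ]" / "- [x]") from an issue body and its
# comments; the sample inputs are made up.
import re

TODO = re.compile(r'^-\s*\[[x ]\]\s*')

def collect_subtasks(issue_body, comment_bodies):
    lines = issue_body.split("\n")
    for body in comment_bodies:
        lines.extend(body.split("\n"))
    return [line for line in lines if TODO.match(line)]

print(collect_subtasks("- [x] write tests\nsome prose", ["- [ ] ship it"]))
# ['- [x] write tests', '- [ ] ship it']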
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals import codecs import logging import sys from io import BytesIO from threading import Lock import warnings from django import http from django.conf import settings from django.core import signals from django.core.handlers import base from django.core.urlresolvers import set_script_prefix from django.utils import datastructures from django.utils.encoding import force_str, force_text from django.utils.functional import cached_property from django.utils import six # For backwards compatibility -- lots of code uses this in the wild! from django.http.response import REASON_PHRASES as STATUS_CODE_TEXT # NOQA logger = logging.getLogger('django.request') # encode() and decode() expect the charset to be a native string. ISO_8859_1, UTF_8 = str('iso-8859-1'), str('utf-8') class LimitedStream(object): ''' LimitedStream wraps another stream in order to not allow reading from it past specified amount of bytes. ''' def __init__(self, stream, limit, buf_size=64 * 1024 * 1024): self.stream = stream self.remaining = limit self.buffer = b'' self.buf_size = buf_size def _read_limited(self, size=None): if size is None or size > self.remaining: size = self.remaining if size == 0: return b'' result = self.stream.read(size) self.remaining -= len(result) return result def read(self, size=None): if size is None: result = self.buffer + self._read_limited() self.buffer = b'' elif size < len(self.buffer): result = self.buffer[:size] self.buffer = self.buffer[size:] else: # size >= len(self.buffer) result = self.buffer + self._read_limited(size - len(self.buffer)) self.buffer = b'' return result def readline(self, size=None): while b'\n' not in self.buffer and \ (size is None or len(self.buffer) < size): if size: # since size is not None here, len(self.buffer) < size chunk = self._read_limited(size - len(self.buffer)) else: chunk = self._read_limited() if not chunk: break self.buffer += chunk sio = BytesIO(self.buffer) if size: line = sio.readline(size) else: line = sio.readline() self.buffer = sio.read() return line class WSGIRequest(http.HttpRequest): def __init__(self, environ): script_name = get_script_name(environ) path_info = get_path_info(environ) if not path_info: # Sometimes PATH_INFO exists, but is empty (e.g. accessing # the SCRIPT_NAME URL without a trailing slash). We really need to # operate as if they'd requested '/'. Not amazingly nice to force # the path like this, but should be harmless. path_info = '/' self.environ = environ self.path_info = path_info self.path = '%s/%s' % (script_name.rstrip('/'), path_info.lstrip('/')) self.META = environ self.META['PATH_INFO'] = path_info self.META['SCRIPT_NAME'] = script_name self.method = environ['REQUEST_METHOD'].upper() _, content_params = self._parse_content_type(environ.get('CONTENT_TYPE', '')) if 'charset' in content_params: try: codecs.lookup(content_params['charset']) except LookupError: pass else: self.encoding = content_params['charset'] self._post_parse_error = False try: content_length = int(environ.get('CONTENT_LENGTH')) except (ValueError, TypeError): content_length = 0 self._stream = LimitedStream(self.environ['wsgi.input'], content_length) self._read_started = False self.resolver_match = None def _get_scheme(self): return self.environ.get('wsgi.url_scheme') def _parse_content_type(self, ctype): """ Media Types parsing according to RFC 2616, section 3.7. Returns the data type and parameters. 
For example: Input: "text/plain; charset=iso-8859-1" Output: ('text/plain', {'charset': 'iso-8859-1'}) """ content_type, _, params = ctype.partition(';') content_params = {} for parameter in params.split(';'): k, _, v = parameter.strip().partition('=') content_params[k] = v return content_type, content_params def _get_request(self): warnings.warn('`request.REQUEST` is deprecated, use `request.GET` or ' '`request.POST` instead.', PendingDeprecationWarning, 2) if not hasattr(self, '_request'): self._request = datastructures.MergeDict(self.POST, self.GET) return self._request @cached_property def GET(self): # The WSGI spec says 'QUERY_STRING' may be absent. raw_query_string = get_bytes_from_wsgi(self.environ, 'QUERY_STRING', '') return http.QueryDict(raw_query_string, encoding=self._encoding) def _get_post(self): if not hasattr(self, '_post'): self._load_post_and_files() return self._post def _set_post(self, post):<|fim▁hole|> raw_cookie = get_str_from_wsgi(self.environ, 'HTTP_COOKIE', '') return http.parse_cookie(raw_cookie) def _get_files(self): if not hasattr(self, '_files'): self._load_post_and_files() return self._files POST = property(_get_post, _set_post) FILES = property(_get_files) REQUEST = property(_get_request) class WSGIHandler(base.BaseHandler): initLock = Lock() request_class = WSGIRequest def __call__(self, environ, start_response): # Set up middleware if needed. We couldn't do this earlier, because # settings weren't available. if self._request_middleware is None: with self.initLock: try: # Check that middleware is still uninitialised. if self._request_middleware is None: self.load_middleware() except: # Unload whatever middleware we got self._request_middleware = None raise set_script_prefix(get_script_name(environ)) signals.request_started.send(sender=self.__class__) try: request = self.request_class(environ) except UnicodeDecodeError: logger.warning('Bad Request (UnicodeDecodeError)', exc_info=sys.exc_info(), extra={ 'status_code': 400, } ) response = http.HttpResponseBadRequest() else: response = self.get_response(request) response._handler_class = self.__class__ status = '%s %s' % (response.status_code, response.reason_phrase) response_headers = [(str(k), str(v)) for k, v in response.items()] for c in response.cookies.values(): response_headers.append((str('Set-Cookie'), str(c.output(header='')))) start_response(force_str(status), response_headers) return response def get_path_info(environ): """ Returns the HTTP request's PATH_INFO as a unicode string. """ path_info = get_bytes_from_wsgi(environ, 'PATH_INFO', '/') # It'd be better to implement URI-to-IRI decoding, see #19508. return path_info.decode(UTF_8) def get_script_name(environ): """ Returns the equivalent of the HTTP request's SCRIPT_NAME environment variable. If Apache mod_rewrite has been used, returns what would have been the script name prior to any rewriting (so it's the script name as seen from the client's perspective), unless the FORCE_SCRIPT_NAME setting is set (to anything). """ if settings.FORCE_SCRIPT_NAME is not None: return force_text(settings.FORCE_SCRIPT_NAME) # If Apache's mod_rewrite had a whack at the URL, Apache set either # SCRIPT_URL or REDIRECT_URL to the full resource URL before applying any # rewrites. Unfortunately not every Web server (lighttpd!) passes this # information through all the time, so FORCE_SCRIPT_NAME, above, is still # needed. 
script_url = get_bytes_from_wsgi(environ, 'SCRIPT_URL', '') if not script_url: script_url = get_bytes_from_wsgi(environ, 'REDIRECT_URL', '') if script_url: path_info = get_bytes_from_wsgi(environ, 'PATH_INFO', '') script_name = script_url[:-len(path_info)] else: script_name = get_bytes_from_wsgi(environ, 'SCRIPT_NAME', '') # It'd be better to implement URI-to-IRI decoding, see #19508. return script_name.decode(UTF_8) def get_bytes_from_wsgi(environ, key, default): """ Get a value from the WSGI environ dictionary as bytes. key and default should be str objects. Under Python 2 they may also be unicode objects provided they only contain ASCII characters. """ value = environ.get(str(key), str(default)) # Under Python 3, non-ASCII values in the WSGI environ are arbitrarily # decoded with ISO-8859-1. This is wrong for Django websites where UTF-8 # is the default. Re-encode to recover the original bytestring. return value if six.PY2 else value.encode(ISO_8859_1) def get_str_from_wsgi(environ, key, default): """ Get a value from the WSGI environ dictionary as bytes. key and default should be str objects. Under Python 2 they may also be unicode objects provided they only contain ASCII characters. """ value = environ.get(str(key), str(default)) # Same comment as above return value if six.PY2 else value.encode(ISO_8859_1).decode(UTF_8)<|fim▁end|>
self._post = post @cached_property def COOKIES(self):
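# Editor's sketch mirroring the _parse_content_type docstring example in the
# wsgi.py row above (RFC 2616 section 3.7 media-type parsing); the empty-key
# guard is an addition, everything else follows the row.
def parse_content_type(ctype):
    content_type, _, params = ctype.partition(';')
    content_params = {}
    for parameter in params.split(';'):
        k, _, v = parameter.strip().partition('=')
        if k:
            content_params[k] = v
    return content_type, content_params

print(parse_content_type("text/plain; charset=iso-8859-1"))
# ('text/plain', {'charset': 'iso-8859-1'})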
<|file_name|>authz_ownership.py<|end_file_name|><|fim▁begin|>""" Authorization module that allow users listed in /etc/cobbler/users.conf to be permitted to access resources, with the further restriction that cobbler objects can be edited to only allow certain users/groups to access those specific objects. Copyright 2008-2009, Red Hat, Inc and Others Michael DeHaan <michael.dehaan AT gmail> This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA """ import ConfigParser import os from cobbler.cexceptions import CX from cobbler.utils import _ def register(): """ The mandatory cobbler module registration hook. """ return "authz" def __parse_config(): etcfile = '/etc/cobbler/users.conf' if not os.path.exists(etcfile): raise CX(_("/etc/cobbler/users.conf does not exist")) config = ConfigParser.ConfigParser() # Make users case sensitive to handle kerberos config.optionxform = str config.read(etcfile) alldata = {} sections = config.sections() for g in sections: alldata[str(g)] = {} opts = config.options(g) for o in opts: alldata[g][o] = 1 return alldata def __authorize_autoinst(api_handle, groups, user, autoinst): # the authorization rules for automatic installation file editing are a bit # of a special case. Non-admin users can edit a automatic installation file # only if all objects that depend on that automatic installation file are # editable by the user in question. # # Example: # if Pinky owns ProfileA # and the Brain owns ProfileB # and both profiles use the same automatic installation template # and neither Pinky nor the Brain is an admin # neither is allowed to edit the automatic installation template # because they would make unwanted changes to each other # # In the above scenario the UI will explain the problem # and ask that the user asks the admin to resolve it if required. # NOTE: this function is only called by authorize so admin users are # cleared before this function is called. lst = api_handle.find_profile(autoinst=autoinst, return_list=True) lst.extend(api_handle.find_system(autoinst=autoinst, return_list=True)) for obj in lst: if not __is_user_allowed(obj, groups, user, "write_autoinst", autoinst, None): return 0 return 1 def __authorize_snippet(api_handle, groups, user, autoinst): # only allow admins to edit snippets -- since we don't have detection to see # where each snippet is in use for group in groups: if group not in ["admins", "admin"]: return False return True def __is_user_allowed(obj, groups, user, resource, arg1, arg2): if user == "<DIRECT>": # system user, logged in via web.ss return True for group in groups: if group in ["admins", "admin"]: return True if obj.owners == []: return True for allowed in obj.owners: if user == allowed: # user match return True # else look for a group match for group in groups: if group == allowed: return True return 0 def authorize(api_handle, user, resource, arg1=None, arg2=None): """ Validate a user against a resource. 
All users in the file are permitted by this module. """ if user == "<DIRECT>": # CLI should always be permitted return True # everybody can get read-only access to everything # if they pass authorization, they don't have to be in users.conf if resource is not None: # FIXME: /cobbler/web should not be subject to user check in any case for x in ["get", "read", "/cobbler/web"]: if resource.startswith(x): return 1 # read operation is always ok. user_groups = __parse_config() # classify the type of operation modify_operation = False for criteria in ["save", "copy", "rename", "remove", "modify", "edit", "xapi", "background"]: if resource.find(criteria) != -1: modify_operation = True # FIXME: is everyone allowed to copy? I think so. # FIXME: deal with the problem of deleted parents and promotion found_user = False found_groups = [] grouplist = user_groups.keys() for g in grouplist: for x in user_groups[g]: if x == user: found_groups.append(g) found_user = True # if user is in the admin group, always authorize # regardless of the ownership of the object. if g == "admins" or g == "admin": return True if not found_user: # if the user isn't anywhere in the file, reject regardless # they can still use read-only XMLRPC return 0 if not modify_operation: # sufficient to allow access for non save/remove ops to all # users for now, may want to refine later. return True # now we have a modify_operation op, so we must check ownership # of the object. remove ops pass in arg1 as a string name, # saves pass in actual objects, so we must treat them differently. # automatic installaton files are even more special so we call those # out to another function, rather than going through the rest of the # code here. if resource.find("write_autoinstall_template") != -1: return __authorize_autoinst(api_handle, found_groups, user, arg1) elif resource.find("read_autoinstall_template") != -1: return True # the API for editing snippets also needs to do something similar. # as with automatic installation files, though since they are more # widely used it's more restrictive if resource.find("write_autoinstall_snippet") != -1: return __authorize_snippet(api_handle, found_groups, user, arg1) elif resource.find("read_autoinstall_snipppet") != -1: return True obj = None if resource.find("remove") != -1: if resource == "remove_distro": obj = api_handle.find_distro(arg1) elif resource == "remove_profile": obj = api_handle.find_profile(arg1) elif resource == "remove_system":<|fim▁hole|> elif resource == "remove_image": obj = api_handle.find_image(arg1) elif resource.find("save") != -1 or resource.find("modify") != -1: obj = arg1 # if the object has no ownership data, allow access regardless if obj is None or obj.owners is None or obj.owners == []: return True return __is_user_allowed(obj, found_groups, user, resource, arg1, arg2)<|fim▁end|>
obj = api_handle.find_system(arg1) elif resource == "remove_repo": obj = api_handle.find_repo(arg1)
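The authz_ownership row above hinges on __parse_config, which treats /etc/cobbler/users.conf as an INI file: each section name is a group, each option name is a user, flattened into a dict of dicts. A minimal sketch of that parse in the module's own Python 2-era style; the [admins] section name is the one the module special-cases, while "superlab", "pinky", "brain" and "cobbler" are illustrative assumptions, not taken from the source (printed key order may differ):

# Sketch: what __parse_config() yields for a hypothetical users.conf.
import ConfigParser
import StringIO

SAMPLE = """\
[admins]
cobbler = 1

[superlab]
pinky = 1
brain = 1
"""

config = ConfigParser.ConfigParser()
config.optionxform = str  # keep user names case sensitive (e.g. Kerberos principals)
config.readfp(StringIO.StringIO(SAMPLE))

alldata = {}
for g in config.sections():
    alldata[str(g)] = dict((o, 1) for o in config.options(g))

print(alldata)
# {'admins': {'cobbler': 1}, 'superlab': {'pinky': 1, 'brain': 1}}

With that shape, authorize only needs membership tests: a user is found_user if any group dict contains them, and found_groups collects every section they appear in.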
<|file_name|>organization.js<|end_file_name|><|fim▁begin|>// Load Mongoose var mongoose = require('mongoose');<|fim▁hole|> // Automatically create a last modified date attribute that auto-updates var lastMod = require('./lastMod'); // define the schema for our model var organizationSchema = mongoose.Schema({ // The date this object was created createdDate : { type: Date, default: Date.now }, }); organizationSchema.plugin(lastMod); // create the model for organization and expose it to our app module.exports = mongoose.model('Organization', organizationSchema);<|fim▁end|>
<|file_name|>path_filter.rs<|end_file_name|><|fim▁begin|>use std::path::{PathBuf, Path, Component}; use ignore::gitignore::GitignoreBuilder; use errors; pub(crate) fn new(base_path: &Path, stash_path: &Path) -> errors::Result<PathFilter> { let base_path = PathBuf::from(base_path).canonicalize()?; let stash_path = PathBuf::from(stash_path).canonicalize()?; Ok(PathFilter { base_path: base_path, stash_path: stash_path, }) } pub(crate) struct PathFilter { base_path: PathBuf, stash_path: PathBuf, } impl PathFilter { fn is_in_scope(&self, path: &Path) -> bool { (!path.is_absolute()) || path.starts_with(&self.base_path) } fn ignored(&self, path: &Path) -> errors::Result<bool> { let rel_path = if path.is_absolute() { path.strip_prefix(&self.base_path)? } else { path }; let mut builder = GitignoreBuilder::new(&self.base_path); builder.add(".gitignore"); let mut ignore_path = self.base_path.clone(); for c in rel_path.components() { match c { Component::Normal(ref path_part) => ignore_path.push(path_part), _ => { panic!(<|fim▁hole|> rel_path ) } } if ignore_path.is_file() { break; } builder.add(&ignore_path.join(".gitignore")); } let built = builder.build()?; Ok( built .matched_path_or_any_parents(&path, path.is_dir()) .is_ignore(), ) } fn is_stash_path(&self, path: &Path) -> bool { path.starts_with(&self.stash_path) } fn is_dotted(&self, path: &Path) -> bool { for c in path.components() { match c { Component::Normal(ref path_part) => { if path_part.to_string_lossy().starts_with(".") { return true; } } _ => {} }; } false } pub fn allow(&self, path: &Path) -> errors::Result<bool> { if !self.is_in_scope(&path) { debug!("ignoring {:?} because not in scope", path); return Ok(false); } if self.is_stash_path(&path) { debug!("ignoring {:?} because within the stash", path); return Ok(false); } if self.is_dotted(&path) { debug!("ignoring {:?} because it's a dotted file", path); return Ok(false); } if self.ignored(&path)? { debug!("ignoring {:?} because ignored by .gitignore", path); return Ok(false); } return Ok(true); } }<|fim▁end|>
"Expecting a path witin {:?} but got {:?} - don't know how to check ignore status", self.base_path,
<|file_name|>NumberGreaterThanOrEqualsAdvancedFilter.java<|end_file_name|><|fim▁begin|>/** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. */ <|fim▁hole|>import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * NumberGreaterThanOrEquals Advanced Filter. */ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "operatorType", defaultImpl = NumberGreaterThanOrEqualsAdvancedFilter.class) @JsonTypeName("NumberGreaterThanOrEquals") public class NumberGreaterThanOrEqualsAdvancedFilter extends AdvancedFilter { /** * The filter value. */ @JsonProperty(value = "value") private Double value; /** * Get the filter value. * * @return the value value */ public Double value() { return this.value; } /** * Set the filter value. * * @param value the value value to set * @return the NumberGreaterThanOrEqualsAdvancedFilter object itself. */ public NumberGreaterThanOrEqualsAdvancedFilter withValue(Double value) { this.value = value; return this; } }<|fim▁end|>
package com.microsoft.azure.management.eventgrid.v2020_04_01_preview;
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! The serial support library contains all //! the functionality to read ports and send data //! between the threads that read serial port data //! and the threads that handle websocket requests. #![recursion_limit = "1024"] #![allow(dead_code)] #![allow(unused_variables)] extern crate argparse; #[macro_use] extern crate error_chain; #[macro_use] extern crate log;<|fim▁hole|>
#[macro_use] extern crate serde_derive;
<|file_name|>canvaskit.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|> declare function CanvasKitInit(opts: CanvasKitInitOptions): Promise<CanvasKit>; export = CanvasKitInit;<|fim▁end|>
import { CanvasKitInitOptions, CanvasKit } from "../index";
<|file_name|>delivery_carrier.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. import logging from odoo import api, fields, models, _ from odoo.exceptions import UserError, ValidationError from odoo.tools.safe_eval import safe_eval _logger = logging.getLogger(__name__) class DeliveryCarrier(models.Model): _name = 'delivery.carrier' _inherits = {'product.product': 'product_id'} _description = "Carrier" _order = 'sequence, id' ''' A Shipping Provider In order to add your own external provider, follow these steps: 1. Create your model MyProvider that _inherit 'delivery.carrier' 2. Extend the selection of the field "delivery_type" with a pair ('<my_provider>', 'My Provider') 3. Add your methods: <my_provider>_get_shipping_price_from_so <my_provider>_send_shipping <my_provider>_open_tracking_page <my_provider>_cancel_shipment (they are documented hereunder) ''' # -------------------------------- # # Internals for shipping providers # # -------------------------------- # sequence = fields.Integer(help="Determine the display order", default=10) # This field will be overwritten by internal shipping providers by adding their own type (ex: 'fedex') delivery_type = fields.Selection([('fixed', 'Fixed Price'), ('base_on_rule', 'Based on Rules')], string='Provider', default='fixed', required=True) product_type = fields.Selection(related='product_id.type', default='service') product_sale_ok = fields.Boolean(related='product_id.sale_ok', default=False) product_id = fields.Many2one('product.product', string='Delivery Product', required=True, ondelete="cascade") price = fields.Float(compute='get_price') available = fields.Boolean(compute='get_price') free_if_more_than = fields.Boolean('Free if Order total is more than', help="If the order is more expensive than a certain amount, the customer can benefit from a free shipping", default=False) amount = fields.Float(string='Amount', help="Amount of the order to benefit from a free shipping, expressed in the company currency") country_ids = fields.Many2many('res.country', 'delivery_carrier_country_rel', 'carrier_id', 'country_id', 'Countries') state_ids = fields.Many2many('res.country.state', 'delivery_carrier_state_rel', 'carrier_id', 'state_id', 'States') zip_from = fields.Char('Zip From') zip_to = fields.Char('Zip To') price_rule_ids = fields.One2many('delivery.price.rule', 'carrier_id', 'Pricing Rules', copy=True) fixed_price = fields.Float(compute='_compute_fixed_price', inverse='_set_product_fixed_price', store=True, string='Fixed Price',help="Keep empty if the pricing depends on the advanced pricing per destination") integration_level = fields.Selection([('rate', 'Get Rate'), ('rate_and_ship', 'Get Rate and Create Shipment')], string="Integration Level", default='rate_and_ship', help="Action while validating Delivery Orders") prod_environment = fields.Boolean("Environment", help="Set to True if your credentials are certified for production.") margin = fields.Integer(help='This percentage will be added to the shipping price.') _sql_constraints = [ ('margin_not_under_100_percent', 'CHECK (margin >= -100)', 'Margin cannot be lower than -100%'), ] @api.one def toggle_prod_environment(self): self.prod_environment = not self.prod_environment @api.multi def install_more_provider(self): return { 'name': 'New Providers', 'view_mode': 'kanban', 'res_model': 'ir.module.module', 'domain': [['name', 'ilike', 'delivery_']], 'type': 'ir.actions.act_window', 'help': _('''<p 
class="oe_view_nocontent"> Buy Odoo Enterprise now to get more providers. </p>'''), } @api.multi def name_get(self): display_delivery = self.env.context.get('display_delivery', False) order_id = self.env.context.get('order_id', False) if display_delivery and order_id: order = self.env['sale.order'].browse(order_id) currency = order.pricelist_id.currency_id.name or '' res = [] for carrier_id in self.ids: try: r = self.read([carrier_id], ['name', 'price'])[0] res.append((r['id'], r['name'] + ' (' + (str(r['price'])) + ' ' + currency + ')')) except ValidationError: r = self.read([carrier_id], ['name'])[0] res.append((r['id'], r['name'])) else: res = super(DeliveryCarrier, self).name_get() return res @api.depends('product_id.list_price', 'product_id.product_tmpl_id.list_price') def _compute_fixed_price(self): for carrier in self: carrier.fixed_price = carrier.product_id.list_price def _set_product_fixed_price(self): for carrier in self: carrier.product_id.list_price = carrier.fixed_price @api.one def get_price(self): SaleOrder = self.env['sale.order'] self.available = False self.price = False order_id = self.env.context.get('order_id') if order_id: # FIXME: temporary hack until we refactor the delivery API in master order = SaleOrder.browse(order_id) if self.delivery_type not in ['fixed', 'base_on_rule']: try: computed_price = self.get_shipping_price_from_so(order)[0] self.available = True except ValidationError as e: # No suitable delivery method found, probably configuration error _logger.info("Carrier %s: %s, not found", self.name, e.name) computed_price = 0.0 else: carrier = self.verify_carrier(order.partner_shipping_id) if carrier: try: computed_price = carrier.get_price_available(order) self.available = True except UserError as e: # No suitable delivery method found, probably configuration error _logger.info("Carrier %s: %s", carrier.name, e.name) computed_price = 0.0 else: computed_price = 0.0 self.price = computed_price * (1.0 + (float(self.margin) / 100.0)) # -------------------------- # # API for external providers # # -------------------------- # # TODO define and handle exceptions that could be thrown by providers def get_shipping_price_from_so(self, orders): ''' For every sale order, compute the price of the shipment :param orders: A recordset of sale orders :return list: A list of floats, containing the estimated price for the shipping of the sale order ''' self.ensure_one() if hasattr(self, '%s_get_shipping_price_from_so' % self.delivery_type): return getattr(self, '%s_get_shipping_price_from_so' % self.delivery_type)(orders) def send_shipping(self, pickings): ''' Send the package to the service provider :param pickings: A recordset of pickings :return list: A list of dictionaries (one per picking) containing of the form:: { 'exact_price': price, 'tracking_number': number } ''' self.ensure_one() if hasattr(self, '%s_send_shipping' % self.delivery_type): return getattr(self, '%s_send_shipping' % self.delivery_type)(pickings) def get_tracking_link(self, pickings): ''' Ask the tracking link to the service provider :param pickings: A recordset of pickings :return list: A list of string URLs, containing the tracking links for every picking ''' self.ensure_one() if hasattr(self, '%s_get_tracking_link' % self.delivery_type): return getattr(self, '%s_get_tracking_link' % self.delivery_type)(pickings) def cancel_shipment(self, pickings): ''' Cancel a shipment :param pickings: A recordset of pickings ''' self.ensure_one() if hasattr(self, '%s_cancel_shipment' % self.delivery_type): return 
getattr(self, '%s_cancel_shipment' % self.delivery_type)(pickings) @api.onchange('state_ids') def onchange_states(self): self.country_ids = [(6, 0, self.country_ids.ids + self.state_ids.mapped('country_id.id'))] @api.onchange('country_ids') def onchange_countries(self): self.state_ids = [(6, 0, self.state_ids.filtered(lambda state: state.id in self.country_ids.mapped('state_ids').ids).ids)] @api.multi def verify_carrier(self, contact): self.ensure_one() if self.country_ids and contact.country_id not in self.country_ids: return False if self.state_ids and contact.state_id not in self.state_ids: return False if self.zip_from and (contact.zip or '') < self.zip_from: return False if self.zip_to and (contact.zip or '') > self.zip_to: return False return self @api.multi def create_price_rules(self): PriceRule = self.env['delivery.price.rule'] for record in self: # If using advanced pricing per destination: do not change if record.delivery_type == 'base_on_rule': continue # Not using advanced pricing per destination: override lines if record.delivery_type == 'base_on_rule' and not (record.fixed_price is not False or record.free_if_more_than):<|fim▁hole|> # Check that float, else 0.0 is False if not (record.fixed_price is not False or record.free_if_more_than): continue if record.delivery_type == 'fixed': PriceRule.search([('carrier_id', '=', record.id)]).unlink() line_data = { 'carrier_id': record.id, 'variable': 'price', 'operator': '>=', } # Create the delivery price rules if record.free_if_more_than: line_data.update({ 'max_value': record.amount, 'standard_price': 0.0, 'list_base_price': 0.0, }) PriceRule.create(line_data) if record.fixed_price is not False: line_data.update({ 'max_value': 0.0, 'standard_price': record.fixed_price, 'list_base_price': record.fixed_price, }) PriceRule.create(line_data) return True @api.model def create(self, vals): res = super(DeliveryCarrier, self).create(vals) res.create_price_rules() return res @api.multi def write(self, vals): res = super(DeliveryCarrier, self).write(vals) self.create_price_rules() return res @api.multi def get_price_available(self, order): self.ensure_one() total = weight = volume = quantity = 0 total_delivery = 0.0 for line in order.order_line: if line.state == 'cancel': continue if line.is_delivery: total_delivery += line.price_total if not line.product_id or line.is_delivery: continue qty = line.product_uom._compute_quantity(line.product_uom_qty, line.product_id.uom_id) weight += (line.product_id.weight or 0.0) * qty volume += (line.product_id.volume or 0.0) * qty quantity += qty total = (order.amount_total or 0.0) - total_delivery total = order.currency_id.with_context(date=order.date_order).compute(total, order.company_id.currency_id) return self.get_price_from_picking(total, weight, volume, quantity) def get_price_from_picking(self, total, weight, volume, quantity): price = 0.0 criteria_found = False price_dict = {'price': total, 'volume': volume, 'weight': weight, 'wv': volume * weight, 'quantity': quantity} for line in self.price_rule_ids: test = safe_eval(line.variable + line.operator + str(line.max_value), price_dict) if test: price = line.list_base_price + line.list_price * price_dict[line.variable_factor] criteria_found = True break if not criteria_found: raise UserError(_("Selected product in the delivery method doesn't fulfill any of the delivery carrier(s) criteria.")) return price<|fim▁end|>
record.price_rule_ids.unlink()
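The delivery_carrier row above spells out its extension contract: send_shipping, get_shipping_price_from_so, get_tracking_link and cancel_shipment all dispatch through getattr(self, '%s_<hook>' % self.delivery_type), so a provider only has to extend the delivery_type selection and supply matching methods. A minimal sketch of such a provider under the API shown above; the 'myship' key, the flat price, and the tracking values are illustrative assumptions, not a real carrier integration:

# Sketch of a custom provider following the docstring's three steps.
from odoo import fields, models


class MyShipProvider(models.Model):
    _inherit = 'delivery.carrier'

    # Step 2: extend the delivery_type selection with our pair.
    delivery_type = fields.Selection(selection_add=[('myship', 'MyShip')])

    # Step 3: hooks looked up by name via getattr in the generic methods.
    def myship_get_shipping_price_from_so(self, orders):
        # One estimated price per sale order; a flat rate keeps the sketch short.
        return [10.0 for order in orders]

    def myship_send_shipping(self, pickings):
        # One dict per picking, matching the documented shape.
        return [{'exact_price': 10.0,
                 'tracking_number': 'MYSHIP-%d' % picking.id}
                for picking in pickings]

    def myship_get_tracking_link(self, pickings):
        # carrier_tracking_ref is assumed to hold the number set at shipping time.
        return ['https://tracking.example.com/%s' % picking.carrier_tracking_ref
                for picking in pickings]

    def myship_cancel_shipment(self, pickings):
        # A real provider would call its carrier API here; nothing to undo in a sketch.
        pass

Because each generic method guards with hasattr before dispatching, installing a model like this alongside the built-in 'fixed' and 'base_on_rule' types changes nothing until a carrier record actually selects 'myship'.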
<|file_name|>shared.ts<|end_file_name|><|fim▁begin|>import {ListWrapper, StringMapWrapper} from 'angular2/src/core/facade/collection'; import {isBlank, BaseException, looseIdentical} from 'angular2/src/core/facade/lang'; import {ControlContainer} from './control_container'; import {NgControl} from './ng_control'; import {NgValidator} from './validators'; import {Control} from '../model'; import {Validators} from '../validators'; import {Renderer} from 'angular2/render'; import {ElementRef, QueryList} from 'angular2/core'; export function controlPath(name: string, parent: ControlContainer): string[] { var p = ListWrapper.clone(parent.path); p.push(name); return p; } export function setUpControl(c: Control, dir: NgControl) { if (isBlank(c)) _throwError(dir, "Cannot find control"); if (isBlank(dir.valueAccessor)) _throwError(dir, "No value accessor for"); c.validator = Validators.compose([c.validator, dir.validator]); dir.valueAccessor.writeValue(c.value); // view -> model dir.valueAccessor.registerOnChange(newValue => { dir.viewToModelUpdate(newValue); c.updateValue(newValue, {emitModelToViewChange: false}); c.markAsDirty(); }); // model -> view c.registerOnChange(newValue => dir.valueAccessor.writeValue(newValue)); // touched dir.valueAccessor.registerOnTouched(() => c.markAsTouched()); } export function composeNgValidator(ngValidators: QueryList<NgValidator>): Function { if (isBlank(ngValidators)) return Validators.nullValidator; return Validators.compose(ngValidators.map(v => v.validator)); } <|fim▁hole|>} export function setProperty(renderer: Renderer, elementRef: ElementRef, propName: string, propValue: any) { renderer.setElementProperty(elementRef, propName, propValue); } export function isPropertyUpdated(changes: StringMap<string, any>, viewModel: any): boolean { if (!StringMapWrapper.contains(changes, "model")) return false; var change = changes["model"]; if (change.isFirstChange()) return true; return !looseIdentical(viewModel, change.currentValue); }<|fim▁end|>
function _throwError(dir: NgControl, message: string): void { var path = ListWrapper.join(dir.path, " -> "); throw new BaseException(`${message} '${path}'`);
<|file_name|>test_tree.py<|end_file_name|><|fim▁begin|>from hippiehug import RedisStore, Tree, Leaf, Branch import pytest ## ============== TESTS =================== def test_evidence(): t = Tree() # Test positive case<|fim▁hole|> root, E = t.evidence(b"World") assert len(E) == 2 store = dict((e.identity(), e) for e in E) t2 = Tree(store, root) assert t2.is_in(b"World") def test_store(rstore): l = Leaf(b"Hello", b"Hello") rstore[l.identity()] = l assert rstore[l.identity()].identity() == l.identity() def test_store_tree(rstore): t = Tree(store=rstore) from os import urandom for _ in range(100): item = urandom(32) t.add(item, item) assert t.is_in(item) assert not t.is_in(urandom(32)) def test_leaf_isin(): l = Leaf(b"Hello", b"Hello") store = {l.identity() : l} b = l.add(store, b"Woitemrld", b"Woitemrld") assert l.is_in(store, b"Hello", b"Hello") def test_leaf_isin_map(): l = Leaf(item=b"Hello", key=b"World") store = {l.identity() : l} b = l.add(store, b"World", b"World") assert l.is_in(store, item=b"Hello", key=b"World") def test_Branch_isin(): l = Leaf(b"Hello", b"Hello") store = {l.identity() : l} b = l.add(store, b"World", b"World") assert b.is_in(store, b"Hello", b"Hello") assert b.is_in(store, b"World", b"World") def test_Branch_isin_map(): l = Leaf(item=b"Hello", key=b"A") store = {l.identity() : l} b = l.add(store, item=b"World", key=b"B") assert b.is_in(store, b"Hello", b"A") assert b.is_in(store, b"World", b"B") assert not b.is_in(store, b"World", b"C") def test_Branch_multi(): l = Leaf(b"Hello", b"Hello") store = {l.identity() : l} b = l.multi_add(store, [b"B", b"C"], [b"B", b"C"]) b.check(store) assert b.is_in(store, b"B", b"B") assert b.is_in(store, b"C", b"C") assert b.is_in(store, b"Hello", b"Hello") def test_Branch_add(): l = Leaf(b"Hello", b"Hello") store = {l.identity() : l} b = l.add(store, b"World", b"World") b2 = b.add(store, b"Doom", b"Doom") assert isinstance(b2, Branch) assert b2.left_branch in store assert b2.right_branch in store assert b2.identity() in store b2.check(store) def test_add_like_a_monkey(): root = Leaf(b"Hello",b"Hello") store = {root.identity() : root} from os import urandom for _ in range(100): item = urandom(32) root = root.add(store, item, item) root.check(store) assert root.is_in(store, item, item) def test_Leaf_add(): l = Leaf(b"Hello", b"Hello") store = {l.identity() : l} b = l.add(store, b"World", b"World") assert isinstance(b, Branch) assert b.left_branch in store assert b.right_branch in store assert b.identity() in store assert store[b.left_branch].item <= b.pivot assert store[b.right_branch].item > b.pivot def test_Tree(): t = Tree() def test_add_isin(): t = Tree() # Test positive case t.add(b"Hello") assert t.is_in(b"Hello") == True # Infix operator assert b"Hello" in t def test_fail_isin(): t = Tree() # Test negative case assert t.is_in(b"World") == False def test_massive(): t = Tree() from os import urandom for _ in range(100): item = urandom(32) t.add(item) assert t.is_in(item) assert not t.is_in(urandom(32)) def test_multi_add(): t = Tree() from os import urandom X = [urandom(32) for _ in range(100)] t.multi_add(X) for x in X: assert x in t X = [urandom(32) for _ in range(100)] t.multi_add(X) for x in X: assert x in t Y = [urandom(32) for _ in range(100)] for y in Y: assert y not in t def test_multi_small(): t = Tree() t.multi_add([b"Hello", b"World"]) assert b"Hello" in t assert b"World" in t t.multi_add([b"A", b"B", b"C", b"D", b"E", b"F"]) assert b"E" in t assert b"F" in t def test_multi_test(): t = Tree() t.multi_add([b"Hello", 
b"World"]) assert t.multi_is_in([b"Hello", b"World"]) == [True, True] answer, head, evidence = t.multi_is_in([b"Hello", b"World"], evidence=True) assert answer == [True, True] e = dict((k.identity(), k) for k in evidence) t2 = Tree(e, head) assert t2.multi_is_in([b"Hello", b"World"]) == [True, True] def test_lookup(): l = Leaf(item=b"Hello", key=b"A") store = {l.identity() : l} b = l.add(store, item=b"World", key=b"B") assert b.is_in(store, b"Hello", b"A") assert b.is_in(store, b"World", b"B") assert not b.is_in(store, b"World", b"C") assert b.lookup(store, b"B") == (b"B", b"World") try: b.lookup(store, b"B") == (b"B", b"World2") assert False except: assert True try: b.lookup(store, b"C") == (b"B", b"World2") assert False except: assert True def test_double_add(): l = Leaf(item=b"Hello", key=b"A") store = {l.identity() : l} b = l.add(store, item=b"World", key=b"B") assert b.is_in(store, b"Hello", b"A") assert b.is_in(store, b"World", b"B") assert not b.is_in(store, b"World", b"C") b = b.add(store, item=b"World2", key=b"B") assert b.lookup(store, b"B") == (b"B", b"World") assert not b.lookup(store, b"B") == (b"B", b"World2") def test_tree_default_store(): t = Tree() t.multi_add([b"test"]) assert t.is_in(b"test") t2 = Tree() assert not t2.is_in(b"test") def test_tree_empty_store(): store = {} t = Tree(store) t.multi_add([b"test"]) assert t.is_in(b"test") t2 = Tree(store, root_hash=t.root()) assert t2.is_in(b"test")<|fim▁end|>
t.add(b"Hello", b"Hello") t.add(b"World", b"World")
<|file_name|>stats.pb.go<|end_file_name|><|fim▁begin|>// Code generated by protoc-gen-go. DO NOT EDIT. // source: github.com/appcelerator/amp/api/rpc/stats/stats.proto /* Package stats is a generated protocol buffer package. It is generated from these files: github.com/appcelerator/amp/api/rpc/stats/stats.proto It has these top-level messages: MetricsEntry MetricsCPUEntry MetricsIOEntry MetricsMemEntry MetricsNetEntry StatsRequest StatsReply */ package stats import proto "github.com/golang/protobuf/proto" import fmt "fmt" import math "math" import _ "google.golang.org/genproto/googleapis/api/annotations" import ( context "golang.org/x/net/context" grpc "google.golang.org/grpc" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package type MetricsEntry struct { Timestamp string `protobuf:"bytes,1,opt,name=timestamp" json:"timestamp,omitempty"` ContainerId string `protobuf:"bytes,2,opt,name=container_id,json=containerId" json:"container_id,omitempty"` ContainerName string `protobuf:"bytes,3,opt,name=container_name,json=containerName" json:"container_name,omitempty"` ContainerShortName string `protobuf:"bytes,4,opt,name=container_short_name,json=containerShortName" json:"container_short_name,omitempty"` ContainerState string `protobuf:"bytes,5,opt,name=container_state,json=containerState" json:"container_state,omitempty"` ServiceName string `protobuf:"bytes,6,opt,name=service_name,json=serviceName" json:"service_name,omitempty"` ServiceId string `protobuf:"bytes,7,opt,name=service_id,json=serviceId" json:"service_id,omitempty"` TaskId string `protobuf:"bytes,8,opt,name=task_id,json=taskId" json:"task_id,omitempty"` TaskSlot int32 `protobuf:"varint,9,opt,name=task_slot,json=taskSlot" json:"task_slot,omitempty"` StackName string `protobuf:"bytes,10,opt,name=stack_name,json=stackName" json:"stack_name,omitempty"` NodeId string `protobuf:"bytes,11,opt,name=node_id,json=nodeId" json:"node_id,omitempty"` TimeId string `protobuf:"bytes,12,opt,name=time_id,json=timeId" json:"time_id,omitempty"` Labels map[string]string `protobuf:"bytes,13,rep,name=labels" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` Group string `protobuf:"bytes,14,opt,name=group" json:"group,omitempty"` Sgroup string `protobuf:"bytes,15,opt,name=sgroup" json:"sgroup,omitempty"` Cpu *MetricsCPUEntry `protobuf:"bytes,16,opt,name=cpu" json:"cpu,omitempty"` Io *MetricsIOEntry `protobuf:"bytes,17,opt,name=io" json:"io,omitempty"` Mem *MetricsMemEntry `protobuf:"bytes,18,opt,name=mem" json:"mem,omitempty"` Net *MetricsNetEntry `protobuf:"bytes,19,opt,name=net" json:"net,omitempty"` } func (m *MetricsEntry) Reset() { *m = MetricsEntry{} } func (m *MetricsEntry) String() string { return proto.CompactTextString(m) } func (*MetricsEntry) ProtoMessage() {} func (*MetricsEntry) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } func (m *MetricsEntry) GetTimestamp() string { if m != nil { return m.Timestamp } return "" } func (m *MetricsEntry) GetContainerId() string { if m != nil { return m.ContainerId } return "" } func (m *MetricsEntry) GetContainerName() string { if m != nil { return 
m.ContainerName } return "" } func (m *MetricsEntry) GetContainerShortName() string { if m != nil { return m.ContainerShortName } return "" } func (m *MetricsEntry) GetContainerState() string { if m != nil { return m.ContainerState } return "" } func (m *MetricsEntry) GetServiceName() string { if m != nil { return m.ServiceName } return "" } func (m *MetricsEntry) GetServiceId() string { if m != nil { return m.ServiceId } return "" } func (m *MetricsEntry) GetTaskId() string { if m != nil { return m.TaskId } return "" } func (m *MetricsEntry) GetTaskSlot() int32 { if m != nil { return m.TaskSlot } return 0 } func (m *MetricsEntry) GetStackName() string { if m != nil { return m.StackName } return "" } func (m *MetricsEntry) GetNodeId() string { if m != nil { return m.NodeId } return "" } func (m *MetricsEntry) GetTimeId() string { if m != nil { return m.TimeId } return "" } func (m *MetricsEntry) GetLabels() map[string]string { if m != nil { return m.Labels } return nil } func (m *MetricsEntry) GetGroup() string { if m != nil { return m.Group } return "" } func (m *MetricsEntry) GetSgroup() string { if m != nil { return m.Sgroup } return "" } func (m *MetricsEntry) GetCpu() *MetricsCPUEntry { if m != nil { return m.Cpu } return nil } func (m *MetricsEntry) GetIo() *MetricsIOEntry { if m != nil { return m.Io } return nil } func (m *MetricsEntry) GetMem() *MetricsMemEntry { if m != nil { return m.Mem } return nil } func (m *MetricsEntry) GetNet() *MetricsNetEntry { if m != nil { return m.Net } return nil } type MetricsCPUEntry struct { TotalUsage float64 `protobuf:"fixed64,1,opt,name=total_usage,json=totalUsage" json:"total_usage,omitempty"` UsageInKernelMode float64 `protobuf:"fixed64,2,opt,name=usage_in_kernel_mode,json=usageInKernelMode" json:"usage_in_kernel_mode,omitempty"` UsageInUserMode float64 `protobuf:"fixed64,3,opt,name=usage_in_user_mode,json=usageInUserMode" json:"usage_in_user_mode,omitempty"` } func (m *MetricsCPUEntry) Reset() { *m = MetricsCPUEntry{} } func (m *MetricsCPUEntry) String() string { return proto.CompactTextString(m) } func (*MetricsCPUEntry) ProtoMessage() {} func (*MetricsCPUEntry) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } func (m *MetricsCPUEntry) GetTotalUsage() float64 { if m != nil { return m.TotalUsage } return 0 } func (m *MetricsCPUEntry) GetUsageInKernelMode() float64 { if m != nil { return m.UsageInKernelMode } return 0 } func (m *MetricsCPUEntry) GetUsageInUserMode() float64 { if m != nil { return m.UsageInUserMode } return 0 } type MetricsIOEntry struct { Read int64 `protobuf:"varint,1,opt,name=read" json:"read,omitempty"` Write int64 `protobuf:"varint,2,opt,name=write" json:"write,omitempty"` Total int64 `protobuf:"varint,3,opt,name=total" json:"total,omitempty"` } func (m *MetricsIOEntry) Reset() { *m = MetricsIOEntry{} } func (m *MetricsIOEntry) String() string { return proto.CompactTextString(m) } func (*MetricsIOEntry) ProtoMessage() {} func (*MetricsIOEntry) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } func (m *MetricsIOEntry) GetRead() int64 { if m != nil { return m.Read } return 0 } func (m *MetricsIOEntry) GetWrite() int64 { if m != nil { return m.Write } return 0 } func (m *MetricsIOEntry) GetTotal() int64 { if m != nil { return m.Total } return 0 } type MetricsMemEntry struct { Failcnt int64 `protobuf:"varint,1,opt,name=failcnt" json:"failcnt,omitempty"` Limit int64 `protobuf:"varint,2,opt,name=limit" json:"limit,omitempty"` Maxusage int64 `protobuf:"varint,3,opt,name=maxusage" 
json:"maxusage,omitempty"` Usage int64 `protobuf:"varint,4,opt,name=usage" json:"usage,omitempty"` UsageP float64 `protobuf:"fixed64,5,opt,name=usage_p,json=usageP" json:"usage_p,omitempty"` } func (m *MetricsMemEntry) Reset() { *m = MetricsMemEntry{} } func (m *MetricsMemEntry) String() string { return proto.CompactTextString(m) } func (*MetricsMemEntry) ProtoMessage() {} func (*MetricsMemEntry) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } func (m *MetricsMemEntry) GetFailcnt() int64 { if m != nil { return m.Failcnt } return 0 } func (m *MetricsMemEntry) GetLimit() int64 { if m != nil { return m.Limit } return 0 } func (m *MetricsMemEntry) GetMaxusage() int64 { if m != nil { return m.Maxusage } return 0 } func (m *MetricsMemEntry) GetUsage() int64 { if m != nil { return m.Usage } return 0 } func (m *MetricsMemEntry) GetUsageP() float64 { if m != nil { return m.UsageP } return 0 } type MetricsNetEntry struct { TotalBytes int64 `protobuf:"varint,1,opt,name=total_bytes,json=totalBytes" json:"total_bytes,omitempty"` RxBytes int64 `protobuf:"varint,2,opt,name=rx_bytes,json=rxBytes" json:"rx_bytes,omitempty"` RxDropped int64 `protobuf:"varint,3,opt,name=rx_dropped,json=rxDropped" json:"rx_dropped,omitempty"` RxErrors int64 `protobuf:"varint,4,opt,name=rx_errors,json=rxErrors" json:"rx_errors,omitempty"` RxPackets int64 `protobuf:"varint,5,opt,name=rx_packets,json=rxPackets" json:"rx_packets,omitempty"` TxBytes int64 `protobuf:"varint,6,opt,name=tx_bytes,json=txBytes" json:"tx_bytes,omitempty"` TxDropped int64 `protobuf:"varint,7,opt,name=tx_dropped,json=txDropped" json:"tx_dropped,omitempty"` TxErrors int64 `protobuf:"varint,8,opt,name=tx_errors,json=txErrors" json:"tx_errors,omitempty"` TxPackets int64 `protobuf:"varint,9,opt,name=tx_packets,json=txPackets" json:"tx_packets,omitempty"` } func (m *MetricsNetEntry) Reset() { *m = MetricsNetEntry{} } func (m *MetricsNetEntry) String() string { return proto.CompactTextString(m) } func (*MetricsNetEntry) ProtoMessage() {} func (*MetricsNetEntry) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } func (m *MetricsNetEntry) GetTotalBytes() int64 { if m != nil { return m.TotalBytes } return 0 } func (m *MetricsNetEntry) GetRxBytes() int64 { if m != nil { return m.RxBytes } return 0 } func (m *MetricsNetEntry) GetRxDropped() int64 { if m != nil { return m.RxDropped } return 0 } func (m *MetricsNetEntry) GetRxErrors() int64 { if m != nil { return m.RxErrors } return 0 } func (m *MetricsNetEntry) GetRxPackets() int64 { if m != nil { return m.RxPackets } return 0 } func (m *MetricsNetEntry) GetTxBytes() int64 { if m != nil { return m.TxBytes } return 0 } func (m *MetricsNetEntry) GetTxDropped() int64 { if m != nil { return m.TxDropped } return 0 } func (m *MetricsNetEntry) GetTxErrors() int64 { if m != nil { return m.TxErrors } return 0 } func (m *MetricsNetEntry) GetTxPackets() int64 { if m != nil { return m.TxPackets } return 0 } type StatsRequest struct { StatsCpu bool `protobuf:"varint,1,opt,name=stats_cpu,json=statsCpu" json:"stats_cpu,omitempty"` StatsMem bool `protobuf:"varint,2,opt,name=stats_mem,json=statsMem" json:"stats_mem,omitempty"` StatsIo bool `protobuf:"varint,3,opt,name=stats_io,json=statsIo" json:"stats_io,omitempty"` StatsNet bool `protobuf:"varint,4,opt,name=stats_net,json=statsNet" json:"stats_net,omitempty"` Group string `protobuf:"bytes,5,opt,name=group" json:"group,omitempty"` FilterContainerId string `protobuf:"bytes,6,opt,name=filter_container_id,json=filterContainerId" 
json:"filter_container_id,omitempty"` FilterContainerName string `protobuf:"bytes,7,opt,name=filter_container_name,json=filterContainerName" json:"filter_container_name,omitempty"` FilterContainerShortName string `protobuf:"bytes,8,opt,name=filter_container_short_name,json=filterContainerShortName" json:"filter_container_short_name,omitempty"` FilterContainerState string `protobuf:"bytes,9,opt,name=filter_container_state,json=filterContainerState" json:"filter_container_state,omitempty"` FilterServiceName string `protobuf:"bytes,10,opt,name=filter_service_name,json=filterServiceName" json:"filter_service_name,omitempty"` FilterServiceId string `protobuf:"bytes,11,opt,name=filter_service_id,json=filterServiceId" json:"filter_service_id,omitempty"` FilterTaskId string `protobuf:"bytes,12,opt,name=filter_task_id,json=filterTaskId" json:"filter_task_id,omitempty"` FilterStackName string `protobuf:"bytes,13,opt,name=filter_stack_name,json=filterStackName" json:"filter_stack_name,omitempty"` FilterNodeId string `protobuf:"bytes,14,opt,name=filter_node_id,json=filterNodeId" json:"filter_node_id,omitempty"` Since string `protobuf:"bytes,15,opt,name=since" json:"since,omitempty"` Until string `protobuf:"bytes,16,opt,name=until" json:"until,omitempty"` Period string `protobuf:"bytes,17,opt,name=period" json:"period,omitempty"` TimeGroup string `protobuf:"bytes,18,opt,name=time_group,json=timeGroup" json:"time_group,omitempty"`<|fim▁hole|> func (m *StatsRequest) Reset() { *m = StatsRequest{} } func (m *StatsRequest) String() string { return proto.CompactTextString(m) } func (*StatsRequest) ProtoMessage() {} func (*StatsRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} } func (m *StatsRequest) GetStatsCpu() bool { if m != nil { return m.StatsCpu } return false } func (m *StatsRequest) GetStatsMem() bool { if m != nil { return m.StatsMem } return false } func (m *StatsRequest) GetStatsIo() bool { if m != nil { return m.StatsIo } return false } func (m *StatsRequest) GetStatsNet() bool { if m != nil { return m.StatsNet } return false } func (m *StatsRequest) GetGroup() string { if m != nil { return m.Group } return "" } func (m *StatsRequest) GetFilterContainerId() string { if m != nil { return m.FilterContainerId } return "" } func (m *StatsRequest) GetFilterContainerName() string { if m != nil { return m.FilterContainerName } return "" } func (m *StatsRequest) GetFilterContainerShortName() string { if m != nil { return m.FilterContainerShortName } return "" } func (m *StatsRequest) GetFilterContainerState() string { if m != nil { return m.FilterContainerState } return "" } func (m *StatsRequest) GetFilterServiceName() string { if m != nil { return m.FilterServiceName } return "" } func (m *StatsRequest) GetFilterServiceId() string { if m != nil { return m.FilterServiceId } return "" } func (m *StatsRequest) GetFilterTaskId() string { if m != nil { return m.FilterTaskId } return "" } func (m *StatsRequest) GetFilterStackName() string { if m != nil { return m.FilterStackName } return "" } func (m *StatsRequest) GetFilterNodeId() string { if m != nil { return m.FilterNodeId } return "" } func (m *StatsRequest) GetSince() string { if m != nil { return m.Since } return "" } func (m *StatsRequest) GetUntil() string { if m != nil { return m.Until } return "" } func (m *StatsRequest) GetPeriod() string { if m != nil { return m.Period } return "" } func (m *StatsRequest) GetTimeGroup() string { if m != nil { return m.TimeGroup } return "" } func (m *StatsRequest) GetTimeZone() string { 
if m != nil { return m.TimeZone } return "" } func (m *StatsRequest) GetAvg() bool { if m != nil { return m.Avg } return false } func (m *StatsRequest) GetAllowsInfra() bool { if m != nil { return m.AllowsInfra } return false } type StatsReply struct { Entries []*MetricsEntry `protobuf:"bytes,1,rep,name=entries" json:"entries,omitempty"` } func (m *StatsReply) Reset() { *m = StatsReply{} } func (m *StatsReply) String() string { return proto.CompactTextString(m) } func (*StatsReply) ProtoMessage() {} func (*StatsReply) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} } func (m *StatsReply) GetEntries() []*MetricsEntry { if m != nil { return m.Entries } return nil } func init() { proto.RegisterType((*MetricsEntry)(nil), "stats.MetricsEntry") proto.RegisterType((*MetricsCPUEntry)(nil), "stats.MetricsCPUEntry") proto.RegisterType((*MetricsIOEntry)(nil), "stats.MetricsIOEntry") proto.RegisterType((*MetricsMemEntry)(nil), "stats.MetricsMemEntry") proto.RegisterType((*MetricsNetEntry)(nil), "stats.MetricsNetEntry") proto.RegisterType((*StatsRequest)(nil), "stats.StatsRequest") proto.RegisterType((*StatsReply)(nil), "stats.StatsReply") } // Reference imports to suppress errors if they are not otherwise used. var _ context.Context var _ grpc.ClientConn // This is a compile-time assertion to ensure that this generated file // is compatible with the grpc package it is being compiled against. const _ = grpc.SupportPackageIsVersion4 // Client API for Stats service type StatsClient interface { StatsQuery(ctx context.Context, in *StatsRequest, opts ...grpc.CallOption) (*StatsReply, error) } type statsClient struct { cc *grpc.ClientConn } func NewStatsClient(cc *grpc.ClientConn) StatsClient { return &statsClient{cc} } func (c *statsClient) StatsQuery(ctx context.Context, in *StatsRequest, opts ...grpc.CallOption) (*StatsReply, error) { out := new(StatsReply) err := grpc.Invoke(ctx, "/stats.Stats/StatsQuery", in, out, c.cc, opts...) 
if err != nil { return nil, err } return out, nil } // Server API for Stats service type StatsServer interface { StatsQuery(context.Context, *StatsRequest) (*StatsReply, error) } func RegisterStatsServer(s *grpc.Server, srv StatsServer) { s.RegisterService(&_Stats_serviceDesc, srv) } func _Stats_StatsQuery_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(StatsRequest) if err := dec(in); err != nil { return nil, err } if interceptor == nil { return srv.(StatsServer).StatsQuery(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, FullMethod: "/stats.Stats/StatsQuery", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(StatsServer).StatsQuery(ctx, req.(*StatsRequest)) } return interceptor(ctx, in, info, handler) } var _Stats_serviceDesc = grpc.ServiceDesc{ ServiceName: "stats.Stats", HandlerType: (*StatsServer)(nil), Methods: []grpc.MethodDesc{ { MethodName: "StatsQuery", Handler: _Stats_StatsQuery_Handler, }, }, Streams: []grpc.StreamDesc{}, Metadata: "github.com/appcelerator/amp/api/rpc/stats/stats.proto", } func init() { proto.RegisterFile("github.com/appcelerator/amp/api/rpc/stats/stats.proto", fileDescriptor0) } var fileDescriptor0 = []byte{ // 1097 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x56, 0xcd, 0x6e, 0x23, 0x45, 0x10, 0x96, 0xed, 0xf8, 0x67, 0xca, 0x59, 0x67, 0xd3, 0x71, 0x92, 0x21, 0xd9, 0xd5, 0x06, 0x8b, 0x15, 0xd1, 0x22, 0x62, 0x08, 0x20, 0x60, 0x11, 0x17, 0xc2, 0x0a, 0x19, 0x48, 0x08, 0x13, 0x72, 0xe1, 0x32, 0xea, 0xcc, 0x74, 0xbc, 0xad, 0xcc, 0x4c, 0x0f, 0x3d, 0xed, 0xac, 0xcd, 0x91, 0x33, 0x07, 0x24, 0xee, 0xbc, 0x14, 0x4f, 0x80, 0xc4, 0x43, 0x70, 0x44, 0x5d, 0xd5, 0xe3, 0x19, 0x27, 0xb9, 0x58, 0x5d, 0x5f, 0x7d, 0xf5, 0x55, 0x57, 0xbb, 0xbb, 0x6a, 0xe0, 0x93, 0xa9, 0x34, 0xaf, 0x67, 0x57, 0x47, 0x91, 0x4a, 0xc7, 0x3c, 0xcf, 0x23, 0x91, 0x08, 0xcd, 0x8d, 0xd2, 0x63, 0x9e, 0xe6, 0x63, 0x9e, 0xcb, 0xb1, 0xce, 0xa3, 0x71, 0x61, 0xb8, 0x29, 0xe8, 0xf7, 0x28, 0xd7, 0xca, 0x28, 0xd6, 0x46, 0x63, 0xef, 0xc9, 0x54, 0xa9, 0x69, 0x22, 0x90, 0xc8, 0xb3, 0x4c, 0x19, 0x6e, 0xa4, 0xca, 0x1c, 0x69, 0xf4, 0x4f, 0x1b, 0xd6, 0x4f, 0x85, 0xd1, 0x32, 0x2a, 0x5e, 0x65, 0x46, 0x2f, 0xd8, 0x13, 0xf0, 0x8c, 0x4c, 0x45, 0x61, 0x78, 0x9a, 0xfb, 0x8d, 0x83, 0xc6, 0xa1, 0x17, 0x54, 0x00, 0x7b, 0x1b, 0xd6, 0x23, 0x95, 0x19, 0x2e, 0x33, 0xa1, 0x43, 0x19, 0xfb, 0x4d, 0x24, 0xf4, 0x97, 0xd8, 0x24, 0x66, 0xcf, 0x61, 0x50, 0x51, 0x32, 0x9e, 0x0a, 0xbf, 0x85, 0xa4, 0x47, 0x4b, 0xf4, 0x8c, 0xa7, 0x82, 0x7d, 0x00, 0xc3, 0x8a, 0x56, 0xbc, 0x56, 0xda, 0x10, 0x79, 0x0d, 0xc9, 0x6c, 0xe9, 0xbb, 0xb0, 0x2e, 0x8c, 0x78, 0x17, 0x36, 0x6a, 0x11, 0x86, 0x1b, 0xe1, 0xb7, 0x91, 0x5c, 0xe5, 0xbb, 0xb0, 0xa8, 0xdd, 0x64, 0x21, 0xf4, 0xad, 0x8c, 0x04, 0x49, 0x76, 0x68, 0x93, 0x0e, 0x43, 0xad, 0xa7, 0x00, 0x25, 0x45, 0xc6, 0x7e, 0x97, 0xca, 0x74, 0xc8, 0x24, 0x66, 0xbb, 0xd0, 0x35, 0xbc, 0xb8, 0xb1, 0xbe, 0x1e, 0xfa, 0x3a, 0xd6, 0x9c, 0xc4, 0x6c, 0x1f, 0x3c, 0x74, 0x14, 0x89, 0x32, 0xbe, 0x77, 0xd0, 0x38, 0x6c, 0x07, 0x3d, 0x0b, 0x5c, 0x24, 0xca, 0xa0, 0xa8, 0xe1, 0xd1, 0x0d, 0x65, 0x05, 0x27, 0x6a, 0x11, 0xcc, 0xb9, 0x0b, 0xdd, 0x4c, 0xc5, 0x98, 0xb0, 0x4f, 0xa2, 0xd6, 0x74, 0xd9, 0x64, 0x8a, 0x8e, 0x75, 0x97, 0x4d, 0xa6, 0xd6, 0xf1, 0x29, 0x74, 0x12, 0x7e, 0x25, 0x92, 0xc2, 0x7f, 0x74, 0xd0, 0x3a, 0xec, 0x1f, 0x3f, 0x3b, 0xa2, 0xff, 0xb7, 0xfe, 0x87, 0x1d, 0x7d, 0x8f, 0x0c, 0x5c, 0x07, 0x8e, 0xce, 0x86, 0xd0, 0x9e, 0x6a, 
0x35, 0xcb, 0xfd, 0x01, 0xea, 0x91, 0xc1, 0x76, 0xa0, 0x53, 0x10, 0xbc, 0x41, 0x69, 0xc8, 0x62, 0x87, 0xd0, 0x8a, 0xf2, 0x99, 0xff, 0xf8, 0xa0, 0x71, 0xd8, 0x3f, 0xde, 0x59, 0xcd, 0x71, 0x72, 0x7e, 0x49, 0xd2, 0x96, 0xc2, 0x9e, 0x43, 0x53, 0x2a, 0x7f, 0x13, 0x89, 0xdb, 0xab, 0xc4, 0xc9, 0x0f, 0xc4, 0x6b, 0x4a, 0x65, 0x05, 0x53, 0x91, 0xfa, 0xec, 0x21, 0xc1, 0x53, 0x91, 0x3a, 0xc1, 0x54, 0xa4, 0x96, 0x99, 0x09, 0xe3, 0x6f, 0x3d, 0xc4, 0x3c, 0x13, 0xc6, 0x31, 0x33, 0x61, 0xf6, 0x3e, 0x87, 0x7e, 0xad, 0x52, 0xf6, 0x18, 0x5a, 0x37, 0x62, 0xe1, 0x2e, 0xa8, 0x5d, 0xda, 0x9a, 0x6f, 0x79, 0x32, 0x13, 0xee, 0x4e, 0x92, 0xf1, 0xb2, 0xf9, 0x59, 0x63, 0xf4, 0x47, 0x03, 0x36, 0xee, 0x94, 0xc3, 0x9e, 0x41, 0xdf, 0x28, 0xc3, 0x93, 0x70, 0x56, 0xf0, 0xa9, 0x40, 0x9d, 0x46, 0x00, 0x08, 0x5d, 0x5a, 0x84, 0x8d, 0x61, 0x88, 0xae, 0x50, 0x66, 0xe1, 0x8d, 0xd0, 0x99, 0x48, 0xc2, 0x54, 0xc5, 0xa4, 0xde, 0x08, 0x36, 0xd1, 0x37, 0xc9, 0xbe, 0x43, 0xcf, 0xa9, 0x8a, 0x05, 0x7b, 0x0f, 0xd8, 0x32, 0x60, 0x56, 0x08, 0x4d, 0xf4, 0x16, 0xd2, 0x37, 0x1c, 0xfd, 0xb2, 0x10, 0xda, 0x92, 0x47, 0xe7, 0x30, 0x58, 0x3d, 0x37, 0xc6, 0x60, 0x4d, 0x0b, 0x1e, 0xe3, 0x4e, 0x5a, 0x01, 0xae, 0x6d, 0x49, 0x6f, 0xb4, 0x34, 0x94, 0xb4, 0x15, 0x90, 0x61, 0x51, 0xdc, 0x27, 0x6a, 0xb7, 0x02, 0x32, 0x46, 0xbf, 0x57, 0x45, 0x96, 0x47, 0xcc, 0x7c, 0xe8, 0x5e, 0x73, 0x99, 0x44, 0x99, 0x71, 0xb2, 0xa5, 0x69, 0x35, 0x12, 0x99, 0x4a, 0x53, 0x2a, 0xa3, 0xc1, 0xf6, 0xa0, 0x97, 0xf2, 0x39, 0x9d, 0x08, 0x89, 0x2f, 0x6d, 0x1b, 0x41, 0x8e, 0x35, 0x8a, 0x20, 0x74, 0x17, 0xba, 0x54, 0x74, 0x8e, 0x6f, 0xb1, 0x11, 0x74, 0xd0, 0x3c, 0x1f, 0xfd, 0xd5, 0x5c, 0x6e, 0xa7, 0xfc, 0x1f, 0xab, 0x33, 0xbf, 0x5a, 0x18, 0x51, 0xb8, 0x2d, 0xd1, 0x99, 0x7f, 0x65, 0x11, 0xf6, 0x16, 0xf4, 0xf4, 0xdc, 0x79, 0x69, 0x63, 0x5d, 0x3d, 0x27, 0xd7, 0x53, 0x00, 0x3d, 0x0f, 0x63, 0xad, 0xf2, 0x5c, 0xc4, 0x6e, 0x73, 0x9e, 0x9e, 0x7f, 0x4d, 0x80, 0x7d, 0x97, 0x7a, 0x1e, 0x0a, 0xad, 0x95, 0x2e, 0xdc, 0x0e, 0x7b, 0x7a, 0xfe, 0x0a, 0x6d, 0x17, 0x9b, 0xf3, 0xe8, 0x46, 0x98, 0x02, 0xf7, 0x89, 0xb1, 0xe7, 0x04, 0xd8, 0xac, 0xa6, 0xcc, 0xda, 0xa1, 0xac, 0xa6, 0xca, 0x6a, 0xaa, 0xac, 0x5d, 0x8a, 0x34, 0xf5, 0xac, 0x66, 0x99, 0xb5, 0x47, 0x59, 0x4d, 0x2d, 0xab, 0xa9, 0xb2, 0x7a, 0x65, 0xac, 0xcb, 0x3a, 0xfa, 0xaf, 0x0d, 0xeb, 0xb6, 0x5d, 0x15, 0x81, 0xf8, 0x65, 0x26, 0x0a, 0x63, 0xc5, 0xf0, 0xfa, 0x87, 0xf6, 0x2d, 0xda, 0xb3, 0xe9, 0x05, 0x3d, 0x04, 0x4e, 0xf2, 0x59, 0xe5, 0xb4, 0xef, 0xaa, 0x59, 0x73, 0x9e, 0x8a, 0xd4, 0x16, 0x40, 0x4e, 0xa9, 0xf0, 0x64, 0x7a, 0x41, 0x17, 0xed, 0x89, 0xaa, 0xe2, 0xec, 0x2b, 0x5b, 0xab, 0xc5, 0x9d, 0x09, 0x53, 0x75, 0x89, 0x76, 0xbd, 0x4b, 0x1c, 0xc1, 0xd6, 0xb5, 0x4c, 0x8c, 0xd0, 0xe1, 0x4a, 0xa7, 0xa7, 0x26, 0xba, 0x49, 0xae, 0x93, 0x5a, 0xbf, 0x3f, 0x86, 0xed, 0x7b, 0x7c, 0x6c, 0x80, 0xd4, 0x55, 0xb7, 0xee, 0x44, 0x60, 0x2b, 0xfc, 0x12, 0xf6, 0xef, 0xc5, 0xd4, 0x66, 0x00, 0xf5, 0x5c, 0xff, 0x4e, 0x64, 0x35, 0x09, 0x3e, 0x86, 0x9d, 0xfb, 0xe1, 0x38, 0x10, 0x3c, 0x8c, 0x1c, 0xde, 0x8d, 0xc4, 0xb1, 0x50, 0x15, 0xb6, 0x32, 0x1d, 0xa0, 0x5e, 0xd8, 0x45, 0x6d, 0x46, 0xbc, 0x80, 0xcd, 0x3b, 0xfc, 0x65, 0xe7, 0xde, 0x58, 0x61, 0x4f, 0x62, 0xf6, 0x0e, 0x0c, 0x1c, 0xb7, 0x9c, 0x1b, 0xd4, 0xc9, 0xd7, 0x09, 0xfd, 0x89, 0xa6, 0x47, 0x4d, 0xb1, 0x9a, 0x13, 0x8f, 0x56, 0x14, 0x97, 0xd3, 0xa2, 0x52, 0x2c, 0x87, 0xc6, 0xa0, 0xae, 0x78, 0x46, 0xa3, 0x63, 0x08, 0xed, 0x42, 0x66, 0x91, 0x70, 0x1d, 0x9d, 0x0c, 0x7c, 0xab, 0x99, 0x91, 0x09, 0xb6, 0x74, 0x2f, 0x20, 0xc3, 0xb6, 0xff, 0x5c, 0x68, 0xa9, 0x62, 0x6c, 0xe0, 0x5e, 0xe0, 0x2c, 0xbc, 0xa8, 0x76, 0xfc, 0xd0, 0x5d, 0x60, 
0xd5, 0xc8, 0xff, 0x06, 0xef, 0xc3, 0x3e, 0x7d, 0x10, 0x84, 0xbf, 0xaa, 0x4c, 0x60, 0xa3, 0xf6, 0x82, 0x9e, 0x05, 0x7e, 0x56, 0x99, 0xb0, 0x6d, 0x98, 0xdf, 0x4e, 0xfd, 0x21, 0xde, 0x2c, 0xbb, 0xb4, 0xc3, 0x97, 0x27, 0x89, 0x7a, 0x53, 0x84, 0x32, 0xbb, 0xd6, 0xdc, 0xdf, 0x46, 0x57, 0x9f, 0xb0, 0x89, 0x85, 0x46, 0x5f, 0x00, 0xb8, 0x9b, 0x9f, 0x27, 0x0b, 0xf6, 0x3e, 0x74, 0x45, 0x66, 0xb4, 0xc4, 0x8e, 0x60, 0xa7, 0xdc, 0xd6, 0x03, 0x53, 0x2e, 0x28, 0x39, 0xc7, 0x17, 0xd0, 0xc6, 0x60, 0xf6, 0xad, 0x53, 0xf9, 0x71, 0x26, 0xf4, 0x82, 0x95, 0x41, 0xf5, 0x27, 0xb5, 0xb7, 0xb9, 0x0a, 0xe6, 0xc9, 0x62, 0x34, 0xfc, 0xed, 0xef, 0x7f, 0xff, 0x6c, 0x0e, 0x46, 0xde, 0xf8, 0xf6, 0x43, 0xfa, 0x5a, 0x7a, 0xd9, 0x78, 0x71, 0xd5, 0xc1, 0x8f, 0xa1, 0x8f, 0xfe, 0x0f, 0x00, 0x00, 0xff, 0xff, 0xdd, 0x37, 0xdb, 0x27, 0x6a, 0x09, 0x00, 0x00, }<|fim▁end|>
TimeZone string `protobuf:"bytes,19,opt,name=time_zone,json=timeZone" json:"time_zone,omitempty"` Avg bool `protobuf:"varint,20,opt,name=avg" json:"avg,omitempty"` AllowsInfra bool `protobuf:"varint,21,opt,name=allows_infra,json=allowsInfra" json:"allows_infra,omitempty"` }
<|file_name|>codetest.js<|end_file_name|><|fim▁begin|>new require('styles/dark') module.exports = class extends require('base/app'){ prototype(){ this.tools = { Rect:require('shaders/quad'), Code: require('views/code').extend({ w:'100#', h:'100%' }) } } constructor(){ super() //this.code = new this.Code(this, {text:require('/examples/tiny.js').__module__.source}) let code = 'var x=(1,2,)\n' this.code = new this.Code(this, {text:code}) //this.code = new this.Code(this, {text:module.source}) //this.code = new this.Code(this, {text:'if(x){\n\t1+2\nif(t){\n}\n}'})<|fim▁hole|> /*this.drawRect({ w:100, h:100, color:'red' })*/ this.code.draw(this) } }<|fim▁end|>
} onDraw(){
<|file_name|>test_obj.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # Copyright (c) 2010-2012 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import itertools import random import time import unittest from contextlib import contextmanager import mock from eventlet import Timeout import swift from swift.common import utils, swob from swift.proxy import server as proxy_server from swift.common.storage_policy import StoragePolicy, POLICIES from test.unit import FakeRing, FakeMemcache, fake_http_connect, \ debug_logger, patch_policies from test.unit.proxy.test_server import node_error_count @contextmanager def set_http_connect(*args, **kwargs): old_connect = swift.proxy.controllers.base.http_connect new_connect = fake_http_connect(*args, **kwargs) try: swift.proxy.controllers.base.http_connect = new_connect swift.proxy.controllers.obj.http_connect = new_connect swift.proxy.controllers.account.http_connect = new_connect swift.proxy.controllers.container.http_connect = new_connect yield new_connect left_over_status = list(new_connect.code_iter) if left_over_status: raise AssertionError('left over status %r' % left_over_status) finally: swift.proxy.controllers.base.http_connect = old_connect swift.proxy.controllers.obj.http_connect = old_connect swift.proxy.controllers.account.http_connect = old_connect swift.proxy.controllers.container.http_connect = old_connect class PatchedObjControllerApp(proxy_server.Application): """ This patch is just a hook over handle_request to ensure that when get_controller is called the ObjectController class is patched to return a (possibly stubbed) ObjectController class. 
""" object_controller = proxy_server.ObjectController def handle_request(self, req): with mock.patch('swift.proxy.server.ObjectController', new=self.object_controller): return super(PatchedObjControllerApp, self).handle_request(req) @patch_policies([StoragePolicy(0, 'zero', True, object_ring=FakeRing(max_more_nodes=9))]) class TestObjControllerWriteAffinity(unittest.TestCase): def setUp(self): self.app = proxy_server.Application( None, FakeMemcache(), account_ring=FakeRing(), container_ring=FakeRing(), logger=debug_logger()) self.app.request_node_count = lambda ring: 10000000 self.app.sort_nodes = lambda l: l # stop shuffling the primary nodes def test_iter_nodes_local_first_noops_when_no_affinity(self): controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o') self.app.write_affinity_is_local_fn = None object_ring = self.app.get_object_ring(None) all_nodes = object_ring.get_part_nodes(1) all_nodes.extend(object_ring.get_more_nodes(1)) local_first_nodes = list(controller.iter_nodes_local_first( object_ring, 1)) self.maxDiff = None self.assertEqual(all_nodes, local_first_nodes) def test_iter_nodes_local_first_moves_locals_first(self): controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o') self.app.write_affinity_is_local_fn = ( lambda node: node['region'] == 1) self.app.write_affinity_node_count = lambda ring: 4 object_ring = self.app.get_object_ring(None) all_nodes = object_ring.get_part_nodes(1) all_nodes.extend(object_ring.get_more_nodes(1)) local_first_nodes = list(controller.iter_nodes_local_first( object_ring, 1)) # the local nodes move up in the ordering self.assertEqual([1, 1, 1, 1], [node['region'] for node in local_first_nodes[:4]]) # we don't skip any nodes self.assertEqual(len(all_nodes), len(local_first_nodes)) self.assertEqual(sorted(all_nodes), sorted(local_first_nodes)) def test_connect_put_node_timeout(self): controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o') self.app.conn_timeout = 0.05 with set_http_connect(slow_connect=True): nodes = [dict(ip='', port='', device='')] res = controller._connect_put_node(nodes, '', '', {}, ('', '')) self.assertTrue(res is None) @patch_policies([ StoragePolicy(0, 'zero', True), StoragePolicy(1, 'one'), StoragePolicy(2, 'two'), ]) class TestObjController(unittest.TestCase): container_info = { 'partition': 1, 'nodes': [ {'ip': '127.0.0.1', 'port': '1', 'device': 'sda'}, {'ip': '127.0.0.1', 'port': '2', 'device': 'sda'}, {'ip': '127.0.0.1', 'port': '3', 'device': 'sda'}, ], 'write_acl': None, 'read_acl': None, 'storage_policy': None, 'sync_key': None, 'versions': None, } def setUp(self): # setup fake rings with handoffs self.obj_ring = FakeRing(max_more_nodes=3) for policy in POLICIES: policy.object_ring = self.obj_ring logger = debug_logger('proxy-server') logger.thread_locals = ('txn1', '127.0.0.2') self.app = PatchedObjControllerApp( None, FakeMemcache(), account_ring=FakeRing(), container_ring=FakeRing(), logger=logger) class FakeContainerInfoObjController(proxy_server.ObjectController): def container_info(controller, *args, **kwargs): patch_path = 'swift.proxy.controllers.base.get_info' with mock.patch(patch_path) as mock_get_info: mock_get_info.return_value = dict(self.container_info) return super(FakeContainerInfoObjController, controller).container_info(*args, **kwargs) # this is taking advantage of the fact that self.app is a # PachedObjControllerApp, so handle_response will route into an # instance of our FakeContainerInfoObjController just by # overriding the class attribute for 
object_controller self.app.object_controller = FakeContainerInfoObjController def test_PUT_simple(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='PUT') req.headers['content-length'] = '0' with set_http_connect(201, 201, 201): resp = req.get_response(self.app) self.assertEquals(resp.status_int, 201) def test_PUT_if_none_match(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='PUT') req.headers['if-none-match'] = '*' req.headers['content-length'] = '0' with set_http_connect(201, 201, 201): resp = req.get_response(self.app) self.assertEquals(resp.status_int, 201) def test_PUT_if_none_match_denied(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='PUT') req.headers['if-none-match'] = '*' req.headers['content-length'] = '0' with set_http_connect(201, 412, 201): resp = req.get_response(self.app) self.assertEquals(resp.status_int, 412) def test_PUT_if_none_match_not_star(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='PUT') req.headers['if-none-match'] = 'somethingelse' req.headers['content-length'] = '0' with set_http_connect(): resp = req.get_response(self.app) self.assertEquals(resp.status_int, 400) def test_PUT_connect_exceptions(self): object_ring = self.app.get_object_ring(None) self.app.sort_nodes = lambda n: n # disable shuffle def test_status_map(statuses, expected): self.app._error_limiting = {} req = swob.Request.blank('/v1/a/c/o.jpg', method='PUT', body='test body') with set_http_connect(*statuses): resp = req.get_response(self.app) self.assertEqual(resp.status_int, expected) base_status = [201] * 3 # test happy path test_status_map(list(base_status), 201) for i in range(3): self.assertEqual(node_error_count( self.app, object_ring.devs[i]), 0) # single node errors and test isolation for i in range(3): status_list = list(base_status) status_list[i] = 503 test_status_map(status_list, 201) for j in range(3): self.assertEqual(node_error_count( self.app, object_ring.devs[j]), 1 if j == i else 0) # connect errors test_status_map((201, Timeout(), 201, 201), 201) self.assertEqual(node_error_count( self.app, object_ring.devs[1]), 1) test_status_map((Exception('kaboom!'), 201, 201, 201), 201) self.assertEqual(node_error_count( self.app, object_ring.devs[0]), 1) # expect errors test_status_map((201, 201, (503, None), 201), 201) self.assertEqual(node_error_count( self.app, object_ring.devs[2]), 1) test_status_map(((507, None), 201, 201, 201), 201) self.assertEqual( node_error_count(self.app, object_ring.devs[0]), self.app.error_suppression_limit + 1) # response errors test_status_map(((100, Timeout()), 201, 201), 201) self.assertEqual( node_error_count(self.app, object_ring.devs[0]), 1) test_status_map((201, 201, (100, Exception())), 201) self.assertEqual( node_error_count(self.app, object_ring.devs[2]), 1) test_status_map((201, (100, 507), 201), 201) self.assertEqual( node_error_count(self.app, object_ring.devs[1]), self.app.error_suppression_limit + 1) def test_GET_simple(self): req = swift.common.swob.Request.blank('/v1/a/c/o') with set_http_connect(200): resp = req.get_response(self.app) self.assertEquals(resp.status_int, 200) def test_GET_error(self): req = swift.common.swob.Request.blank('/v1/a/c/o') with set_http_connect(503, 200): resp = req.get_response(self.app) self.assertEquals(resp.status_int, 200) def test_GET_handoff(self): req = swift.common.swob.Request.blank('/v1/a/c/o') codes = [503] * self.obj_ring.replicas + [200] with set_http_connect(*codes): resp = req.get_response(self.app) 
self.assertEquals(resp.status_int, 200) def test_GET_not_found(self): req = swift.common.swob.Request.blank('/v1/a/c/o') codes = [404] * (self.obj_ring.replicas + self.obj_ring.max_more_nodes) with set_http_connect(*codes): resp = req.get_response(self.app) self.assertEquals(resp.status_int, 404) def test_DELETE_simple(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='DELETE') with set_http_connect(204, 204, 204): resp = req.get_response(self.app) self.assertEquals(resp.status_int, 204) def test_DELETE_missing_one(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='DELETE') with set_http_connect(404, 204, 204): resp = req.get_response(self.app) self.assertEquals(resp.status_int, 204) def test_DELETE_half_not_found_statuses(self): self.obj_ring.set_replicas(4) req = swift.common.swob.Request.blank('/v1/a/c/o', method='DELETE') with set_http_connect(404, 204, 404, 204): resp = req.get_response(self.app) self.assertEquals(resp.status_int, 204) def test_DELETE_half_not_found_headers_and_body(self): # Transformed responses have bogus bodies and headers, so make sure we # send the client headers and body from a real node's response. self.obj_ring.set_replicas(4) status_codes = (404, 404, 204, 204) bodies = ('not found', 'not found', '', '') headers = [{}, {}, {'Pick-Me': 'yes'}, {'Pick-Me': 'yes'}] req = swift.common.swob.Request.blank('/v1/a/c/o', method='DELETE') with set_http_connect(*status_codes, body_iter=bodies, headers=headers): resp = req.get_response(self.app) self.assertEquals(resp.status_int, 204) self.assertEquals(resp.headers.get('Pick-Me'), 'yes') self.assertEquals(resp.body, '') def test_DELETE_not_found(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='DELETE') with set_http_connect(404, 404, 204): resp = req.get_response(self.app) self.assertEquals(resp.status_int, 404) def test_DELETE_handoff(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='DELETE') codes = [204] * self.obj_ring.replicas with set_http_connect(507, *codes): resp = req.get_response(self.app) self.assertEquals(resp.status_int, 204) def test_POST_as_COPY_simple(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='POST') head_resp = [200] * self.obj_ring.replicas + \ [404] * self.obj_ring.max_more_nodes put_resp = [201] * self.obj_ring.replicas codes = head_resp + put_resp with set_http_connect(*codes): resp = req.get_response(self.app) self.assertEquals(resp.status_int, 202) def test_POST_delete_at(self): t = str(int(time.time() + 100)) req = swob.Request.blank('/v1/a/c/o', method='POST', headers={'Content-Type': 'foo/bar', 'X-Delete-At': t}) post_headers = [] def capture_headers(ip, port, device, part, method, path, headers, **kwargs): if method == 'POST': post_headers.append(headers) x_newest_responses = [200] * self.obj_ring.replicas + \ [404] * self.obj_ring.max_more_nodes post_resp = [200] * self.obj_ring.replicas codes = x_newest_responses + post_resp with set_http_connect(*codes, give_connect=capture_headers): resp = req.get_response(self.app) self.assertEquals(resp.status_int, 200) for given_headers in post_headers: self.assertEquals(given_headers.get('X-Delete-At'), t) self.assertTrue('X-Delete-At-Host' in given_headers) self.assertTrue('X-Delete-At-Device' in given_headers) self.assertTrue('X-Delete-At-Partition' in given_headers) self.assertTrue('X-Delete-At-Container' in given_headers) def test_POST_non_int_delete_after(self): t = str(int(time.time() + 100)) + '.1' req = swob.Request.blank('/v1/a/c/o', method='POST', 
headers={'Content-Type': 'foo/bar', 'X-Delete-After': t}) resp = req.get_response(self.app) self.assertEqual(resp.status_int, 400) self.assertEqual('Non-integer X-Delete-After', resp.body) def test_POST_negative_delete_after(self): req = swob.Request.blank('/v1/a/c/o', method='POST', headers={'Content-Type': 'foo/bar', 'X-Delete-After': '-60'}) resp = req.get_response(self.app) self.assertEqual(resp.status_int, 400) self.assertEqual('X-Delete-After in past', resp.body) def test_POST_delete_at_non_integer(self): t = str(int(time.time() + 100)) + '.1' req = swob.Request.blank('/v1/a/c/o', method='POST', headers={'Content-Type': 'foo/bar', 'X-Delete-At': t}) resp = req.get_response(self.app) self.assertEqual(resp.status_int, 400) self.assertEqual('Non-integer X-Delete-At', resp.body) def test_POST_delete_at_in_past(self): t = str(int(time.time() - 100)) req = swob.Request.blank('/v1/a/c/o', method='POST', headers={'Content-Type': 'foo/bar', 'X-Delete-At': t}) resp = req.get_response(self.app) self.assertEqual(resp.status_int, 400) self.assertEqual('X-Delete-At in past', resp.body) def test_PUT_converts_delete_after_to_delete_at(self): req = swob.Request.blank('/v1/a/c/o', method='PUT', body='', headers={'Content-Type': 'foo/bar', 'X-Delete-After': '60'}) put_headers = [] def capture_headers(ip, port, device, part, method, path, headers, **kwargs): if method == 'PUT': put_headers.append(headers) codes = [201] * self.obj_ring.replicas t = time.time() with set_http_connect(*codes, give_connect=capture_headers): with mock.patch('time.time', lambda: t): resp = req.get_response(self.app) self.assertEquals(resp.status_int, 201) expected_delete_at = str(int(t) + 60) for given_headers in put_headers: self.assertEquals(given_headers.get('X-Delete-At'), expected_delete_at) self.assertTrue('X-Delete-At-Host' in given_headers) self.assertTrue('X-Delete-At-Device' in given_headers) self.assertTrue('X-Delete-At-Partition' in given_headers) self.assertTrue('X-Delete-At-Container' in given_headers) def test_PUT_non_int_delete_after(self): t = str(int(time.time() + 100)) + '.1' req = swob.Request.blank('/v1/a/c/o', method='PUT', body='', headers={'Content-Type': 'foo/bar', 'X-Delete-After': t}) with set_http_connect(): resp = req.get_response(self.app) self.assertEqual(resp.status_int, 400) self.assertEqual('Non-integer X-Delete-After', resp.body) def test_PUT_negative_delete_after(self): req = swob.Request.blank('/v1/a/c/o', method='PUT', body='', headers={'Content-Type': 'foo/bar', 'X-Delete-After': '-60'}) with set_http_connect(): resp = req.get_response(self.app) self.assertEqual(resp.status_int, 400) self.assertEqual('X-Delete-After in past', resp.body) def test_PUT_delete_at(self): t = str(int(time.time() + 100)) req = swob.Request.blank('/v1/a/c/o', method='PUT', body='', headers={'Content-Type': 'foo/bar', 'X-Delete-At': t}) put_headers = [] def capture_headers(ip, port, device, part, method, path, headers, **kwargs): if method == 'PUT': put_headers.append(headers) codes = [201] * self.obj_ring.replicas with set_http_connect(*codes, give_connect=capture_headers): resp = req.get_response(self.app) self.assertEquals(resp.status_int, 201) for given_headers in put_headers: self.assertEquals(given_headers.get('X-Delete-At'), t) self.assertTrue('X-Delete-At-Host' in given_headers) self.assertTrue('X-Delete-At-Device' in given_headers) self.assertTrue('X-Delete-At-Partition' in given_headers) self.assertTrue('X-Delete-At-Container' in given_headers) def test_PUT_delete_at_non_integer(self): t = 
str(int(time.time() - 100)) + '.1' req = swob.Request.blank('/v1/a/c/o', method='PUT', body='', headers={'Content-Type': 'foo/bar', 'X-Delete-At': t}) with set_http_connect(): resp = req.get_response(self.app) self.assertEqual(resp.status_int, 400) self.assertEqual('Non-integer X-Delete-At', resp.body) def test_PUT_delete_at_in_past(self): t = str(int(time.time() - 100)) req = swob.Request.blank('/v1/a/c/o', method='PUT', body='', headers={'Content-Type': 'foo/bar', 'X-Delete-At': t}) with set_http_connect(): resp = req.get_response(self.app) self.assertEqual(resp.status_int, 400) self.assertEqual('X-Delete-At in past', resp.body) def test_container_sync_put_x_timestamp_not_found(self): test_indexes = [None] + [int(p) for p in POLICIES] for policy_index in test_indexes: self.container_info['storage_policy'] = policy_index put_timestamp = utils.Timestamp(time.time()).normal req = swob.Request.blank( '/v1/a/c/o', method='PUT', headers={ 'Content-Length': 0, 'X-Timestamp': put_timestamp}) codes = [201] * self.obj_ring.replicas with set_http_connect(*codes): resp = req.get_response(self.app) self.assertEqual(resp.status_int, 201) def test_container_sync_put_x_timestamp_match(self): test_indexes = [None] + [int(p) for p in POLICIES] for policy_index in test_indexes: self.container_info['storage_policy'] = policy_index put_timestamp = utils.Timestamp(time.time()).normal req = swob.Request.blank( '/v1/a/c/o', method='PUT', headers={ 'Content-Length': 0, 'X-Timestamp': put_timestamp}) ts_iter = itertools.repeat(put_timestamp) codes = [409] * self.obj_ring.replicas with set_http_connect(*codes, timestamps=ts_iter): resp = req.get_response(self.app) self.assertEqual(resp.status_int, 202) def test_container_sync_put_x_timestamp_older(self): ts = (utils.Timestamp(t) for t in itertools.count(int(time.time()))) test_indexes = [None] + [int(p) for p in POLICIES] for policy_index in test_indexes: self.container_info['storage_policy'] = policy_index req = swob.Request.blank( '/v1/a/c/o', method='PUT', headers={ 'Content-Length': 0, 'X-Timestamp': ts.next().internal}) ts_iter = itertools.repeat(ts.next().internal) codes = [409] * self.obj_ring.replicas with set_http_connect(*codes, timestamps=ts_iter): resp = req.get_response(self.app) self.assertEqual(resp.status_int, 202) def test_container_sync_put_x_timestamp_newer(self): ts = (utils.Timestamp(t) for t in itertools.count(int(time.time()))) test_indexes = [None] + [int(p) for p in POLICIES] for policy_index in test_indexes: orig_timestamp = ts.next().internal req = swob.Request.blank( '/v1/a/c/o', method='PUT', headers={ 'Content-Length': 0, 'X-Timestamp': ts.next().internal}) ts_iter = itertools.repeat(orig_timestamp) codes = [201] * self.obj_ring.replicas with set_http_connect(*codes, timestamps=ts_iter): resp = req.get_response(self.app) self.assertEqual(resp.status_int, 201) def test_container_sync_delete(self): ts = (utils.Timestamp(t) for t in itertools.count(int(time.time()))) test_indexes = [None] + [int(p) for p in POLICIES] for policy_index in test_indexes: req = swob.Request.blank( '/v1/a/c/o', method='DELETE', headers={ 'X-Timestamp': ts.next().internal}) codes = [409] * self.obj_ring.replicas ts_iter = itertools.repeat(ts.next().internal) with set_http_connect(*codes, timestamps=ts_iter): resp = req.get_response(self.app) self.assertEqual(resp.status_int, 409) def test_put_x_timestamp_conflict(self): ts = (utils.Timestamp(t) for t in itertools.count(int(time.time()))) req = swob.Request.blank( '/v1/a/c/o', method='PUT', headers={ 
'Content-Length': 0, 'X-Timestamp': ts.next().internal}) ts_iter = iter([ts.next().internal, None, None]) codes = [409] + [201] * (self.obj_ring.replicas - 1) with set_http_connect(*codes, timestamps=ts_iter): resp = req.get_response(self.app) self.assertEqual(resp.status_int, 202) def test_container_sync_put_x_timestamp_race(self): ts = (utils.Timestamp(t) for t in itertools.count(int(time.time()))) test_indexes = [None] + [int(p) for p in POLICIES] for policy_index in test_indexes: put_timestamp = ts.next().internal req = swob.Request.blank( '/v1/a/c/o', method='PUT', headers={ 'Content-Length': 0, 'X-Timestamp': put_timestamp}) # object nodes they respond 409 because another in-flight request # finished and now the on disk timestamp is equal to the request. put_ts = [put_timestamp] * self.obj_ring.replicas codes = [409] * self.obj_ring.replicas ts_iter = iter(put_ts) with set_http_connect(*codes, timestamps=ts_iter): resp = req.get_response(self.app) self.assertEqual(resp.status_int, 202) def test_container_sync_put_x_timestamp_unsynced_race(self): ts = (utils.Timestamp(t) for t in itertools.count(int(time.time()))) test_indexes = [None] + [int(p) for p in POLICIES] for policy_index in test_indexes: put_timestamp = ts.next().internal req = swob.Request.blank( '/v1/a/c/o', method='PUT', headers={ 'Content-Length': 0, 'X-Timestamp': put_timestamp}) # only one in-flight request finished put_ts = [None] * (self.obj_ring.replicas - 1) put_resp = [201] * (self.obj_ring.replicas - 1) put_ts += [put_timestamp] put_resp += [409] ts_iter = iter(put_ts) codes = put_resp with set_http_connect(*codes, timestamps=ts_iter): resp = req.get_response(self.app) self.assertEqual(resp.status_int, 202) def test_COPY_simple(self): req = swift.common.swob.Request.blank( '/v1/a/c/o', method='COPY', headers={'Content-Length': 0, 'Destination': 'c/o-copy'}) head_resp = [200] * self.obj_ring.replicas + \ [404] * self.obj_ring.max_more_nodes put_resp = [201] * self.obj_ring.replicas codes = head_resp + put_resp with set_http_connect(*codes): resp = req.get_response(self.app) self.assertEquals(resp.status_int, 201) def test_HEAD_simple(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='HEAD') with set_http_connect(200): resp = req.get_response(self.app) self.assertEquals(resp.status_int, 200) def test_HEAD_x_newest(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='HEAD', headers={'X-Newest': 'true'}) with set_http_connect(200, 200, 200): resp = req.get_response(self.app) self.assertEquals(resp.status_int, 200) def test_HEAD_x_newest_different_timestamps(self): req = swob.Request.blank('/v1/a/c/o', method='HEAD', headers={'X-Newest': 'true'}) ts = (utils.Timestamp(t) for t in itertools.count(int(time.time()))) timestamps = [next(ts) for i in range(3)] newest_timestamp = timestamps[-1] random.shuffle(timestamps) backend_response_headers = [{ 'X-Backend-Timestamp': t.internal, 'X-Timestamp': t.normal } for t in timestamps] with set_http_connect(200, 200, 200, headers=backend_response_headers): resp = req.get_response(self.app) self.assertEqual(resp.status_int, 200) self.assertEqual(resp.headers['x-timestamp'], newest_timestamp.normal) def test_HEAD_x_newest_with_two_vector_timestamps(self): req = swob.Request.blank('/v1/a/c/o', method='HEAD', headers={'X-Newest': 'true'}) ts = (utils.Timestamp(time.time(), offset=offset) for offset in itertools.count()) timestamps = [next(ts) for i in range(3)] newest_timestamp = timestamps[-1] random.shuffle(timestamps) backend_response_headers = [{ 
'X-Backend-Timestamp': t.internal, 'X-Timestamp': t.normal } for t in timestamps] with set_http_connect(200, 200, 200, headers=backend_response_headers): resp = req.get_response(self.app) self.assertEqual(resp.status_int, 200) self.assertEqual(resp.headers['x-backend-timestamp'], newest_timestamp.internal) def test_HEAD_x_newest_with_some_missing(self): req = swob.Request.blank('/v1/a/c/o', method='HEAD', headers={'X-Newest': 'true'}) ts = (utils.Timestamp(t) for t in itertools.count(int(time.time()))) request_count = self.app.request_node_count(self.obj_ring.replicas) backend_response_headers = [{ 'x-timestamp': next(ts).normal, } for i in range(request_count)] responses = [404] * (request_count - 1) responses.append(200) request_log = [] def capture_requests(ip, port, device, part, method, path, headers=None, **kwargs): req = { 'ip': ip, 'port': port, 'device': device, 'part': part, 'method': method, 'path': path, 'headers': headers, } request_log.append(req) with set_http_connect(*responses, headers=backend_response_headers, give_connect=capture_requests): resp = req.get_response(self.app) self.assertEqual(resp.status_int, 200) for req in request_log: self.assertEqual(req['method'], 'HEAD') self.assertEqual(req['path'], '/a/c/o') def test_PUT_log_info(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='PUT') req.headers['x-copy-from'] = 'some/where' req.headers['Content-Length'] = 0 # override FakeConn default resp headers to keep log_info clean<|fim▁hole|> put_resp = [201] * self.obj_ring.replicas codes = head_resp + put_resp with set_http_connect(*codes, headers=resp_headers): resp = req.get_response(self.app) self.assertEqual(resp.status_int, 201) self.assertEquals( req.environ.get('swift.log_info'), ['x-copy-from:some/where']) # and then check that we don't do that for originating POSTs req = swift.common.swob.Request.blank('/v1/a/c/o') req.method = 'POST' req.headers['x-copy-from'] = 'else/where' with set_http_connect(*codes, headers=resp_headers): resp = req.get_response(self.app) self.assertEqual(resp.status_int, 202) self.assertEquals(req.environ.get('swift.log_info'), None) @patch_policies([ StoragePolicy(0, 'zero', True), StoragePolicy(1, 'one'), StoragePolicy(2, 'two'), ]) class TestObjControllerLegacyCache(TestObjController): """ This test pretends like memcache returned a stored value that should resemble whatever "old" format. It catches KeyErrors you'd get if your code was expecting some new format during a rolling upgrade. """ container_info = { 'read_acl': None, 'write_acl': None, 'sync_key': None, 'versions': None, } if __name__ == '__main__': unittest.main()<|fim▁end|>
resp_headers = {'x-delete-at': None} head_resp = [200] * self.obj_ring.replicas + \ [404] * self.obj_ring.max_more_nodes
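# Illustrative aside (not part of the dataset row above): the proxy tests in
# this row drive PUT/GET paths by handing set_http_connect one scripted status
# code per backend connection. A minimal standalone sketch of that
# scripted-status pattern, with hypothetical names:
class FakeConn(object):
    def __init__(self, status):
        self.status = status

def make_fake_connect(*statuses):
    it = iter(statuses)
    def connect(*args, **kwargs):
        # each call plays back the next scripted backend response
        return FakeConn(next(it))
    return connect

connect = make_fake_connect(201, 503, 201)
assert [connect().status for _ in range(3)] == [201, 503, 201]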
<|file_name|>line.rs<|end_file_name|><|fim▁begin|>use std::string::ToString; use std::borrow::Cow; use time; #[derive(Debug)] pub enum Line { // PMs are logged sometimes trigger alerts (and are sent to client(s)) PrivMsg { src: String, dst: String, text: String, orig: String }, // Metadata is not logged (but is sent to client(s)) Meta { orig: String }, // Pings must be ponged but are neither logged nor sent to client(s) Ping { orig: String }, } impl Line { pub fn new_meta(s: &str) -> Self { Line::Meta{ orig: s.to_string() } } pub fn new_ping(s: &str) -> Self { Line::Ping{ orig: s.to_string() } } pub fn new_pm(src: &str, dst: &str, text: &str, orig: &str) -> Self { Line::PrivMsg{ src: src.to_string(), dst: dst.to_string(), text: text.to_string(), orig: orig.to_string(), } } pub fn pong_from_ping(p: &str) -> Line { let s = p.replacen("PING ", "PONG ", 1); Line::Ping { orig: s } } pub fn format_privmsg(&self, srv_name: &str) -> Option<(Cow<str>,String)> { // (name,msg) // if this message was in a public channel, `name` should be that channel // if it was a private message from another user, it should be their nick if let &Line::PrivMsg{ ref src, ref dst, ref text, .. } = self { let now = time::now(); let msg = format!("{} {:>9}: {}\n", now.rfc3339(), src, text); // https://tools.ietf.org/html/rfc2812#section-2.3.1 let valid_nick_start = |c: char| c >= char::from(0x41) && c <= char::from(0x7d); let name: Cow<str> = if dst.starts_with(valid_nick_start) { Cow::Owned(format!("{}_{}", src, srv_name)) } else { Cow::Borrowed(dst) }; Some((name,msg)) } else { None } } pub fn mention(&self, nick: &str) -> bool { if let &Line::PrivMsg{ ref dst, ref text, .. } = self { dst == nick || text.contains(nick) } else { false }<|fim▁hole|> pub fn from_str(input: &str) -> Self { // TODO: adhere closer to the RFC // e.g. `:[email protected] PRIVMSG Wiz message goes here` // TODO: treat PRIVMSG and NOTICE differently? // TODO: handle '\r' better? let in_fixed = input.trim_right(); let mut parts = in_fixed.splitn(4, ' '); let a = parts.nth(0); let b = parts.nth(0); let c = parts.nth(0); let d = parts.nth(0); match (a, b, c, d) { //(Some(s), Some("NOTICE"), Some(d), Some(m)) | (Some(s), Some("PRIVMSG"), Some(d), Some(m)) => { let i = if s.starts_with(':') { 1 } else { 0 }; let j = s.find('!').unwrap_or(s.len()-1); let src_fixed = &s[i..j]; let msg_fixed = if m.starts_with(':') { &m[1..] } else { m }; Line::new_pm(src_fixed, d, msg_fixed, in_fixed) }, (Some("PING"), _, _, _) => Line::new_ping(in_fixed), _ => Line::new_meta(input) } } } impl ToString for Line { fn to_string(&self) -> String { match *self { Line::PrivMsg { orig: ref o, .. } => o, Line::Meta { orig: ref o, .. } => o, Line::Ping { orig: ref o, .. } => o, }.clone() } }<|fim▁end|>
}
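# Cross-language aside (Python, illustrative only): the parsing rule that
# Line::from_str implements in the Rust row above splits an IRC line such as
# ":nick!user@host PRIVMSG #chan :text" into source, destination and text.
def parse_privmsg(line):
    parts = line.rstrip().split(' ', 3)
    if len(parts) == 4 and parts[1] == 'PRIVMSG':
        src = parts[0].lstrip(':').split('!', 1)[0]
        return (src, parts[2], parts[3].lstrip(':'))
    return None

assert parse_privmsg(':joe!u@h PRIVMSG #chan :hello') == ('joe', '#chan', 'hello')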
<|file_name|>bootloader_advanced_gui.py<|end_file_name|><|fim▁begin|># # bootloader_advanced.py: gui advanced bootloader configuration dialog # # Jeremy Katz <[email protected]> # # Copyright 2001-2002 Red Hat, Inc. # # This software may be freely redistributed under the terms of the GNU # library public license. # # You should have received a copy of the GNU Library Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. # import gtk import gobject import iutil import partedUtils import gui from iw_gui import * from rhpl.translate import _, N_ from bootlocwidget import BootloaderLocationWidget class AdvancedBootloaderWindow(InstallWindow): windowTitle = N_("Advanced Boot Loader Configuration") def __init__(self, ics): InstallWindow.__init__(self, ics) self.parent = ics.getICW().window def getPrev(self): pass def getNext(self): # forcing lba32 can be a bad idea.. make sure they really want to if (self.forceLBA.get_active() and not self.bl.forceLBA32): rc = self.intf.messageWindow(_("Warning"), _("Forcing the use of LBA32 for your bootloader when " "not supported by the BIOS can cause your machine " "to be unable to boot.\n\n" "Would you like to continue and force LBA32 mode?"), type = "custom", custom_buttons = [_("Cancel"), _("Force LBA32")]) if rc != 1: raise gui.StayOnScreen # set forcelba self.bl.setForceLBA(self.forceLBA.get_active()) # set kernel args self.bl.args.set(self.appendEntry.get_text()) # set the boot device self.bl.setDevice(self.blloc.getBootDevice()) # set the drive order self.bl.drivelist = self.blloc.getDriveOrder() # set up the vbox with force lba32 and kernel append def setupOptionsVbox(self): self.options_vbox = gtk.VBox(False, 5) self.options_vbox.set_border_width(5) self.forceLBA = gtk.CheckButton(_("_Force LBA32 (not normally required)")) self.options_vbox.pack_start(self.forceLBA, False) self.forceLBA.set_active(self.bl.forceLBA32) label = gui.WrappingLabel(_("If you wish to add default options to the " "boot command, enter them into " "the 'General kernel parameters' field.")) label.set_alignment(0.0, 0.0) self.options_vbox.pack_start(label, False) label = gui.MnemonicLabel(_("_General kernel parameters")) self.appendEntry = gtk.Entry() label.set_mnemonic_widget(self.appendEntry) args = self.bl.args.get() if args: self.appendEntry.set_text(args) box = gtk.HBox(False, 0) box.pack_start(label) box.pack_start(self.appendEntry) al = gtk.Alignment(0.0, 0.0) al.add(box) self.options_vbox.pack_start(al, False) def getScreen(self, anaconda): self.dispatch = anaconda.dispatch self.bl = anaconda.id.bootloader self.intf = anaconda.intf thebox = gtk.VBox (False, 10) # boot loader location bits (mbr vs boot, drive order) self.blloc = BootloaderLocationWidget(anaconda, self.parent) thebox.pack_start(self.blloc.getWidget(), False) thebox.pack_start (gtk.HSeparator(), False) # some optional things self.setupOptionsVbox() thebox.pack_start(self.options_vbox, False) <|fim▁hole|><|fim▁end|>
return thebox
<|file_name|>isSurrogatePair.d.ts<|end_file_name|><|fim▁begin|>import validator from '../';<|fim▁hole|><|fim▁end|>
export default validator.isSurrogatePair;
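# Aside (Python, illustrative): validator.isSurrogatePair tests whether a
# string contains a UTF-16 surrogate pair, i.e. a code point above U+FFFF
# that UTF-16 must split across two 16-bit units. The equivalent check over
# code points:
def needs_surrogate_pair(text):
    return any(ord(ch) > 0xFFFF for ch in text)

assert needs_surrogate_pair('\U0001f600')  # emoji lies outside the BMP
assert not needs_surrogate_pair('abc')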
<|file_name|>TestReverseStringFilter.java<|end_file_name|><|fim▁begin|>/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.<|fim▁hole|>package org.apache.lucene.analysis.reverse; import java.io.IOException; import java.io.StringReader; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.MockTokenizer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.BaseTokenStreamTestCase; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.KeywordTokenizer; public class TestReverseStringFilter extends BaseTokenStreamTestCase { public void testFilter() throws Exception { TokenStream stream = new MockTokenizer(MockTokenizer.WHITESPACE, false); // 1-4 length string ((Tokenizer)stream).setReader(new StringReader("Do have a nice day")); ReverseStringFilter filter = new ReverseStringFilter(stream); assertTokenStreamContents(filter, new String[] { "oD", "evah", "a", "ecin", "yad" }); } public void testFilterWithMark() throws Exception { TokenStream stream = new MockTokenizer(MockTokenizer.WHITESPACE, false); // 1-4 length string ((Tokenizer)stream).setReader(new StringReader("Do have a nice day")); ReverseStringFilter filter = new ReverseStringFilter(stream, '\u0001'); assertTokenStreamContents(filter, new String[] { "\u0001oD", "\u0001evah", "\u0001a", "\u0001ecin", "\u0001yad" }); } public void testReverseString() throws Exception { assertEquals( "A", ReverseStringFilter.reverse( "A" ) ); assertEquals( "BA", ReverseStringFilter.reverse( "AB" ) ); assertEquals( "CBA", ReverseStringFilter.reverse( "ABC" ) ); } public void testReverseChar() throws Exception { char[] buffer = { 'A', 'B', 'C', 'D', 'E', 'F' }; ReverseStringFilter.reverse( buffer, 2, 3 ); assertEquals( "ABEDCF", new String( buffer ) ); } public void testReverseSupplementary() throws Exception { // supplementary at end assertEquals("𩬅艱鍟䇹愯瀛", ReverseStringFilter.reverse("瀛愯䇹鍟艱𩬅")); // supplementary at end - 1 assertEquals("a𩬅艱鍟䇹愯瀛", ReverseStringFilter.reverse("瀛愯䇹鍟艱𩬅a")); // supplementary at start assertEquals("fedcba𩬅", ReverseStringFilter.reverse("𩬅abcdef")); // supplementary at start + 1 assertEquals("fedcba𩬅z", ReverseStringFilter.reverse("z𩬅abcdef")); // supplementary medial assertEquals("gfe𩬅dcba", ReverseStringFilter.reverse("abcd𩬅efg")); } public void testReverseSupplementaryChar() throws Exception { // supplementary at end char[] buffer = "abc瀛愯䇹鍟艱𩬅".toCharArray(); ReverseStringFilter.reverse(buffer, 3, 7); assertEquals("abc𩬅艱鍟䇹愯瀛", new String(buffer)); // supplementary at end - 1 buffer = "abc瀛愯䇹鍟艱𩬅d".toCharArray(); ReverseStringFilter.reverse(buffer, 3, 8); assertEquals("abcd𩬅艱鍟䇹愯瀛", new String(buffer)); // supplementary at start buffer = "abc𩬅瀛愯䇹鍟艱".toCharArray(); ReverseStringFilter.reverse(buffer, 3, 7); assertEquals("abc艱鍟䇹愯瀛𩬅", new 
String(buffer)); // supplementary at start + 1 buffer = "abcd𩬅瀛愯䇹鍟艱".toCharArray(); ReverseStringFilter.reverse(buffer, 3, 8); assertEquals("abc艱鍟䇹愯瀛𩬅d", new String(buffer)); // supplementary medial buffer = "abc瀛愯𩬅def".toCharArray(); ReverseStringFilter.reverse(buffer, 3, 7); assertEquals("abcfed𩬅愯瀛", new String(buffer)); } /** blast some random strings through the analyzer */ public void testRandomStrings() throws Exception { Analyzer a = new Analyzer() { @Override protected TokenStreamComponents createComponents(String fieldName) { Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false); return new TokenStreamComponents(tokenizer, new ReverseStringFilter(tokenizer)); } }; checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER); } public void testEmptyTerm() throws IOException { Analyzer a = new Analyzer() { @Override protected TokenStreamComponents createComponents(String fieldName) { Tokenizer tokenizer = new KeywordTokenizer(); return new TokenStreamComponents(tokenizer, new ReverseStringFilter(tokenizer)); } }; checkOneTerm(a, "", ""); } }<|fim▁end|>
*/
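# Aside (Python, illustrative): the Java tests above pin down that reversing
# UTF-16 code units must keep each surrogate (supplementary) pair in order.
# Python 3 strings are sequences of code points, so plain slicing already
# satisfies the same expectations the tests assert:
assert '瀛愯䇹鍟艱𩬅'[::-1] == '𩬅艱鍟䇹愯瀛'
assert 'z𩬅abcdef'[::-1] == 'fedcba𩬅z'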
<|file_name|>resources.py<|end_file_name|><|fim▁begin|>from tastypie import fields from tastypie.bundle import Bundle from tastypie.resources import ModelResource, ALL, ALL_WITH_RELATIONS from api.authorization import DateaBaseAuthorization from api.authentication import ApiKeyPlusWebAuthentication from api.base_resources import JSONDefaultMixin from api.serializers import UTCSerializer from django.template.defaultfilters import linebreaksbr from tastypie.cache import SimpleCache from tastypie.throttle import CacheThrottle from django.contrib.contenttypes.models import ContentType from account.utils import get_domain_from_url from comment.models import Comment class CommentResource(JSONDefaultMixin, ModelResource): user = fields.ToOneField('account.resources.UserResource', attribute='user', full=True, readonly=True) def dehydrate(self, bundle): user_data = { 'username': bundle.data['user'].data['username'], 'image_small': bundle.data['user'].data['image_small'], 'id': bundle.data['user'].data['id'] } bundle.data['user'] = user_data bundle.data['content_type'] = bundle.obj.content_type.model return bundle def hydrate(self,bundle): # preserve data if bundle.request.method == 'PATCH': #preserve original fields fields = ['user', 'published', 'content_type', 'object_id', 'created', 'client_domain'] orig_obj = Comment.objects.get(pk=int(bundle.data['id'])) for f in fields: if f in request.data: request.data[f] = getattr(orig_obj, f) elif bundle.request.method == 'POST': # enforce post user bundle.obj.user = bundle.request.user bundle.data['user'] = bundle.request.user.id # convert model name into model bundle.obj.content_type = ContentType.objects.get(model=bundle.data['content_type']) bundle.obj.client_domain = get_domain_from_url(bundle.request.META.get('HTTP_ORIGIN', '')) del bundle.data['content_type'] return bundle def apply_sorting(self, obj_list, options=None): if options is None: options = {} else: options = options.copy() if not 'order_by' in options: options['order_by'] = 'created' return super(CommentResource, self).apply_sorting(obj_list, options) class Meta: queryset = Comment.objects.all() resource_name = 'comment' allowed_methods = ['get', 'post', 'patch', 'delete'] serializer = UTCSerializer(formats=['json']) filtering={ 'id' : ['exact'], 'user': ALL_WITH_RELATIONS, 'content_type': ALL_WITH_RELATIONS, 'object_id': ['exact'] } authentication = ApiKeyPlusWebAuthentication() authorization = DateaBaseAuthorization() limit = 50 excludes = ['client_domain'] ordering=['created'] #cache = SimpleCache(timeout=5) throttle = CacheThrottle(throttle_at=500) always_return_data = True include_resource_uri = False <|fim▁hole|><|fim▁end|>
def get_comment_resource_class(): return CommentResource
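# Aside (Python, runnable standalone): the dehydrate hook in CommentResource
# above collapses the embedded user bundle down to three fields before the
# comment is serialized. The reshaping itself, shown on plain dicts with
# hypothetical data:
def collapse_user(user):
    return {'username': user['username'],
            'image_small': user['image_small'],
            'id': user['id']}

full = {'username': 'ana', 'image_small': 'ana_s.png', 'id': 7, 'email': 'a@b'}
assert collapse_user(full) == {'username': 'ana', 'image_small': 'ana_s.png', 'id': 7}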
<|file_name|>oemedical_medicament_category.py<|end_file_name|><|fim▁begin|>from openerp import models,fields class OeMedicalMedicamentCategory(models.Model): _name = 'oemedical.medicament.category' <|fim▁hole|> childs = fields.One2many('oemedical.medicament.category', 'parent_id', string='Children', ) name = fields.Char(size=256, string='Name', required=True) parent_id = fields.Many2one('oemedical.medicament.category', string='Parent', select=True) _constraints = [ (models.Model._check_recursion, 'Error ! You cannot create recursive \n' 'Category.', ['parent_id']) ]<|fim▁end|>
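# Aside (Python, illustrative): the _check_recursion constraint in the
# OpenERP model above rejects a category that appears in its own parent
# chain. A standalone version of that ancestor-cycle check:
def has_cycle(parent_of, node):
    seen = set()
    while node is not None:
        if node in seen:
            return True
        seen.add(node)
        node = parent_of.get(node)
    return False

parent_of = {'child': 'root', 'root': None}
assert not has_cycle(parent_of, 'child')
parent_of['root'] = 'child'  # make the hierarchy cyclic
assert has_cycle(parent_of, 'root')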
<|file_name|>heap.rs<|end_file_name|><|fim▁begin|>// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![unstable(feature = "heap_api", reason = "the precise API and guarantees it provides may be tweaked \ slightly, especially to possibly take into account the \ types being stored to make room for a future \ tracing garbage collector", issue = "27700")] use core::{isize, usize}; #[allow(improper_ctypes)] extern { #[allocator] fn __rust_allocate(size: usize, align: usize) -> *mut u8; fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize); fn __rust_reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8; fn __rust_reallocate_inplace(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> usize; fn __rust_usable_size(size: usize, align: usize) -> usize; } #[inline(always)] fn check_size_and_alignment(size: usize, align: usize) { debug_assert!(size != 0); debug_assert!(size <= isize::MAX as usize, "Tried to allocate too much: {} bytes", size); debug_assert!(usize::is_power_of_two(align), "Invalid alignment of allocation: {}", align); } // FIXME: #13996: mark the `allocate` and `reallocate` return value as `noalias` /// Return a pointer to `size` bytes of memory aligned to `align`. /// /// On failure, return a null pointer. /// /// Behavior is undefined if the requested size is 0 or the alignment is not a /// power of 2. The alignment must be no larger than the largest supported page /// size on the platform. #[inline] pub unsafe fn allocate(size: usize, align: usize) -> *mut u8 { check_size_and_alignment(size, align); __rust_allocate(size, align) } /// Resize the allocation referenced by `ptr` to `size` bytes. /// /// On failure, return a null pointer and leave the original allocation intact. /// /// If the allocation was relocated, the memory at the passed-in pointer is /// undefined after the call. /// /// Behavior is undefined if the requested size is 0 or the alignment is not a /// power of 2. The alignment must be no larger than the largest supported page /// size on the platform. /// /// The `old_size` and `align` parameters are the parameters that were used to /// create the allocation referenced by `ptr`. The `old_size` parameter may be /// any value in range_inclusive(requested_size, usable_size). #[inline] pub unsafe fn reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8 { check_size_and_alignment(size, align); __rust_reallocate(ptr, old_size, size, align) } /// Resize the allocation referenced by `ptr` to `size` bytes. /// /// If the operation succeeds, it returns `usable_size(size, align)` and if it /// fails (or is a no-op) it returns `usable_size(old_size, align)`. /// /// Behavior is undefined if the requested size is 0 or the alignment is not a /// power of 2. The alignment must be no larger than the largest supported page /// size on the platform. /// /// The `old_size` and `align` parameters are the parameters that were used to /// create the allocation referenced by `ptr`. The `old_size` parameter may be /// any value in range_inclusive(requested_size, usable_size). 
#[inline] pub unsafe fn reallocate_inplace(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> usize { check_size_and_alignment(size, align); __rust_reallocate_inplace(ptr, old_size, size, align) } /// Deallocates the memory referenced by `ptr`. /// /// The `ptr` parameter must not be null. /// /// The `old_size` and `align` parameters are the parameters that were used to /// create the allocation referenced by `ptr`. The `old_size` parameter may be /// any value in range_inclusive(requested_size, usable_size). #[inline] pub unsafe fn deallocate(ptr: *mut u8, old_size: usize, align: usize) { __rust_deallocate(ptr, old_size, align) } /// Returns the usable size of an allocation created with the specified the /// `size` and `align`. #[inline] pub fn usable_size(size: usize, align: usize) -> usize { unsafe { __rust_usable_size(size, align) } } /// An arbitrary non-null address to represent zero-size allocations. /// /// This preserves the non-null invariant for types like `Box<T>`. The address /// may overlap with non-zero-size memory allocations. pub const EMPTY: *mut () = 0x1 as *mut (); /// The allocator for unique pointers. #[cfg(not(test))] #[lang = "exchange_malloc"] #[inline] unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 { if size == 0 { EMPTY as *mut u8 } else { let ptr = allocate(size, align); if ptr.is_null() { ::oom() } ptr } } #[cfg(not(test))] #[lang = "exchange_free"] #[inline] unsafe fn exchange_free(ptr: *mut u8, old_size: usize, align: usize) { deallocate(ptr, old_size, align); } #[cfg(test)] mod tests { extern crate test; use self::test::Bencher; use boxed::Box; use heap; <|fim▁hole|> #[test] fn basic_reallocate_inplace_noop() { unsafe { let size = 4000; let ptr = heap::allocate(size, 8); if ptr.is_null() { ::oom() } let ret = heap::reallocate_inplace(ptr, size, size, 8); heap::deallocate(ptr, size, 8); assert_eq!(ret, heap::usable_size(size, 8)); } } #[bench] fn alloc_owned_small(b: &mut Bencher) { b.iter(|| { let _: Box<_> = box 10; }) } }<|fim▁end|>
<|file_name|>compress.rs<|end_file_name|><|fim▁begin|>extern crate env_logger; extern crate handlebars_iron as hbs; extern crate iron; extern crate router; extern crate serde; extern crate serde_json; #[macro_use] extern crate serde_derive; #[macro_use] extern crate maplit; extern crate flate2; use hbs::handlebars::{Context, Handlebars, Helper, Output, RenderContext, RenderError}; use hbs::{DirectorySource, HandlebarsEngine, MemorySource, Template}; use iron::headers::{ContentEncoding, Encoding}; use iron::prelude::*; use iron::{status, AfterMiddleware}; use router::Router; use flate2::write::GzEncoder; use flate2::Compression; mod data { use hbs::handlebars::to_json; use serde_json::value::{Map, Value}; #[derive(Serialize, Debug)] pub struct Team {<|fim▁hole|> } pub fn make_data() -> Map<String, Value> { let mut data = Map::new(); data.insert("year".to_string(), to_json(&"2015".to_owned())); let teams = vec![ Team { name: "Jiangsu Sainty".to_string(), pts: 43u16, }, Team { name: "Beijing Guoan".to_string(), pts: 27u16, }, Team { name: "Guangzhou Evergrand".to_string(), pts: 22u16, }, Team { name: "Shandong Luneng".to_string(), pts: 12u16, }, ]; data.insert("teams".to_string(), to_json(&teams)); data.insert("engine".to_string(), to_json(&"serde_json".to_owned())); data } } use data::*; /// the handlers fn index(_: &mut Request) -> IronResult<Response> { let mut resp = Response::new(); let data = make_data(); resp.set_mut(Template::new("some/path/hello", data)) .set_mut(status::Ok); Ok(resp) } fn memory(_: &mut Request) -> IronResult<Response> { let mut resp = Response::new(); let data = make_data(); resp.set_mut(Template::new("memory", data)) .set_mut(status::Ok); Ok(resp) } fn temp(_: &mut Request) -> IronResult<Response> { let mut resp = Response::new(); let data = make_data(); resp.set_mut(Template::with( include_str!("templates/some/path/hello.hbs"), data, )) .set_mut(status::Ok); Ok(resp) } fn plain(_: &mut Request) -> IronResult<Response> { Ok(Response::with((status::Ok, "It works"))) } // an example compression middleware pub struct GzMiddleware; impl AfterMiddleware for GzMiddleware { fn after(&self, _: &mut Request, mut resp: Response) -> IronResult<Response> { let compressed_bytes = resp.body.as_mut().map(|b| { let mut encoder = GzEncoder::new(Vec::new(), Compression::Best); { let _ = b.write_body(&mut encoder); } encoder.finish().unwrap() }); if let Some(b) = compressed_bytes { resp.headers.set(ContentEncoding(vec![Encoding::Gzip])); resp.set_mut(b); } Ok(resp) } } fn main() { env_logger::init().unwrap(); let mut hbse = HandlebarsEngine::new(); // add a directory source, all files with .hbs suffix will be loaded as template hbse.add(Box::new(DirectorySource::new( "./examples/templates/", ".hbs", ))); let mem_templates = btreemap! 
{ "memory".to_owned() => include_str!("templates/some/path/hello.hbs").to_owned() }; // add a memory based source hbse.add(Box::new(MemorySource(mem_templates))); // load templates from all registered sources if let Err(r) = hbse.reload() { panic!("{}", r); } hbse.handlebars_mut().register_helper( "some_helper", Box::new( |_: &Helper, _: &Handlebars, _: &Context, _: &mut RenderContext, _: &mut dyn Output| -> Result<(), RenderError> { Ok(()) }, ), ); let mut router = Router::new(); router .get("/", index, "index") .get("/mem", memory, "memory") .get("/temp", temp, "temp") .get("/plain", plain, "plain"); let mut chain = Chain::new(router); chain.link_after(hbse); chain.link_after(GzMiddleware); println!("Server running at http://localhost:3000/"); Iron::new(chain).http("localhost:3000").unwrap(); }<|fim▁end|>
name: String, pts: u16,
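# Cross-language aside (Python): GzMiddleware in the Rust row above gzips the
# fully rendered body after the handler runs. The same compress-once idea in
# Python's standard library:
import gzip

body = b'It works' * 100
compressed = gzip.compress(body)
assert gzip.decompress(compressed) == body
assert len(compressed) < len(body)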
<|file_name|>ADTFPinMessageEncoder.cpp<|end_file_name|><|fim▁begin|>#include "ADTFPinMessageEncoder.h" using namespace A2O; ADTFPinMessageEncoder::ADTFPinMessageEncoder(IAction::Ptr action, ICarMetaModel::ConstPtr carMetaModel) : _action(action) { // Create output pins const std::vector<IServoDriveConfig::ConstPtr>& servoDriveConfigs = carMetaModel->getServoDriveConfigs(); for (unsigned int i = 0; i < servoDriveConfigs.size(); i++) { _pins.push_back(OutputPin(servoDriveConfigs[i]->getEffectorName(), "tSignalValue")); } std::vector<IActuatorConfig::ConstPtr> actuatorConfigs = carMetaModel->getMotorConfigs(); for (unsigned int i = 0; i < actuatorConfigs.size(); i++) { _pins.push_back(OutputPin(actuatorConfigs[i]->getEffectorName(), "tSignalValue")); } actuatorConfigs = carMetaModel->getLightConfigs(); for (unsigned int i = 0; i < actuatorConfigs.size(); i++) { _pins.push_back(OutputPin(actuatorConfigs[i]->getEffectorName(), "tBoolSignalValue")); } actuatorConfigs = carMetaModel->getManeuverStatusConfigs(); for (unsigned int i = 0; i < actuatorConfigs.size(); i++) { _pins.push_back(OutputPin(actuatorConfigs[i]->getEffectorName(), "tDriverStruct")); } } ADTFPinMessageEncoder::~ADTFPinMessageEncoder() {<|fim▁hole|> } int ADTFPinMessageEncoder::indexOfPin(OutputPin pin) const { for (unsigned int i = 0; i < _pins.size(); i++) { if (_pins[i] == pin) { return i; } } return -1; } const std::vector<OutputPin>& ADTFPinMessageEncoder::getOutputPins() { return _pins; } bool ADTFPinMessageEncoder::encode(const OutputPin& pin, adtf::IMediaTypeDescription* mediaTypeDescription, adtf::IMediaSample* mediaSample) { int pinIndex = indexOfPin(pin); bool toTransmit = false; if (pinIndex >= 0) { cObjectPtr<adtf::IMediaSerializer> serializer; mediaTypeDescription->GetMediaSampleSerializer(&serializer); tInt size = serializer->GetDeserializedSize(); mediaSample->AllocBuffer(size); cObjectPtr<adtf::IMediaCoder> mediaCoder; tUInt32 timestamp = 0; if (pin.signalType == "tBoolSignalValue") { IBoolValueEffector::Ptr boolEffector = _action->getLightEffector(pin.name); if (boolEffector) { __adtf_sample_write_lock_mediadescription(mediaTypeDescription, mediaSample, mediaCoder); if(!mediaCoder) { return false; } tBool value = boolEffector->getValue(); mediaCoder->Set("bValue", (tVoid*)&value); mediaCoder->Set("ui32ArduinoTimestamp", (tVoid*)&timestamp); toTransmit = true; } } else if (pin.signalType == "tSignalValue") { IDoubleValueEffector::Ptr valueEffector = boost::dynamic_pointer_cast<IDoubleValueEffector>(_action->getEffector(pin.name)); if (valueEffector) { __adtf_sample_write_lock_mediadescription(mediaTypeDescription, mediaSample, mediaCoder); if(!mediaCoder) { return false; } tFloat32 value = valueEffector->getValue(); mediaCoder->Set("f32Value", (tVoid*)&value); mediaCoder->Set("ui32ArduinoTimestamp", (tVoid*)&timestamp); toTransmit = true; } } else if (pin.signalType == "tDriverStruct") { IManeuverStatusEffector::Ptr valueEffector = boost::dynamic_pointer_cast<IManeuverStatusEffector>(_action->getEffector(pin.name)); if (valueEffector) { __adtf_sample_write_lock_mediadescription(mediaTypeDescription, mediaSample, mediaCoder); if(!mediaCoder) { return false; } int state = valueEffector->getStatus(); int maneuverId = valueEffector->getManeuverId(); mediaCoder->Set("i8StateID", (tVoid*)&state); mediaCoder->Set("i16ManeuverEntry", (tVoid*)&maneuverId); toTransmit = true; } } } return toTransmit; }<|fim▁end|>
<|file_name|>StatsDReporter.java<|end_file_name|><|fim▁begin|>/** * Copyright (C) 2013 metrics-statsd contributors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.readytalk.metrics; import com.codahale.metrics.Counter; import com.codahale.metrics.Gauge; import com.codahale.metrics.Histogram; import com.codahale.metrics.Meter; import com.codahale.metrics.Metered; import com.codahale.metrics.MetricFilter; import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.ScheduledReporter; import com.codahale.metrics.Snapshot; import com.codahale.metrics.Timer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.annotation.Nullable; import javax.annotation.concurrent.NotThreadSafe; import java.io.IOException; import java.math.BigDecimal; import java.math.BigInteger; import java.util.Locale; import java.util.Map; import java.util.SortedMap; import java.util.concurrent.TimeUnit; /** * A reporter which publishes metric values to a StatsD server. * * @see <a href="https://github.com/etsy/statsd">StatsD</a> */ @NotThreadSafe public class StatsDReporter extends ScheduledReporter { private static final Logger LOG = LoggerFactory.getLogger(StatsDReporter.class); private final StatsD statsD; private final String prefix; private final String suffix; private StatsDReporter(final MetricRegistry registry, final StatsD statsD, final String prefix, final String suffix, final TimeUnit rateUnit, final TimeUnit durationUnit, final MetricFilter filter) { super(registry, "statsd-reporter", filter, rateUnit, durationUnit); this.statsD = statsD; this.prefix = prefix; this.suffix = suffix; } /** * Returns a new {@link Builder} for {@link StatsDReporter}. * * @param registry the registry to report * @return a {@link Builder} instance for a {@link StatsDReporter} */ public static Builder forRegistry(final MetricRegistry registry) { return new Builder(registry); } /** * A builder for {@link StatsDReporter} instances. Defaults to not using a prefix, * converting rates to events/second, converting durations to milliseconds, and not * filtering metrics. */ @NotThreadSafe public static final class Builder { private final MetricRegistry registry; private String prefix; private String suffix; private TimeUnit rateUnit; private TimeUnit durationUnit; private MetricFilter filter; private Builder(final MetricRegistry registry) { this.registry = registry; this.prefix = null; this.rateUnit = TimeUnit.SECONDS; this.durationUnit = TimeUnit.MILLISECONDS; this.filter = MetricFilter.ALL; } /** * Prefix all metric names with the given string. * * @param _prefix the prefix for all metric names * @return {@code this} */ public Builder prefixedWith(@Nullable final String _prefix) { this.prefix = _prefix; return this; } /** * Prefix all metric names with the given string. * * @param _suffix the prefix for all metric names * @return {@code this} */ public Builder suffixedWith(@Nullable final String _suffix) { this.suffix = _suffix; return this; } /** * Convert rates to the given time unit. 
* * @param _rateUnit a unit of time * @return {@code this} */ public Builder convertRatesTo(final TimeUnit _rateUnit) { this.rateUnit = _rateUnit; return this; } /** * Convert durations to the given time unit. * * @param _durationUnit a unit of time * @return {@code this} */ public Builder convertDurationsTo(final TimeUnit _durationUnit) { this.durationUnit = _durationUnit; return this; } /** * Only report metrics which match the given filter. * * @param _filter a {@link MetricFilter} * @return {@code this} */ public Builder filter(final MetricFilter _filter) { this.filter = _filter; return this; } /** * Builds a {@link StatsDReporter} with the given properties, sending metrics to StatsD at the given host and port. * * @param host the hostname of the StatsD server. * @param port the port of the StatsD server. This is typically 8125. * @return a {@link StatsDReporter} */ public StatsDReporter build(final String host, final int port) { return build(new StatsD(host, port)); } /** * Builds a {@link StatsDReporter} with the given properties, sending metrics using the * given {@link StatsD} client. * * @param statsD a {@link StatsD} client * @return a {@link StatsDReporter} */ public StatsDReporter build(final StatsD statsD) { return new StatsDReporter(registry, statsD, prefix, suffix, rateUnit, durationUnit, filter); } } @Override @SuppressWarnings("rawtypes") //Metrics 3.0 interface specifies the raw Gauge type<|fim▁hole|> public void report(final SortedMap<String, Gauge> gauges, final SortedMap<String, Counter> counters, final SortedMap<String, Histogram> histograms, final SortedMap<String, Meter> meters, final SortedMap<String, Timer> timers) { try { statsD.connect(); for (Map.Entry<String, Gauge> entry : gauges.entrySet()) { reportGauge(entry.getKey(), entry.getValue()); } for (Map.Entry<String, Counter> entry : counters.entrySet()) { reportCounter(entry.getKey(), entry.getValue()); } for (Map.Entry<String, Histogram> entry : histograms.entrySet()) { reportHistogram(entry.getKey(), entry.getValue()); } for (Map.Entry<String, Meter> entry : meters.entrySet()) { reportMetered(entry.getKey(), entry.getValue()); } for (Map.Entry<String, Timer> entry : timers.entrySet()) { reportTimer(entry.getKey(), entry.getValue()); } } catch (IOException e) { LOG.warn("Unable to report to StatsD", statsD, e); } finally { try { statsD.close(); } catch (IOException e) { LOG.debug("Error disconnecting from StatsD", statsD, e); } } } private void reportTimer(final String name, final Timer timer) { final Snapshot snapshot = timer.getSnapshot(); String suffixedName = suffix(name); statsD.send(prefix(suffixedName, "max"), formatNumber(convertDuration(snapshot.getMax()))); statsD.send(prefix(suffixedName, "mean"), formatNumber(convertDuration(snapshot.getMean()))); statsD.send(prefix(suffixedName, "min"), formatNumber(convertDuration(snapshot.getMin()))); statsD.send(prefix(suffixedName, "stddev"), formatNumber(convertDuration(snapshot.getStdDev()))); statsD.send(prefix(suffixedName, "p50"), formatNumber(convertDuration(snapshot.getMedian()))); statsD.send(prefix(suffixedName, "p75"), formatNumber(convertDuration(snapshot.get75thPercentile()))); statsD.send(prefix(suffixedName, "p95"), formatNumber(convertDuration(snapshot.get95thPercentile()))); statsD.send(prefix(suffixedName, "p98"), formatNumber(convertDuration(snapshot.get98thPercentile()))); statsD.send(prefix(suffixedName, "p99"), formatNumber(convertDuration(snapshot.get99thPercentile()))); statsD.send(prefix(suffixedName, "p999"), 
formatNumber(convertDuration(snapshot.get999thPercentile()))); reportMetered(name, timer); } private void reportMetered(final String name, final Metered meter) { String suffixedName = suffix(name); statsD.send(prefix(suffixedName, "samples"), formatNumber(meter.getCount())); statsD.send(prefix(suffixedName, "m1_rate"), formatNumber(convertRate(meter.getOneMinuteRate()))); statsD.send(prefix(suffixedName, "m5_rate"), formatNumber(convertRate(meter.getFiveMinuteRate()))); statsD.send(prefix(suffixedName, "m15_rate"), formatNumber(convertRate(meter.getFifteenMinuteRate()))); statsD.send(prefix(suffixedName, "mean_rate"), formatNumber(convertRate(meter.getMeanRate()))); } private void reportHistogram(final String name, final Histogram histogram) { final Snapshot snapshot = histogram.getSnapshot(); String suffixedName = suffix(name); statsD.send(prefix(suffixedName, "samples"), formatNumber(histogram.getCount())); statsD.send(prefix(suffixedName, "max"), formatNumber(snapshot.getMax())); statsD.send(prefix(suffixedName, "mean"), formatNumber(snapshot.getMean())); statsD.send(prefix(suffixedName, "min"), formatNumber(snapshot.getMin())); statsD.send(prefix(suffixedName, "stddev"), formatNumber(snapshot.getStdDev())); statsD.send(prefix(suffixedName, "p50"), formatNumber(snapshot.getMedian())); statsD.send(prefix(suffixedName, "p75"), formatNumber(snapshot.get75thPercentile())); statsD.send(prefix(suffixedName, "p95"), formatNumber(snapshot.get95thPercentile())); statsD.send(prefix(suffixedName, "p98"), formatNumber(snapshot.get98thPercentile())); statsD.send(prefix(suffixedName, "p99"), formatNumber(snapshot.get99thPercentile())); statsD.send(prefix(suffixedName, "p999"), formatNumber(snapshot.get999thPercentile())); } private void reportCounter(final String name, final Counter counter) { statsD.send(prefix(suffix(name)), formatNumber(counter.getCount())); } @SuppressWarnings("rawtypes") //Metrics 3.0 passes us the raw Gauge type private void reportGauge(final String name, final Gauge gauge) { final String value = format(gauge.getValue()); if (value != null) { statsD.send(prefix(suffix(name)), value); } } @Nullable private String format(final Object o) { if (o instanceof Float) { return formatNumber(((Float) o).doubleValue()); } else if (o instanceof Double) { return formatNumber((Double) o); } else if (o instanceof Byte) { return formatNumber(((Byte) o).longValue()); } else if (o instanceof Short) { return formatNumber(((Short) o).longValue()); } else if (o instanceof Integer) { return formatNumber(((Integer) o).longValue()); } else if (o instanceof Long) { return formatNumber((Long) o); } else if (o instanceof BigInteger) { return formatNumber((BigInteger) o); } else if (o instanceof BigDecimal) { return formatNumber(((BigDecimal) o).doubleValue()); } return null; } private String prefix(final String... components) { return MetricRegistry.name(prefix, components); } private String suffix(String name) { if (suffix == null || suffix.isEmpty() || !name.contains("%s")) { return name; } return String.format(name, suffix); } private String formatNumber(final BigInteger n) { return String.valueOf(n); } private String formatNumber(final long n) { return Long.toString(n); } private String formatNumber(final double v) { return String.format(Locale.US, "%2.2f", v); } }<|fim▁end|>
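# Aside (Python, illustrative): every statsD.send(name, value) call in the
# reporter above ultimately becomes one plain-text StatsD datagram of the
# form "name:value|type" (per the etsy statsd protocol); a gauge, for example:
def statsd_gauge(name, value):
    return '%s:%s|g' % (name, value)

assert statsd_gauge('app.timer.p999', 12.5) == 'app.timer.p999:12.5|g'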
<|file_name|>en-GB.js<|end_file_name|><|fim▁begin|>module.exports = { site: { title: 'i18n node example', description: 'An example for this module on node' }, bankBalance: 'Hi {1}, your balance is {2}.', transports: { yacht: 'Yacht', bike: 'Bike'<|fim▁hole|><|fim▁end|>
}, modeOfTransport: 'Your preferred mode of transport is by {1}.' };
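# Aside (Python, illustrative): the en-GB strings above use 1-based {n}
# placeholders. A minimal substitution that mimics (not reproduces) the
# i18n library's behaviour:
import re

def fill(template, *args):
    return re.sub(r'\{(\d+)\}', lambda m: str(args[int(m.group(1)) - 1]), template)

assert fill('Hi {1}, your balance is {2}.', 'Sam', 100) == 'Hi Sam, your balance is 100.'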
<|file_name|>example.go<|end_file_name|><|fim▁begin|>package isbn import ( "errors" "math" "strconv" "unicode" ) func IsValidISBN(isbn string) bool { isbn = dropHyphen(isbn) ary, err := strToSlice(isbn) if len(ary) != 10 || err != nil { return false } return calcCheckDigit(ary) } func dropHyphen(isbn string) string { var result string for _, char := range isbn { if char == '-' { continue } result += string(char) } return result } func strToSlice(isbn string) (result []int, err error) { for pos, char := range isbn { if unicode.IsLetter(char) && (char != 'X' || pos != 9) { err = errors.New("invalid character") return } else if char == 'X' {<|fim▁hole|> result = append(result, 10) } else { i, _ := strconv.Atoi(string(char)) result = append(result, i) } } return } func calcCheckDigit(isbn []int) bool { var pool int for idx, value := range isbn { pool += int(math.Abs(float64(idx)-10)) * value } result := pool % 11 return result == 0 }<|fim▁end|>
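# Aside (Python, illustrative): calcCheckDigit in the Go row above weights
# ISBN-10 digits 10 down to 1 (a trailing 'X' counting as 10) and requires
# the weighted sum to be divisible by 11. The same rule in one line:
def isbn10_ok(digits):
    return sum((10 - i) * d for i, d in enumerate(digits)) % 11 == 0

assert isbn10_ok([3, 5, 9, 8, 2, 1, 5, 0, 8, 8])      # 3-598-21508-8
assert not isbn10_ok([3, 5, 9, 8, 2, 1, 5, 0, 8, 9])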
<|file_name|>database.js<|end_file_name|><|fim▁begin|>// config/database.js module.exports = { 'secret': 'puneetvashisht', 'url' : 'mongodb://localhost/userdb' // looks like mongodb://<user>:<pass>@mongo.onmodulus.net:27017/Mikha4ot <|fim▁hole|><|fim▁end|>
};
<|file_name|>ASTStringNode.java<|end_file_name|><|fim▁begin|>/** Copyright 2010 Christian Kästner This file is part of CIDE. CIDE is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 3 of the License. CIDE is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with CIDE. If not, see <http://www.gnu.org/licenses/>. See http://www.fosd.de/cide/ for further information. */ package cide.gast; import java.util.ArrayList; public class ASTStringNode extends ASTNode { private String value; <|fim▁hole|> } public String getValue() { return value; } public String toString() { return value; } @Override public IASTNode deepCopy() { return new ASTStringNode(new String(value), firstToken); } @Override public String render() { return getValue(); } }<|fim▁end|>
public ASTStringNode(String value, IToken token) { super(new ArrayList<Property>(), token, token); this.value = value;
<|file_name|>IrcMessageListener.java<|end_file_name|><|fim▁begin|>package net.wayward_realms.waywardchat.irc; import net.wayward_realms.waywardchat.WaywardChat; import org.pircbotx.Channel; import org.pircbotx.PircBotX; import org.pircbotx.User; import org.pircbotx.hooks.ListenerAdapter; import org.pircbotx.hooks.events.MessageEvent; public class IrcMessageListener extends ListenerAdapter<PircBotX> {<|fim▁hole|> private WaywardChat plugin; public IrcMessageListener(WaywardChat plugin) { this.plugin = plugin; } @Override public void onMessage(MessageEvent<PircBotX> event) { final User user = event.getUser(); final Channel channel = event.getChannel(); final String message = event.getMessage(); if (user != plugin.getIrcBot().getUserBot()) { if (!message.startsWith("!")) { plugin.handleChat(user, channel, message); } } } }<|fim▁end|>
<|file_name|>add-milestone.js<|end_file_name|><|fim▁begin|>// Get all of our fake login data //var login = require('../login.json'); exports.view = function(req, res){ <|fim▁hole|> res.render('add-milestone', {'time' : req.cookies.startTime, 'goalname': goalname}); }; exports.timePost = function(req,res){ var startTime = req.params.startTime; res.cookie('time', startTime, { maxAge: 900000 }); }<|fim▁end|>
var goalname = req.params.goalname;
<|file_name|>db.go<|end_file_name|><|fim▁begin|>package config<|fim▁hole|>var ()<|fim▁end|>
<|file_name|>step21.min.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1 oid sha256:bf2580cc3dbb5c69564e5338a736b949ba7f1c7d567f37e58589d9f573c7abbb<|fim▁hole|>size 481<|fim▁end|>
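The step21.min.js row is not JavaScript at all but a Git LFS pointer: the three-line stand-in (version URL, oid sha256:<hex>, size <bytes>) that LFS commits in place of the real blob. A small Python sketch that parses this well-known pointer format (the helper name is invented for illustration):

    def parse_lfs_pointer(text):
        # Every pointer line is "key value"; the keys here are version, oid, size.
        fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
        algo, _, digest = fields["oid"].partition(":")
        return {"version": fields["version"], "algo": algo,
                "digest": digest, "size": int(fields["size"])}

    pointer = ("version https://git-lfs.github.com/spec/v1\n"
               "oid sha256:bf2580cc3dbb5c69564e5338a736b949ba7f1c7d567f37e58589d9f573c7abbb\n"
               "size 481")
    info = parse_lfs_pointer(pointer)
    assert info["algo"] == "sha256" and info["size"] == 481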
<|file_name|>data_types.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from meerkat_abacus.config import config def data_types(param_config=config): with open(param_config.config_directory + param_config.country_config["types_file"], "r", encoding='utf-8', errors="replace") as f: DATA_TYPES_DICT = [_dict for _dict in csv.DictReader(f)] return DATA_TYPES_DICT def data_types_for_form_name(form_name, param_config=config): return [data_type for data_type in data_types(param_config=param_config) if form_name == data_type['form']] DATA_TYPES_DICT = data_types()<|fim▁end|>
import csv
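The data_types.py row reads a CSV of data-type definitions with csv.DictReader (one dict per data row, keyed by the header) and filters those dicts by form name; the completion restores the missing import csv. A self-contained sketch of the same pattern, with a made-up in-memory CSV and column names, since the real types_file is not part of the dataset:

    import csv, io

    TYPES_CSV = "type,form\ncase,demo_case\nregister,demo_register\n"  # hypothetical columns

    def load_types(text):
        # DictReader yields {"type": ..., "form": ...} per data row.
        return list(csv.DictReader(io.StringIO(text)))

    def types_for_form(form_name, types):
        return [t for t in types if t["form"] == form_name]

    types = load_types(TYPES_CSV)
    assert types_for_form("demo_case", types) == [{"type": "case", "form": "demo_case"}]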
<|file_name|>random_ext.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # ============================================================================= # @file ostap/math/random_ext.py # The simple extension for the standard python module random # @author Vanya BELYAEV # @date 2012-04-28<|fim▁hole|>- poisson (missing in python random module) """ # ============================================================================= __author__ = "Vanya BELYAEV [email protected]" __version__ = "$Revision$" __date__ = "2012-04-28" # ============================================================================= __all__ = ( 'bifur' , ## bifurcated gaussian 've_gauss' , ## gaussian using ValueWithError construction 'poisson' , ## poisson (missing in python random module) ) # ============================================================================= import sys from builtins import range # ============================================================================= from ostap.logger.logger import getLogger # ============================================================================= if '__main__' == __name__ : logger = getLogger ( 'ostap.math.random_ext') else : logger = getLogger ( __name__ ) # ============================================================================= ## generate bifurcated gaussian # @code # value = bifur ( 0 , -1 , +2 ) # @endcode def _bifur_ ( self , mu , sigma1 , sigma2 ) : """Generate the bifurcated gaussian >>> value = bifur ( 0 , -1 , +2 ) """ if sigma1 * sigma2 > 0.0 : raise ValueError( 'Lower and upper errors must have opposite signs' ) _as1 = abs ( float ( sigma1 ) ) _as2 = abs ( float ( sigma2 ) ) _frac = _as1 / ( _as1 + _as2 ) _aux = self.random () _gau = abs ( self.gauss ( 0 , 1 ) ) if _aux <= _frac : return mu + sigma1 * _gau else : return mu + sigma2 * _gau # ============================================================================== _fmin = 1000 * sys.float_info.min # ============================================================================= ## generate Cauchy random numbers # - rely on the distribution of the ratio for two Gaussian variables # @see https://en.wikipedia.org/wiki/Cauchy_distribution def _cauchy_ ( self , mu , gamma ) : """Generate Cauchy random numbers - rely on the distribution of the ratio for two Gaussian variables - see https://en.wikipedia.org/wiki/Cauchy_distribution """ g1 = self.gauss ( 0.0 , 1.0 ) while abs ( g1 ) < _fmin : g1 = self.gauss ( 0.0 , 1.0 ) g2 = self.gauss ( 0.0 , 1.0 ) return 1.0 * mu + ( 1.0 * g2 / g1 ) * gamma # ============================================================================= ## generate bifurcated gaussian using Value # @see Ostap::Math::ValueWithError def _ve_gauss_ ( self , val ) : """Generate the gaussian according to Ostap.Math.ValueWithError >>> ve = VE ( 1 , 2 ) >>> value = ve_gauss ( ve ) """ mean = val.value () sigma = val.error () return self.gauss ( mean , sigma ) # ============================================================================= _poisson = None if not _poisson : try : from numpy.random import poisson as _poisson def _poisson_ ( self , mu ) : return _poisson ( mu ) logger.debug ('use numpy.random.poisson') except ImportError : pass if not _poisson : try : from scipy.random import poisson as _poisson def _poisson_ ( self , mu ) : return _poisson ( mu ) logger.debug ('use scipy.random.poisson') except ImportError : pass if not _poisson : logger.debug ('Use home-made replacement for poisson') _MAX = 30.0 import math _sqrt = math.sqrt _exp = math.exp import ROOT,cppyy _round = cppyy.gbl.Ostap.Math.round ## hand-made replacement for poisson random number generator def _poisson_ ( self , mu ) : mu = float ( mu ) if _MAX <= mu : r = -1 while r < 0 : r = self.gauss ( mu , _sqrt( mu ) ) return max ( _round ( r ) , 0 ) x = 0 p = _exp ( -mu ) s = p u = self.uniform ( 0 , 1 ) while s < u : x += 1 p *= mu / x s += p return x import random if not hasattr ( random.Random , 'bifur' ) : random.Random.bifur = _bifur_ if not hasattr ( random , 'bifur' ) : random.bifur = random._inst.bifur if not hasattr ( random.Random , 've_gauss' ) : random.Random.ve_gauss = _ve_gauss_ if not hasattr ( random , 've_gauss' ) : random.ve_gauss = random._inst.ve_gauss if not hasattr ( random.Random , 'poisson' ) : random.Random.poisson = _poisson_ if not hasattr ( random , 'poisson' ) : random.poisson = random._inst.poisson if not hasattr ( random.Random , 'cauchy' ) : random.Random.cauchy = _cauchy_ if not hasattr ( random , 'cauchy' ) : random.cauchy = random._inst.cauchy bifur = random.bifur ve_gauss = random.ve_gauss poisson = random.poisson cauchy = random.cauchy # ============================================================================= if '__main__' == __name__ : from ostap.utils.docme import docme docme ( __name__ , logger = logger ) from ostap.stats.counters import SE cnt = SE() mu = 0.4 for i in range(10000) : cnt += poisson(0.4) logger.info ( 'Poisson(mu=%s) : %s' % ( mu , cnt ) ) logger.info ( 80*'*' ) # ============================================================================= # The END # =============================================================================<|fim▁end|>
# ============================================================================= """ The simple extension for the standard python module random - bifurcated gaussian - gaussian using Ostap.Math.ValueWithError as argument
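The random_ext.py row patches random.Random with bifurcated-Gaussian, Cauchy and Poisson generators; its home-made Poisson uses inversion by sequential search for small mu (accumulate p(k) = e^(-mu) * mu^k / k! until the running sum passes a uniform draw) and falls back to a rounded, non-negative Gaussian for mu >= 30. A standalone sketch of the small-mu branch, stripped of the ostap/ROOT plumbing in the row (the function name is illustrative):

    import math, random

    def poisson_small_mu(mu, rng=random):
        # Inversion by sequential search: walk the Poisson(mu) CDF
        # until it exceeds one uniform draw; practical for small mu.
        u = rng.uniform(0.0, 1.0)
        k, p = 0, math.exp(-mu)      # p = P(X = 0)
        s = p
        while s < u:
            k += 1
            p *= mu / k              # P(X = k) = P(X = k - 1) * mu / k
            s += p
        return k

    draws = [poisson_small_mu(0.4) for _ in range(10000)]
    print(sum(draws) / len(draws))   # sample mean should sit near mu = 0.4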