prompt: large_string (lengths 70 to 991k)
completion: large_string (lengths 0 to 1.02k)
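Each record below pairs a prompt with a completion: the prompt is a source file wrapped in `<|file_name|>` / `<|fim▁begin|>` / `<|fim▁end|>` markers, with a single `<|fim▁hole|>` where a span was cut out, and the completion holds that cut span (some completions are empty, consistent with the 0 minimum above). A minimal sketch of reassembling such a record; the function name, the Option-based error handling, and the toy record in `main` are illustrative assumptions, not part of any dataset tooling.

```rust
/// Reassemble the file body from a FIM record by splicing the completion
/// back into the hole. The marker strings are copied from the records
/// below; everything else here is an assumed, simplified layout.
fn splice_fim(prompt: &str, completion: &str) -> Option<String> {
    // Keep only the text between <|fim▁begin|> and <|fim▁end|>.
    let body = prompt.split("<|fim▁begin|>").nth(1)?;
    let body = body.split("<|fim▁end|>").next()?;
    // Replace the single hole marker with the completion.
    Some(body.replacen("<|fim▁hole|>", completion, 1))
}

fn main() {
    // A toy record in the same shape as the samples below.
    let prompt =
        "<|file_name|>hi.py<|end_file_name|><|fim▁begin|>def hi():<|fim▁hole|>hi()<|fim▁end|>";
    let completion = " print('hi') ";
    assert_eq!(
        splice_fim(prompt, completion).as_deref(),
        Some("def hi(): print('hi') hi()")
    );
}
```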
<|file_name|>app.module.ts<|end_file_name|><|fim▁begin|>import { BrowserModule } from '@angular/platform-browser'; import { BrowserAnimationsModule } from '@angular/platform-browser/animations'; import { NgModule } from '@angular/core'; import { HttpClientModule } from '@angular/common/http'; import { AngularFireModule } from 'angularfire2'; import { AngularFireDatabaseModule } from 'angularfire2/database'; import { environment } from '../environments/environment'; import { AppComponent } from './app.component'; import { AppRoutingModule } from './app-routing.module'; import { MaterialImportsModule } from './material-imports/material-imports.module'; import { ApiService } from './services/api.service'; import { HomeComponent } from './components/home/home.component'; import { BarChartComponent } from './components/bar-chart/bar-chart.component'; import { TopListComponent } from './components/top-list/top-list.component'; import { TableComponent } from './components/table/table.component'; import { CommaseparatorPipe } from './pipes/commaseparator.pipe';<|fim▁hole|>import { AboutComponent } from './components/about/about.component'; @NgModule({ declarations: [ AppComponent, HomeComponent, BarChartComponent, TopListComponent, TableComponent, CommaseparatorPipe, ShorthandnumberPipe, LineChartComponent, ToolbarComponent, AboutComponent ], imports: [ BrowserModule, BrowserAnimationsModule, AngularFireModule.initializeApp(environment.firebase), AngularFireDatabaseModule, HttpClientModule, AppRoutingModule, MaterialImportsModule ], providers: [ApiService, DataService], bootstrap: [AppComponent] }) export class AppModule { }<|fim▁end|>
import { ShorthandnumberPipe } from './pipes/shorthandnumber.pipe'; import { LineChartComponent } from './components/line-chart/line-chart.component'; import { DataService } from './services/data.service'; import { ToolbarComponent } from './components/toolbar/toolbar.component';
<|file_name|>tools.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from frappe import _<|fim▁end|>
from __future__ import unicode_literals
<|file_name|>interfaces.go<|end_file_name|><|fim▁begin|>// Copyright 2016 Canonical Ltd. // Licensed under the LGPLv3, see LICENCE file for details. package gomaasapi import "github.com/juju/utils/set" const ( // Capability constants. NetworksManagement = "networks-management" StaticIPAddresses = "static-ipaddresses" IPv6DeploymentUbuntu = "ipv6-deployment-ubuntu" DevicesManagement = "devices-management" StorageDeploymentUbuntu = "storage-deployment-ubuntu" NetworkDeploymentUbuntu = "network-deployment-ubuntu" ) // Controller represents an API connection to a MAAS Controller. Since the API // is restful, there is no long held connection to the API server, but instead // HTTP calls are made and JSON response structures parsed. type Controller interface { // Capabilities returns a set of capabilities as defined by the string // constants. Capabilities() set.Strings BootResources() ([]BootResource, error) // Fabrics returns the list of Fabrics defined in the MAAS controller. Fabrics() ([]Fabric, error) // Spaces returns the list of Spaces defined in the MAAS controller. Spaces() ([]Space, error) // Zones lists all the zones known to the MAAS controller. Zones() ([]Zone, error) // Machines returns a list of machines that match the params. Machines(MachinesArgs) ([]Machine, error) // AllocateMachine will attempt to allocate a machine to the user. // If successful, the allocated machine is returned.<|fim▁hole|> // from the user making them available to be allocated again. ReleaseMachines(ReleaseMachinesArgs) error // Devices returns a list of devices that match the params. Devices(DevicesArgs) ([]Device, error) // CreateDevice creates and returns a new Device. CreateDevice(CreateDeviceArgs) (Device, error) // Files returns all the files that match the specified prefix. Files(prefix string) ([]File, error) // Return a single file by its filename. GetFile(filename string) (File, error) // AddFile adds or replaces the content of the specified filename. // If or when the MAAS api is able to return metadata about a single // file without sending the content of the file, we can return a File // instance here too. AddFile(AddFileArgs) error } // File represents a file stored in the MAAS controller. type File interface { // Filename is the name of the file. No path, just the filename. Filename() string // AnonymousURL is a URL that can be used to retrieve the conents of the // file without credentials. AnonymousURL() string // Delete removes the file from the MAAS controller. Delete() error // ReadAll returns the content of the file. ReadAll() ([]byte, error) } // Fabric represents a set of interconnected VLANs that are capable of mutual // communication. A fabric can be thought of as a logical grouping in which // VLANs can be considered unique. // // For example, a distributed network may have a fabric in London containing // VLAN 100, while a separate fabric in San Francisco may contain a VLAN 100, // whose attached subnets are completely different and unrelated. type Fabric interface { ID() int Name() string ClassType() string VLANs() []VLAN } // VLAN represents an instance of a Virtual LAN. VLANs are a common way to // create logically separate networks using the same physical infrastructure. // // Managed switches can assign VLANs to each port in either a “tagged” or an // “untagged” manner. A VLAN is said to be “untagged” on a particular port when // it is the default VLAN for that port, and requires no special configuration // in order to access. 
// // “Tagged” VLANs (traditionally used by network administrators in order to // aggregate multiple networks over inter-switch “trunk” lines) can also be used // with nodes in MAAS. That is, if a switch port is configured such that // “tagged” VLAN frames can be sent and received by a MAAS node, that MAAS node // can be configured to automatically bring up VLAN interfaces, so that the // deployed node can make use of them. // // A “Default VLAN” is created for every Fabric, to which every new VLAN-aware // object in the fabric will be associated to by default (unless otherwise // specified). type VLAN interface { ID() int Name() string Fabric() string // VID is the VLAN ID. eth0.10 -> VID = 10. VID() int // MTU (maximum transmission unit) is the largest size packet or frame, // specified in octets (eight-bit bytes), that can be sent. MTU() int DHCP() bool PrimaryRack() string SecondaryRack() string } // Zone represents a physical zone that a Machine is in. The meaning of a // physical zone is up to you: it could identify e.g. a server rack, a network, // or a data centre. Users can then allocate nodes from specific physical zones, // to suit their redundancy or performance requirements. type Zone interface { Name() string Description() string } // BootResource is the bomb... find something to say here. type BootResource interface { ID() int Name() string Type() string Architecture() string SubArchitectures() set.Strings KernelFlavor() string } // Device represents some form of device in MAAS. type Device interface { // TODO: add domain SystemID() string Hostname() string FQDN() string IPAddresses() []string Zone() Zone // Parent returns the SystemID of the Parent. Most often this will be a // Machine. Parent() string // Owner is the username of the user that created the device. Owner() string // InterfaceSet returns all the interfaces for the Device. InterfaceSet() []Interface // CreateInterface will create a physical interface for this machine. CreateInterface(CreateInterfaceArgs) (Interface, error) // Delete will remove this Device. Delete() error } // Machine represents a physical machine. type Machine interface { SystemID() string Hostname() string FQDN() string Tags() []string OperatingSystem() string DistroSeries() string Architecture() string Memory() int CPUCount() int IPAddresses() []string PowerState() string // Devices returns a list of devices that match the params and have // this Machine as the parent. Devices(DevicesArgs) ([]Device, error) // Consider bundling the status values into a single struct. // but need to check for consistent representation if exposed on other // entities. StatusName() string StatusMessage() string // BootInterface returns the interface that was used to boot the Machine. BootInterface() Interface // InterfaceSet returns all the interfaces for the Machine. InterfaceSet() []Interface // Interface returns the interface for the machine that matches the id // specified. If there is no match, nil is returned. Interface(id int) Interface // PhysicalBlockDevices returns all the physical block devices on the machine. PhysicalBlockDevices() []BlockDevice // PhysicalBlockDevice returns the physical block device for the machine // that matches the id specified. If there is no match, nil is returned. PhysicalBlockDevice(id int) BlockDevice // BlockDevices returns all the physical and virtual block devices on the machine. BlockDevices() []BlockDevice Zone() Zone // Start the machine and install the operating system specified in the args. 
Start(StartArgs) error // CreateDevice creates a new Device with this Machine as the parent. // The device will have one interface that is linked to the specified subnet. CreateDevice(CreateMachineDeviceArgs) (Device, error) } // Space is a name for a collection of Subnets. type Space interface { ID() int Name() string Subnets() []Subnet } // Subnet refers to an IP range on a VLAN. type Subnet interface { ID() int Name() string Space() string VLAN() VLAN Gateway() string CIDR() string // dns_mode // DNSServers is a list of ip addresses of the DNS servers for the subnet. // This list may be empty. DNSServers() []string } // Interface represents a physical or virtual network interface on a Machine. type Interface interface { ID() int Name() string // The parents of an interface are the names of interfaces that must exist // for this interface to exist. For example a parent of "eth0.100" would be // "eth0". Parents may be empty. Parents() []string // The children interfaces are the names of those that are dependent on this // interface existing. Children may be empty. Children() []string Type() string Enabled() bool Tags() []string VLAN() VLAN Links() []Link MACAddress() string EffectiveMTU() int // Params is a JSON field, and defaults to an empty string, but is almost // always a JSON object in practice. Gleefully ignoring it until we need it. // Update the name, mac address or VLAN. Update(UpdateInterfaceArgs) error // Delete this interface. Delete() error // LinkSubnet will attempt to make this interface available on the specified // Subnet. LinkSubnet(LinkSubnetArgs) error // UnlinkSubnet will remove the Link to the subnet, and release the IP // address associated if there is one. UnlinkSubnet(Subnet) error } // Link represents a network link between an Interface and a Subnet. type Link interface { ID() int Mode() string Subnet() Subnet // IPAddress returns the address if one has been assigned. // If unavailble, the address will be empty. IPAddress() string } // FileSystem represents a formatted filesystem mounted at a location. type FileSystem interface { // Type is the format type, e.g. "ext4". Type() string MountPoint() string Label() string UUID() string } // Partition represents a partition of a block device. It may be mounted // as a filesystem. type Partition interface { ID() int Path() string // FileSystem may be nil if not mounted. FileSystem() FileSystem UUID() string // UsedFor is a human readable string. UsedFor() string // Size is the number of bytes in the partition. Size() uint64 } // BlockDevice represents an entire block device on the machine. type BlockDevice interface { ID() int Name() string Model() string Path() string UsedFor() string Tags() []string BlockSize() uint64 UsedSize() uint64 Size() uint64 Partitions() []Partition // There are some other attributes for block devices, but we can // expose them on an as needed basis. }<|fim▁end|>
AllocateMachine(AllocateMachineArgs) (Machine, ConstraintMatches, error) // ReleaseMachines will stop the specified machines, and release them
<|file_name|>UserPrincipal.java<|end_file_name|><|fim▁begin|>/* * Copyright 2013 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package leap.core.security; import java.util.Collections; import java.util.Map; public interface UserPrincipal extends Principal { /** * Returns the user's display name. */ String getName(); /** * Returns the user's login name. */ String getLoginName(); <|fim▁hole|> * Returns the details property. */ default Map<String, Object> getProperties() { return Collections.emptyMap(); } }<|fim▁end|>
/**
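The `UserPrincipal` sample above ends in a Java 8 default interface method, and the completion supplies the Javadoc opener for it. For comparison, a rough Rust sketch of the same pattern (a trait method with a default body); the trait, struct, and field names are hypothetical, not taken from the sample.

```rust
use std::collections::HashMap;

/// Hypothetical Rust counterpart of the Java interface above: implementors
/// must supply the two name accessors, while `properties` has a default
/// body returning an empty map, mirroring `default ... getProperties()`.
trait UserPrincipal {
    fn name(&self) -> &str;       // display name
    fn login_name(&self) -> &str; // login name
    fn properties(&self) -> HashMap<String, String> {
        HashMap::new() // like Java's Collections.emptyMap()
    }
}

struct Account {
    display: String,
    login: String,
}

impl UserPrincipal for Account {
    fn name(&self) -> &str { &self.display }
    fn login_name(&self) -> &str { &self.login }
    // `properties` is inherited from the trait's default implementation.
}
```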
<|file_name|>Rgaa32017Rule100901Test.java<|end_file_name|><|fim▁begin|>///* // * Tanaguru - Automated webpage assessment // * Copyright (C) 2008-2017 Tanaguru.org // * // * This program is free software: you can redistribute it and/or modify // * it under the terms of the GNU Affero General Public License as // * published by the Free Software Foundation, either version 3 of the // * License, or (at your option) any later version. // * // * This program is distributed in the hope that it will be useful, // * but WITHOUT ANY WARRANTY; without even the implied warranty of // * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // * GNU Affero General Public License for more details. // * // * You should have received a copy of the GNU Affero General Public License // * along with this program. If not, see <http://www.gnu.org/licenses/>. // * // * Contact us by mail: tanaguru AT tanaguru DOT org // */ //package org.tanaguru.rules.rgaa32017; // //import org.apache.commons.lang3.tuple.ImmutablePair; //import org.tanaguru.entity.audit.ProcessResult;<|fim▁hole|>//import org.tanaguru.entity.audit.TestSolution; //import org.tanaguru.rules.keystore.HtmlElementStore; //import org.tanaguru.rules.keystore.RemarkMessageStore; //import org.tanaguru.rules.rgaa32017.test.Rgaa32017RuleImplementationTestCase; // ///** // * Unit test class for the implementation of the rule 10-9-1 of the referential Rgaa 3-2017. // * // * @author // */ //public class Rgaa32017Rule100901Test extends Rgaa32017RuleImplementationTestCase { // // /** // * Default constructor // * @param testName // */ // public Rgaa32017Rule100901Test (String testName){ // super(testName); // } // // @Override // protected void setUpRuleImplementationClassName() { // setRuleImplementationClassName( // "org.tanaguru.rules.rgaa32017.Rgaa32017Rule100901"); // } // // @Override // protected void setUpWebResourceMap() { // addWebResource("Rgaa32017.Test.10.9.1-1Passed-01"); // addWebResource("Rgaa32017.Test.10.9.1-2Failed-01"); // addWebResource("Rgaa32017.Test.10.9.1-2Failed-02"); // // addWebResource("Rgaa32017.Test.10.9.1-3NMI-01"); //// addWebResource("Rgaa32017.Test.10.9.1-4NA-01"); // } // // @Override // protected void setProcess() { // //---------------------------------------------------------------------- // //------------------------------1Passed-01------------------------------ // //---------------------------------------------------------------------- // // checkResultIsPassed(processPageTest("Rgaa32017.Test.10.9.1-1Passed-01"), 0); // // //---------------------------------------------------------------------- // //------------------------------2Failed-01------------------------------ // //---------------------------------------------------------------------- // ProcessResult processResult = processPageTest("Rgaa32017.Test.10.9.1-2Failed-01"); // checkResultIsFailed(processResult, 1, 1); //// checkRemarkIsPresent( //// processResult, //// TestSolution.FAILED, //// CHECK_IF_USER_HAVE_MECHANISM_TO_DELETE_JUSTIFY_TEXT_ALIGN_MSG, //// "h1", //// 1, //// new ImmutablePair("#ExtractedAttributeAsEvidence", "#ExtractedAttributeValue")); // //---------------------------------------------------------------------- // //------------------------------2Failed-02------------------------------ // //---------------------------------------------------------------------- // processResult = processPageTest("Rgaa32017.Test.10.9.1-2Failed-02"); // checkResultIsFailed(processResult, 1, 1); //// checkRemarkIsPresent( //// processResult, //// 
TestSolution.FAILED, //// RemarkMessageStore.CHECK_IF_USER_HAVE_MECHANISM_TO_DELETE_JUSTIFY_TEXT_ALIGN_MSG, //// HtmlElementStore.P_ELEMENT, //// 1, //// new ImmutablePair("#ExtractedAttributeAsEvidence", "#ExtractedAttributeValue")); // // //---------------------------------------------------------------------- // //------------------------------3NMI-01--------------------------------- // //---------------------------------------------------------------------- //// ProcessResult processResult = processPageTest("Rgaa32017.Test.10.9.1-3NMI-01"); //// checkResultIsNotTested(processResult); // temporary result to make the result buildable before implementation //// checkResultIsPreQualified(processResult, 1, 1); //// checkRemarkIsPresent( //// processResult, //// TestSolution.NEED_MORE_INFO, //// CHECK_IF_USER_HAVE_MECHANISM_TO_DELETE_JUSTIFY_TEXT_ALIGN_MSG, //// "p", //// 1); // // // //---------------------------------------------------------------------- // //------------------------------4NA-01------------------------------ // //---------------------------------------------------------------------- //// checkResultIsNotApplicable(processPageTest("Rgaa32017.Test.10.9.1-4NA-01")); // } // // @Override // protected void setConsolidate() { // // // The consolidate method can be removed when real implementation is done. // // The assertions are automatically tested regarding the file names by // // the abstract parent class //// assertEquals(TestSolution.NOT_TESTED, //// consolidate("Rgaa32017.Test.10.9.1-3NMI-01").getValue()); // } // //}<|fim▁end|>
<|file_name|>isomorphism.rs<|end_file_name|><|fim▁begin|>use std::marker; use fixedbitset::FixedBitSet; use super::{ EdgeType, Incoming, }; use super::graph::{ Graph, IndexType, NodeIndex, }; use super::visit::GetAdjacencyMatrix; #[derive(Debug)] struct Vf2State<Ty, Ix> { /// The current mapping M(s) of nodes from G0 → G1 and G1 → G0, /// NodeIndex::end() for no mapping. mapping: Vec<NodeIndex<Ix>>, /// out[i] is non-zero if i is in either M_0(s) or Tout_0(s) /// These are all the next vertices that are not mapped yet, but /// have an outgoing edge from the mapping. out: Vec<usize>, /// ins[i] is non-zero if i is in either M_0(s) or Tin_0(s) /// These are all the incoming vertices, those not mapped yet, but /// have an edge from them into the mapping. /// Unused if graph is undirected -- it's identical with out in that case. ins: Vec<usize>, out_size: usize, ins_size: usize, adjacency_matrix: FixedBitSet, generation: usize, _etype: marker::PhantomData<Ty>, } impl<Ty, Ix> Vf2State<Ty, Ix> where Ty: EdgeType, Ix: IndexType, { pub fn new<N, E>(g: &Graph<N, E, Ty, Ix>) -> Self { let c0 = g.node_count(); let mut state = Vf2State { mapping: Vec::with_capacity(c0), out: Vec::with_capacity(c0), ins: Vec::with_capacity(c0 * (g.is_directed() as usize)), out_size: 0, ins_size: 0, adjacency_matrix: g.adjacency_matrix(), generation: 0, _etype: marker::PhantomData, }; for _ in 0..c0 { state.mapping.push(NodeIndex::end()); state.out.push(0); if Ty::is_directed() { state.ins.push(0); } } state } /// Return **true** if we have a complete mapping pub fn is_complete(&self) -> bool { self.generation == self.mapping.len() } /// Add mapping **from** <-> **to** to the state. pub fn push_mapping<N, E>(&mut self, from: NodeIndex<Ix>, to: NodeIndex<Ix>, g: &Graph<N, E, Ty, Ix>) { self.generation += 1; let s = self.generation; self.mapping[from.index()] = to; // update T0 & T1 ins/outs // T0out: Node in G0 not in M0 but successor of a node in M0. // st.out[0]: Node either in M0 or successor of M0 for ix in g.neighbors(from) { if self.out[ix.index()] == 0 { self.out[ix.index()] = s; self.out_size += 1; } } if g.is_directed() { for ix in g.neighbors_directed(from, Incoming) { if self.ins[ix.index()] == 0 { self.ins[ix.index()] = s; self.ins_size += 1; } } } } /// Restore the state to before the last added mapping pub fn pop_mapping<N, E>(&mut self, from: NodeIndex<Ix>, g: &Graph<N, E, Ty, Ix>) { let s = self.generation; self.generation -= 1; // undo (n, m) mapping self.mapping[from.index()] = NodeIndex::end(); // unmark in ins and outs for ix in g.neighbors(from) { if self.out[ix.index()] == s { self.out[ix.index()] = 0; self.out_size -= 1; } } if g.is_directed() { for ix in g.neighbors_directed(from, Incoming) { if self.ins[ix.index()] == s { self.ins[ix.index()] = 0; self.ins_size -= 1; } } } } /// Find the next (least) node in the Tout set. pub fn next_out_index(&self, from_index: usize) -> Option<usize> { self.out[from_index..].iter() .enumerate() .filter(|&(index, elt)| *elt > 0 && self.mapping[from_index + index] == NodeIndex::end()) .next() .map(|(index, _)| index) } /// Find the next (least) node in the Tin set. pub fn next_in_index(&self, from_index: usize) -> Option<usize> { if !Ty::is_directed() { return None } self.ins[from_index..].iter() .enumerate() .filter(|&(index, elt)| *elt > 0 && self.mapping[from_index + index] == NodeIndex::end()) .next() .map(|(index, _)| index) } /// Find the next (least) node in the N - M set. 
pub fn next_rest_index(&self, from_index: usize) -> Option<usize> { self.mapping[from_index..].iter() .enumerate() .filter(|&(_, elt)| *elt == NodeIndex::end()) .next() .map(|(index, _)| index) } } /// Return `true` if the graphs `g0` and `g1` are isomorphic. /// /// Using the VF2 algorithm, only matching graph syntactically (graph /// structure). /// /// The graphs should not be multigraphs. /// /// **Reference** /// /// * Luigi P. Cordella, Pasquale Foggia, Carlo Sansone, Mario Vento; /// *A (Sub)Graph Isomorphism Algorithm for Matching Large Graphs* pub fn is_isomorphic<N, E, Ty, Ix>(g0: &Graph<N, E, Ty, Ix>, g1: &Graph<N, E, Ty, Ix>) -> bool where Ty: EdgeType, Ix: IndexType, { if g0.node_count() != g1.node_count() || g0.edge_count() != g1.edge_count() { return false } let mut st = [Vf2State::new(g0), Vf2State::new(g1)]; try_match(&mut st, g0, g1, &mut NoSemanticMatch, &mut NoSemanticMatch).unwrap_or(false) } /// Return `true` if the graphs `g0` and `g1` are isomorphic. /// /// Using the VF2 algorithm, examining both syntactic and semantic /// graph isomorphism (graph structure and matching node and edge weights). /// /// The graphs should not be multigraphs. pub fn is_isomorphic_matching<N, E, Ty, Ix, F, G>(g0: &Graph<N, E, Ty, Ix>, g1: &Graph<N, E, Ty, Ix>, mut node_match: F, mut edge_match: G) -> bool where Ty: EdgeType, Ix: IndexType, F: FnMut(&N, &N) -> bool, G: FnMut(&E, &E) -> bool, { if g0.node_count() != g1.node_count() || g0.edge_count() != g1.edge_count() { return false } let mut st = [Vf2State::new(g0), Vf2State::new(g1)]; try_match(&mut st, g0, g1, &mut node_match, &mut edge_match).unwrap_or(false) } trait SemanticMatcher<T> { fn enabled() -> bool; fn eq(&mut self, &T, &T) -> bool; } struct NoSemanticMatch; impl<T> SemanticMatcher<T> for NoSemanticMatch { #[inline] fn enabled() -> bool { false } #[inline] fn eq(&mut self, _: &T, _: &T) -> bool { true } } impl<T, F> SemanticMatcher<T> for F where F: FnMut(&T, &T) -> bool { #[inline] fn enabled() -> bool { true } #[inline] fn eq(&mut self, a: &T, b: &T) -> bool { self(a, b) } } /// Return Some(bool) if isomorphism is decided, else None. 
fn try_match<N, E, Ty, Ix, F, G>(st: &mut [Vf2State<Ty, Ix>; 2], g0: &Graph<N, E, Ty, Ix>, g1: &Graph<N, E, Ty, Ix>, node_match: &mut F, edge_match: &mut G) -> Option<bool> where Ty: EdgeType, Ix: IndexType, F: SemanticMatcher<N>, G: SemanticMatcher<E>, { let g = [g0, g1]; let graph_indices = 0..2; let end = NodeIndex::end(); // if all are mapped -- we are done and have an iso if st[0].is_complete() { return Some(true) } // A "depth first" search of a valid mapping from graph 1 to graph 2 // F(s, n, m) -- evaluate state s and add mapping n <-> m // Find least T1out node (in st.out[1] but not in M[1]) #[derive(Copy, Clone, PartialEq, Debug)] enum OpenList { Out, In, Other, } let mut open_list = OpenList::Out; let mut to_index; let mut from_index = None; // Try the out list to_index = st[1].next_out_index(0); if to_index.is_some() { from_index = st[0].next_out_index(0); open_list = OpenList::Out; } // Try the in list if to_index.is_none() || from_index.is_none() { to_index = st[1].next_in_index(0); if to_index.is_some() { from_index = st[0].next_in_index(0); open_list = OpenList::In; } } // Try the other list -- disconnected graph if to_index.is_none() || from_index.is_none() { to_index = st[1].next_rest_index(0); if to_index.is_some() { from_index = st[0].next_rest_index(0); open_list = OpenList::Other; } } let (cand0, cand1) = match (from_index, to_index) { (Some(n), Some(m)) => (n, m), // No more candidates _ => return None, }; let mut nx = NodeIndex::new(cand0); let mx = NodeIndex::new(cand1); let mut first = true; 'candidates: loop { if !first { // Find the next node index to try on the `from` side of the mapping let start = nx.index() + 1; let cand0 = match open_list { OpenList::Out => st[0].next_out_index(start), OpenList::In => st[0].next_in_index(start), OpenList::Other => st[0].next_rest_index(start), }.map(|c| c + start); // compensate for start offset. nx = match cand0 { None => break, // no more candidates Some(ix) => NodeIndex::new(ix), }; debug_assert!(nx.index() >= start); } first = false; let nodes = [nx, mx]; // Check syntactic feasibility of mapping by ensuring adjacencies // of nx map to adjacencies of mx. // // nx == map to => mx // // R_succ // // Check that every neighbor of nx is mapped to a neighbor of mx, // then check the reverse, from mx to nx. Check that they have the same // count of edges. 
// // Note: We want to check the lookahead measures here if we can, // R_out: Equal for G0, G1: Card(Succ(G, n) ^ Tout); for both Succ and Pred // R_in: Same with Tin // R_new: Equal for G0, G1: Ñ n Pred(G, n); both Succ and Pred, // Ñ is G0 - M - Tin - Tout // last attempt to add these did not speed up any of the testcases let mut succ_count = [0, 0]; for j in graph_indices.clone() { for n_neigh in g[j].neighbors(nodes[j]) { succ_count[j] += 1;<|fim▁hole|> let m_neigh = if nodes[j] != n_neigh { st[j].mapping[n_neigh.index()] } else { nodes[1 - j] }; if m_neigh == end { continue; } let has_edge = g[1-j].is_adjacent(&st[1-j].adjacency_matrix, nodes[1-j], m_neigh); if !has_edge { continue 'candidates; } } } if succ_count[0] != succ_count[1] { continue 'candidates; } // R_pred if g[0].is_directed() { let mut pred_count = [0, 0]; for j in graph_indices.clone() { for n_neigh in g[j].neighbors_directed(nodes[j], Incoming) { pred_count[j] += 1; // the self loop case is handled in outgoing let m_neigh = st[j].mapping[n_neigh.index()]; if m_neigh == end { continue; } let has_edge = g[1-j].is_adjacent(&st[1-j].adjacency_matrix, m_neigh, nodes[1-j]); if !has_edge { continue 'candidates; } } } if pred_count[0] != pred_count[1] { continue 'candidates; } } // semantic feasibility: compare associated data for nodes if F::enabled() { if !node_match.eq(&g[0][nodes[0]], &g[1][nodes[1]]) { continue 'candidates; } } // semantic feasibility: compare associated data for edges if G::enabled() { // outgoing edges for j in graph_indices.clone() { let mut edges = g[j].neighbors(nodes[j]).detach(); while let Some((n_edge, n_neigh)) = edges.next(&g[j]) { // handle the self loop case; it's not in the mapping (yet) let m_neigh = if nodes[j] != n_neigh { st[j].mapping[n_neigh.index()] } else { nodes[1 - j] }; if m_neigh == end { continue; } match g[1-j].find_edge(nodes[1 - j], m_neigh) { Some(m_edge) => { if !edge_match.eq(&g[j][n_edge], &g[1-j][m_edge]) { continue 'candidates; } } None => unreachable!() // covered by syntactic check } } } // incoming edges if g[0].is_directed() { for j in graph_indices.clone() { let mut edges = g[j].neighbors_directed(nodes[j], Incoming).detach(); while let Some((n_edge, n_neigh)) = edges.next(&g[j]) { // the self loop case is handled in outgoing let m_neigh = st[j].mapping[n_neigh.index()]; if m_neigh == end { continue; } match g[1-j].find_edge(m_neigh, nodes[1-j]) { Some(m_edge) => { if !edge_match.eq(&g[j][n_edge], &g[1-j][m_edge]) { continue 'candidates; } } None => unreachable!() // covered by syntactic check } } } } } // Add mapping nx <-> mx to the state for j in graph_indices.clone() { st[j].push_mapping(nodes[j], nodes[1-j], g[j]); } // Check cardinalities of Tin, Tout sets if st[0].out_size == st[1].out_size && st[0].ins_size == st[1].ins_size { // Recurse match try_match(st, g0, g1, node_match, edge_match) { None => {} result => return result, } } // Restore state. for j in graph_indices.clone() { st[j].pop_mapping(nodes[j], g[j]); } } None }<|fim▁end|>
// handle the self loop case; it's not in the mapping (yet)
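The `isomorphism.rs` sample implements VF2 and exports `is_isomorphic` and `is_isomorphic_matching`. A small usage sketch of those two entry points; it assumes a petgraph crate layout contemporary with this file (both functions reachable via `petgraph::algo`) and uses a toy pair of three-node directed paths.

```rust
use petgraph::graph::Graph;
use petgraph::algo::{is_isomorphic, is_isomorphic_matching};

fn main() {
    // Two directed paths a -> b -> c, with different node labels.
    let mut g0 = Graph::<&str, ()>::new();
    let (a, b, c) = (g0.add_node("a"), g0.add_node("b"), g0.add_node("c"));
    g0.add_edge(a, b, ());
    g0.add_edge(b, c, ());

    let mut g1 = Graph::<&str, ()>::new();
    let (x, y, z) = (g1.add_node("x"), g1.add_node("y"), g1.add_node("z"));
    g1.add_edge(x, y, ());
    g1.add_edge(y, z, ());

    // Syntactic check: structure only, so the two paths match.
    assert!(is_isomorphic(&g0, &g1));

    // Semantic check: node weights must also agree, so this one fails.
    assert!(!is_isomorphic_matching(&g0, &g1, |n0, n1| n0 == n1, |_, _| true));
}
```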
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls.defaults import * from twistranet.core.views import AsView from views import * <|fim▁hole|> url(r'^like_toggle_by_id/(\d+)$', AsView(LikeToggleView, lookup = 'id'), name=LikeToggleView.name), url(r'^like_toggle_by_slug/(\d+)$', AsView(LikeToggleView, lookup = 'slug'), name=LikeToggleView.name), )<|fim▁end|>
urlpatterns = patterns('sharing',
<|file_name|>errors.go<|end_file_name|><|fim▁begin|>/* PRLPKS - OpenPGP Synchronized Key Server with Deletion Copyright (c) 2014 Pruthvirajsinh Rajendrasinh Chauhan PRLPKS is based heavily on hockeypuck(https://launchpad.net/hockeypuck) by Casey Marshall, copyright 2013(GNU GPL v3). This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. */ package prlpks import ( "errors" ) // Lookup key was not found in the database. var ErrKeyNotFound = errors.New("Key not found.") // An internal inconsistency between the stored key material and our indexing was detected. var ErrInconsistentKey = errors.New("Stored key is internally inconsistent.") // Key ID is invalid. var ErrInvalidKeyId = errors.New("Invalid key ID.") // Key hash is invalid. var ErrInvalidKeyHash = errors.New("Invalid key hash.") // A lookup with a short key ID found a collision. // This is quite possible with short key IDs, remotely possibly with long IDs. var ErrKeyIdCollision = errors.New("Key ID matches multiple public keys. Try again with a longer key ID.") // A query resulted in more responses than we'd care to respond with. var ErrTooManyResponses = errors.New("Too many responses.") // Something was attempted that isn't fully baked yet. var ErrUnsupportedOperation = errors.New("Unsupported operation.") // Template path was not found. Installation or configuration problem.<|fim▁hole|>var ErrTemplatePathNotFound = errors.New("Could not find templates. Check your installation and configuration.")<|fim▁end|>
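`errors.go` relies on Go's sentinel-error idiom: package-level error values that callers compare by identity. A rough Rust analogue under stated assumptions; the enum name and the subset of variants are illustrative, with the Display strings copied from the Go messages.

```rust
use std::fmt;

/// Illustrative counterpart of the Go sentinel values above: a comparable
/// error enum whose Display output mirrors the errors.New messages.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum KeyServerError {
    KeyNotFound,
    InvalidKeyId,
    KeyIdCollision,
    TooManyResponses,
}

impl fmt::Display for KeyServerError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let msg = match self {
            KeyServerError::KeyNotFound => "Key not found.",
            KeyServerError::InvalidKeyId => "Invalid key ID.",
            KeyServerError::KeyIdCollision =>
                "Key ID matches multiple public keys. Try again with a longer key ID.",
            KeyServerError::TooManyResponses => "Too many responses.",
        };
        f.write_str(msg)
    }
}

impl std::error::Error for KeyServerError {}

fn main() {
    let err = KeyServerError::KeyNotFound;
    // Like comparing against Go's ErrKeyNotFound sentinel:
    assert_eq!(err, KeyServerError::KeyNotFound);
    println!("{err}");
}
```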
<|file_name|>ambiguity.rs<|end_file_name|><|fim▁begin|>#![deny(rustdoc::broken_intra_doc_links)] #![allow(non_camel_case_types)] #![allow(non_upper_case_globals)] pub fn ambiguous() {} pub struct ambiguous {} #[macro_export] macro_rules! multi_conflict { () => {} } #[allow(non_camel_case_types)] pub struct multi_conflict {} pub fn multi_conflict() {} pub mod type_and_value {} pub const type_and_value: i32 = 0; pub mod foo { pub enum bar {} pub fn bar() {} } /// [`ambiguous`] is ambiguous. //~ERROR `ambiguous` /// /// [ambiguous] is ambiguous. //~ERROR ambiguous /// /// [`multi_conflict`] is a three-way conflict. //~ERROR `multi_conflict` ///<|fim▁hole|>pub struct Docs {} /// [true] //~ ERROR `true` is both a module and a builtin type /// [primitive@true] pub mod r#true {}<|fim▁end|>
/// Ambiguous [type_and_value]. //~ERROR type_and_value /// /// Ambiguous non-implied shortcut link [`foo::bar`]. //~ERROR `foo::bar`
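`ambiguity.rs` is a rustdoc test fixture: each plain intra-doc link to a name owned by two items (a function and a struct, or a module and a const) must trigger `broken_intra_doc_links`. A short sketch of how such links are normally resolved, using rustdoc's documented disambiguator prefixes; the `Disambiguated` item is hypothetical.

```rust
#![deny(rustdoc::broken_intra_doc_links)]
#![allow(non_camel_case_types)]

pub fn ambiguous() {}
pub struct ambiguous {}

/// With disambiguators the links resolve cleanly: [fn@ambiguous] names the
/// function, [struct@ambiguous] names the struct, and [`ambiguous()`] also
/// resolves to the function because of the trailing parentheses.
pub struct Disambiguated {}
```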
<|file_name|>web.py<|end_file_name|><|fim▁begin|>helppage = "https://github.com/JorisPLA7/Super-D-mineur" githubpage = "https://github.com/JorisPLA7/Super-D-mineur/" rulepage = "http://demineur.hugames.fr/help.php" import webbrowser def help():<|fim▁hole|> webbrowser.open(githubpage) def rules(): webbrowser.open(rulepage) if __name__ == '__main__': help()<|fim▁end|>
webbrowser.open(helppage) def github():
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ use crate::document_loader::{DocumentLoader, LoadType}; use crate::dom::bindings::cell::DomRefCell; use crate::dom::bindings::codegen::Bindings::DocumentBinding::{ DocumentMethods, DocumentReadyState, }; use crate::dom::bindings::codegen::Bindings::HTMLImageElementBinding::HTMLImageElementMethods; use crate::dom::bindings::codegen::Bindings::HTMLTemplateElementBinding::HTMLTemplateElementMethods; use crate::dom::bindings::codegen::Bindings::NodeBinding::NodeMethods; use crate::dom::bindings::codegen::Bindings::ServoParserBinding; use crate::dom::bindings::inheritance::Castable; use crate::dom::bindings::refcounted::Trusted; use crate::dom::bindings::reflector::{reflect_dom_object, DomObject, Reflector}; use crate::dom::bindings::root::{Dom, DomRoot, MutNullableDom}; use crate::dom::bindings::settings_stack::is_execution_stack_empty; use crate::dom::bindings::str::{DOMString, USVString}; use crate::dom::characterdata::CharacterData; use crate::dom::comment::Comment; use crate::dom::document::{Document, DocumentSource, HasBrowsingContext, IsHTMLDocument}; use crate::dom::documenttype::DocumentType; use crate::dom::element::{CustomElementCreationMode, Element, ElementCreator}; use crate::dom::globalscope::GlobalScope; use crate::dom::htmlformelement::{FormControlElementHelpers, HTMLFormElement}; use crate::dom::htmlimageelement::HTMLImageElement; use crate::dom::htmlscriptelement::{HTMLScriptElement, ScriptResult}; use crate::dom::htmltemplateelement::HTMLTemplateElement; use crate::dom::node::Node; use crate::dom::performanceentry::PerformanceEntry; use crate::dom::performancenavigationtiming::PerformanceNavigationTiming; use crate::dom::processinginstruction::ProcessingInstruction; use crate::dom::text::Text; use crate::dom::virtualmethods::vtable_for; use crate::network_listener::PreInvoke; use crate::script_thread::ScriptThread; use dom_struct::dom_struct; use embedder_traits::resources::{self, Resource}; use encoding_rs::Encoding; use html5ever::buffer_queue::BufferQueue; use html5ever::tendril::fmt::UTF8; use html5ever::tendril::{ByteTendril, StrTendril, TendrilSink}; use html5ever::tree_builder::{ElementFlags, NextParserState, NodeOrText, QuirksMode, TreeSink}; use html5ever::{Attribute, ExpandedName, LocalName, QualName}; use hyper_serde::Serde; use mime::{self, Mime}; use msg::constellation_msg::PipelineId; use net_traits::{FetchMetadata, FetchResponseListener, Metadata, NetworkError}; use net_traits::{ResourceFetchTiming, ResourceTimingType}; use profile_traits::time::{ profile, ProfilerCategory, TimerMetadata, TimerMetadataFrameType, TimerMetadataReflowType, }; use script_traits::DocumentActivity; use servo_config::prefs::PREFS; use servo_url::ServoUrl; use std::borrow::Cow; use std::cell::Cell; use std::mem; use style::context::QuirksMode as ServoQuirksMode; use tendril::stream::LossyDecoder; mod async_html; mod html; mod xml; #[dom_struct] /// The parser maintains two input streams: one for input from script through /// document.write(), and one for input from network. /// /// There is no concrete representation of the insertion point, instead it /// always points to just before the next character from the network input, /// with all of the script input before itself. /// /// ```text /// ... script input ... | ... network input ... 
/// ^ /// insertion point /// ``` pub struct ServoParser { reflector: Reflector, /// The document associated with this parser. document: Dom<Document>, /// The decoder used for the network input. network_decoder: DomRefCell<Option<NetworkDecoder>>, /// Input received from network. #[ignore_malloc_size_of = "Defined in html5ever"] network_input: DomRefCell<BufferQueue>, /// Input received from script. Used only to support document.write(). #[ignore_malloc_size_of = "Defined in html5ever"] script_input: DomRefCell<BufferQueue>, /// The tokenizer of this parser. tokenizer: DomRefCell<Tokenizer>, /// Whether to expect any further input from the associated network request. last_chunk_received: Cell<bool>, /// Whether this parser should avoid passing any further data to the tokenizer. suspended: Cell<bool>, /// <https://html.spec.whatwg.org/multipage/#script-nesting-level> script_nesting_level: Cell<usize>, /// <https://html.spec.whatwg.org/multipage/#abort-a-parser> aborted: Cell<bool>, /// <https://html.spec.whatwg.org/multipage/#script-created-parser> script_created_parser: bool, } #[derive(PartialEq)] enum LastChunkState { Received, NotReceived, } pub struct ElementAttribute { name: QualName, value: DOMString, } #[derive(Clone, Copy, JSTraceable, MallocSizeOf, PartialEq)] pub enum ParsingAlgorithm { Normal, Fragment, } impl ElementAttribute { pub fn new(name: QualName, value: DOMString) -> ElementAttribute { ElementAttribute { name: name, value: value, } } } impl ServoParser { pub fn parser_is_not_active(&self) -> bool { self.can_write() || self.tokenizer.try_borrow_mut().is_ok() } pub fn parse_html_document(document: &Document, input: DOMString, url: ServoUrl) { let parser = if PREFS .get("dom.servoparser.async_html_tokenizer.enabled") .as_boolean() .unwrap() { ServoParser::new( document, Tokenizer::AsyncHtml(self::async_html::Tokenizer::new(document, url, None)), LastChunkState::NotReceived, ParserKind::Normal, ) } else { ServoParser::new( document, Tokenizer::Html(self::html::Tokenizer::new( document, url, None, ParsingAlgorithm::Normal, )), LastChunkState::NotReceived, ParserKind::Normal, ) }; parser.parse_string_chunk(String::from(input)); } // https://html.spec.whatwg.org/multipage/#parsing-html-fragments pub fn parse_html_fragment( context: &Element, input: DOMString, ) -> impl Iterator<Item = DomRoot<Node>> { let context_node = context.upcast::<Node>(); let context_document = context_node.owner_doc(); let window = context_document.window(); let url = context_document.url(); // Step 1. let loader = DocumentLoader::new_with_threads( context_document.loader().resource_threads().clone(), Some(url.clone()), ); let document = Document::new( window, HasBrowsingContext::No, Some(url.clone()), context_document.origin().clone(), IsHTMLDocument::HTMLDocument, None, None, DocumentActivity::Inactive, DocumentSource::FromParser, loader, None, None, Default::default(), ); // Step 2. document.set_quirks_mode(context_document.quirks_mode()); // Step 11. 
let form = context_node .inclusive_ancestors() .find(|element| element.is::<HTMLFormElement>()); let fragment_context = FragmentContext { context_elem: context_node, form_elem: form.deref(), }; let parser = ServoParser::new( &document, Tokenizer::Html(self::html::Tokenizer::new( &document, url, Some(fragment_context), ParsingAlgorithm::Fragment, )), LastChunkState::Received, ParserKind::Normal, ); parser.parse_string_chunk(String::from(input)); // Step 14.<|fim▁hole|> } } pub fn parse_html_script_input(document: &Document, url: ServoUrl) { let parser = ServoParser::new( document, Tokenizer::Html(self::html::Tokenizer::new( document, url, None, ParsingAlgorithm::Normal, )), LastChunkState::NotReceived, ParserKind::ScriptCreated, ); document.set_current_parser(Some(&parser)); } pub fn parse_xml_document(document: &Document, input: DOMString, url: ServoUrl) { let parser = ServoParser::new( document, Tokenizer::Xml(self::xml::Tokenizer::new(document, url)), LastChunkState::NotReceived, ParserKind::Normal, ); parser.parse_string_chunk(String::from(input)); } pub fn script_nesting_level(&self) -> usize { self.script_nesting_level.get() } pub fn is_script_created(&self) -> bool { self.script_created_parser } /// Corresponds to the latter part of the "Otherwise" branch of the 'An end /// tag whose tag name is "script"' of /// <https://html.spec.whatwg.org/multipage/#parsing-main-incdata> /// /// This first moves everything from the script input to the beginning of /// the network input, effectively resetting the insertion point to just /// before the next character to be consumed. /// /// /// ```text /// | ... script input ... network input ... /// ^ /// insertion point /// ``` pub fn resume_with_pending_parsing_blocking_script( &self, script: &HTMLScriptElement, result: ScriptResult, ) { assert!(self.suspended.get()); self.suspended.set(false); mem::swap( &mut *self.script_input.borrow_mut(), &mut *self.network_input.borrow_mut(), ); while let Some(chunk) = self.script_input.borrow_mut().pop_front() { self.network_input.borrow_mut().push_back(chunk); } let script_nesting_level = self.script_nesting_level.get(); assert_eq!(script_nesting_level, 0); self.script_nesting_level.set(script_nesting_level + 1); script.execute(result); self.script_nesting_level.set(script_nesting_level); if !self.suspended.get() { self.parse_sync(); } } pub fn can_write(&self) -> bool { self.script_created_parser || self.script_nesting_level.get() > 0 } /// Steps 6-8 of https://html.spec.whatwg.org/multipage/#document.write() pub fn write(&self, text: Vec<DOMString>) { assert!(self.can_write()); if self.document.has_pending_parsing_blocking_script() { // There is already a pending parsing blocking script so the // parser is suspended, we just append everything to the // script input and abort these steps. for chunk in text { self.script_input .borrow_mut() .push_back(String::from(chunk).into()); } return; } // There is no pending parsing blocking script, so all previous calls // to document.write() should have seen their entire input tokenized // and process, with nothing pushed to the parser script input. assert!(self.script_input.borrow().is_empty()); let mut input = BufferQueue::new(); for chunk in text { input.push_back(String::from(chunk).into()); } self.tokenize(|tokenizer| tokenizer.feed(&mut input)); if self.suspended.get() { // Parser got suspended, insert remaining input at end of // script input, following anything written by scripts executed // reentrantly during this call. 
while let Some(chunk) = input.pop_front() { self.script_input.borrow_mut().push_back(chunk); } return; } assert!(input.is_empty()); } // Steps 4-6 of https://html.spec.whatwg.org/multipage/#dom-document-close pub fn close(&self) { assert!(self.script_created_parser); // Step 4. self.last_chunk_received.set(true); if self.suspended.get() { // Step 5. return; } // Step 6. self.parse_sync(); } // https://html.spec.whatwg.org/multipage/#abort-a-parser pub fn abort(&self) { assert!(!self.aborted.get()); self.aborted.set(true); // Step 1. *self.script_input.borrow_mut() = BufferQueue::new(); *self.network_input.borrow_mut() = BufferQueue::new(); // Step 2. self.document .set_ready_state(DocumentReadyState::Interactive); // Step 3. self.tokenizer.borrow_mut().end(); self.document.set_current_parser(None); // Step 4. self.document.set_ready_state(DocumentReadyState::Complete); } // https://html.spec.whatwg.org/multipage/#active-parser pub fn is_active(&self) -> bool { self.script_nesting_level() > 0 && !self.aborted.get() } #[allow(unrooted_must_root)] fn new_inherited( document: &Document, tokenizer: Tokenizer, last_chunk_state: LastChunkState, kind: ParserKind, ) -> Self { ServoParser { reflector: Reflector::new(), document: Dom::from_ref(document), network_decoder: DomRefCell::new(Some(NetworkDecoder::new(document.encoding()))), network_input: DomRefCell::new(BufferQueue::new()), script_input: DomRefCell::new(BufferQueue::new()), tokenizer: DomRefCell::new(tokenizer), last_chunk_received: Cell::new(last_chunk_state == LastChunkState::Received), suspended: Default::default(), script_nesting_level: Default::default(), aborted: Default::default(), script_created_parser: kind == ParserKind::ScriptCreated, } } #[allow(unrooted_must_root)] fn new( document: &Document, tokenizer: Tokenizer, last_chunk_state: LastChunkState, kind: ParserKind, ) -> DomRoot<Self> { reflect_dom_object( Box::new(ServoParser::new_inherited( document, tokenizer, last_chunk_state, kind, )), document.window(), ServoParserBinding::Wrap, ) } fn push_bytes_input_chunk(&self, chunk: Vec<u8>) { let chunk = self .network_decoder .borrow_mut() .as_mut() .unwrap() .decode(chunk); if !chunk.is_empty() { self.network_input.borrow_mut().push_back(chunk); } } fn push_string_input_chunk(&self, chunk: String) { self.network_input.borrow_mut().push_back(chunk.into()); } fn parse_sync(&self) { let metadata = TimerMetadata { url: self.document.url().as_str().into(), iframe: TimerMetadataFrameType::RootWindow, incremental: TimerMetadataReflowType::FirstReflow, }; let profiler_category = self.tokenizer.borrow().profiler_category(); profile( profiler_category, Some(metadata), self.document .window() .upcast::<GlobalScope>() .time_profiler_chan() .clone(), || self.do_parse_sync(), ) } fn do_parse_sync(&self) { assert!(self.script_input.borrow().is_empty()); // This parser will continue to parse while there is either pending input or // the parser remains unsuspended. 
if self.last_chunk_received.get() { if let Some(decoder) = self.network_decoder.borrow_mut().take() { let chunk = decoder.finish(); if !chunk.is_empty() { self.network_input.borrow_mut().push_back(chunk); } } } self.tokenize(|tokenizer| tokenizer.feed(&mut *self.network_input.borrow_mut())); if self.suspended.get() { return; } assert!(self.network_input.borrow().is_empty()); if self.last_chunk_received.get() { self.finish(); } } fn parse_string_chunk(&self, input: String) { self.document.set_current_parser(Some(self)); self.push_string_input_chunk(input); if !self.suspended.get() { self.parse_sync(); } } fn parse_bytes_chunk(&self, input: Vec<u8>) { self.document.set_current_parser(Some(self)); self.push_bytes_input_chunk(input); if !self.suspended.get() { self.parse_sync(); } } fn tokenize<F>(&self, mut feed: F) where F: FnMut(&mut Tokenizer) -> Result<(), DomRoot<HTMLScriptElement>>, { loop { assert!(!self.suspended.get()); assert!(!self.aborted.get()); self.document.reflow_if_reflow_timer_expired(); let script = match feed(&mut *self.tokenizer.borrow_mut()) { Ok(()) => return, Err(script) => script, }; let script_nesting_level = self.script_nesting_level.get(); self.script_nesting_level.set(script_nesting_level + 1); script.prepare(); self.script_nesting_level.set(script_nesting_level); if self.document.has_pending_parsing_blocking_script() { self.suspended.set(true); return; } if self.aborted.get() { return; } } } // https://html.spec.whatwg.org/multipage/#the-end fn finish(&self) { assert!(!self.suspended.get()); assert!(self.last_chunk_received.get()); assert!(self.script_input.borrow().is_empty()); assert!(self.network_input.borrow().is_empty()); assert!(self.network_decoder.borrow().is_none()); // Step 1. self.document .set_ready_state(DocumentReadyState::Interactive); // Step 2. self.tokenizer.borrow_mut().end(); self.document.set_current_parser(None); // Steps 3-12 are in another castle, namely finish_load. 
let url = self.tokenizer.borrow().url().clone(); self.document.finish_load(LoadType::PageSource(url)); } } struct FragmentParsingResult<I> where I: Iterator<Item = DomRoot<Node>>, { inner: I, } impl<I> Iterator for FragmentParsingResult<I> where I: Iterator<Item = DomRoot<Node>>, { type Item = DomRoot<Node>; fn next(&mut self) -> Option<DomRoot<Node>> { let next = self.inner.next()?; next.remove_self(); Some(next) } fn size_hint(&self) -> (usize, Option<usize>) { self.inner.size_hint() } } #[derive(JSTraceable, MallocSizeOf, PartialEq)] enum ParserKind { Normal, ScriptCreated, } #[derive(JSTraceable, MallocSizeOf)] #[must_root] enum Tokenizer { Html(self::html::Tokenizer), AsyncHtml(self::async_html::Tokenizer), Xml(self::xml::Tokenizer), } impl Tokenizer { fn feed(&mut self, input: &mut BufferQueue) -> Result<(), DomRoot<HTMLScriptElement>> { match *self { Tokenizer::Html(ref mut tokenizer) => tokenizer.feed(input), Tokenizer::AsyncHtml(ref mut tokenizer) => tokenizer.feed(input), Tokenizer::Xml(ref mut tokenizer) => tokenizer.feed(input), } } fn end(&mut self) { match *self { Tokenizer::Html(ref mut tokenizer) => tokenizer.end(), Tokenizer::AsyncHtml(ref mut tokenizer) => tokenizer.end(), Tokenizer::Xml(ref mut tokenizer) => tokenizer.end(), } } fn url(&self) -> &ServoUrl { match *self { Tokenizer::Html(ref tokenizer) => tokenizer.url(), Tokenizer::AsyncHtml(ref tokenizer) => tokenizer.url(), Tokenizer::Xml(ref tokenizer) => tokenizer.url(), } } fn set_plaintext_state(&mut self) { match *self { Tokenizer::Html(ref mut tokenizer) => tokenizer.set_plaintext_state(), Tokenizer::AsyncHtml(ref mut tokenizer) => tokenizer.set_plaintext_state(), Tokenizer::Xml(_) => unimplemented!(), } } fn profiler_category(&self) -> ProfilerCategory { match *self { Tokenizer::Html(_) => ProfilerCategory::ScriptParseHTML, Tokenizer::AsyncHtml(_) => ProfilerCategory::ScriptParseHTML, Tokenizer::Xml(_) => ProfilerCategory::ScriptParseXML, } } } /// The context required for asynchronously fetching a document /// and parsing it progressively. #[derive(JSTraceable)] pub struct ParserContext { /// The parser that initiated the request. parser: Option<Trusted<ServoParser>>, /// Is this a synthesized document is_synthesized_document: bool, /// The pipeline associated with this document. id: PipelineId, /// The URL for this document. url: ServoUrl, /// timing data for this resource resource_timing: ResourceFetchTiming, } impl ParserContext { pub fn new(id: PipelineId, url: ServoUrl) -> ParserContext { ParserContext { parser: None, is_synthesized_document: false, id: id, url: url, resource_timing: ResourceFetchTiming::new(ResourceTimingType::Navigation), } } } impl FetchResponseListener for ParserContext { fn process_request_body(&mut self) {} fn process_request_eof(&mut self) {} fn process_response(&mut self, meta_result: Result<FetchMetadata, NetworkError>) { let mut ssl_error = None; let mut network_error = None; let metadata = match meta_result { Ok(meta) => Some(match meta { FetchMetadata::Unfiltered(m) => m, FetchMetadata::Filtered { unsafe_, .. 
} => unsafe_, }), Err(NetworkError::SslValidation(url, reason)) => { ssl_error = Some(reason); let mut meta = Metadata::default(url); let mime: Option<Mime> = "text/html".parse().ok(); meta.set_content_type(mime.as_ref()); Some(meta) }, Err(NetworkError::Internal(reason)) => { network_error = Some(reason); let mut meta = Metadata::default(self.url.clone()); let mime: Option<Mime> = "text/html".parse().ok(); meta.set_content_type(mime.as_ref()); Some(meta) }, Err(_) => None, }; let content_type: Option<Mime> = metadata .clone() .and_then(|meta| meta.content_type) .map(Serde::into_inner) .map(Into::into); let parser = match ScriptThread::page_headers_available(&self.id, metadata) { Some(parser) => parser, None => return, }; if parser.aborted.get() { return; } self.parser = Some(Trusted::new(&*parser)); match content_type { Some(ref mime) if mime.type_() == mime::IMAGE => { self.is_synthesized_document = true; let page = "<html><body></body></html>".into(); parser.push_string_input_chunk(page); parser.parse_sync(); let doc = &parser.document; let doc_body = DomRoot::upcast::<Node>(doc.GetBody().unwrap()); let img = HTMLImageElement::new(local_name!("img"), None, doc); img.SetSrc(USVString(self.url.to_string())); doc_body .AppendChild(&DomRoot::upcast::<Node>(img)) .expect("Appending failed"); }, Some(ref mime) if mime.type_() == mime::TEXT && mime.subtype() == mime::PLAIN => { // https://html.spec.whatwg.org/multipage/#read-text let page = "<pre>\n".into(); parser.push_string_input_chunk(page); parser.parse_sync(); parser.tokenizer.borrow_mut().set_plaintext_state(); }, Some(ref mime) if mime.type_() == mime::TEXT && mime.subtype() == mime::HTML => { // Handle text/html if let Some(reason) = ssl_error { self.is_synthesized_document = true; let page = resources::read_string(Resource::BadCertHTML); let page = page.replace("${reason}", &reason); parser.push_string_input_chunk(page); parser.parse_sync(); } if let Some(reason) = network_error { self.is_synthesized_document = true; let page = resources::read_string(Resource::NetErrorHTML); let page = page.replace("${reason}", &reason); parser.push_string_input_chunk(page); parser.parse_sync(); } }, // Handle text/xml, application/xml Some(ref mime) if (mime.type_() == mime::TEXT && mime.subtype() == mime::XML) || (mime.type_() == mime::APPLICATION && mime.subtype() == mime::XML) => {}, Some(ref mime) if mime.type_() == mime::APPLICATION && mime.subtype().as_str() == "xhtml" && mime.suffix() == Some(mime::XML) => {}, // Handle xhtml (application/xhtml+xml) Some(ref mime) => { // Show warning page for unknown mime types. let page = format!( "<html><body><p>Unknown content type ({}/{}).</p></body></html>", mime.type_().as_str(), mime.subtype().as_str() ); self.is_synthesized_document = true; parser.push_string_input_chunk(page); parser.parse_sync(); }, None => { // No content-type header. // Merge with #4212 when fixed. 
}, } } fn process_response_chunk(&mut self, payload: Vec<u8>) { if self.is_synthesized_document { return; } let parser = match self.parser.as_ref() { Some(parser) => parser.root(), None => return, }; if parser.aborted.get() { return; } parser.parse_bytes_chunk(payload); } // This method is called via script_thread::handle_fetch_eof, so we must call // submit_resource_timing in this function // Resource listeners are called via net_traits::Action::process, which handles submission for them fn process_response_eof(&mut self, status: Result<ResourceFetchTiming, NetworkError>) { let parser = match self.parser.as_ref() { Some(parser) => parser.root(), None => return, }; if parser.aborted.get() { return; } match status { // are we throwing this away or can we use it? Ok(_) => (), // TODO(Savago): we should send a notification to callers #5463. Err(err) => debug!("Failed to load page URL {}, error: {:?}", self.url, err), } parser .document .set_redirect_count(self.resource_timing.redirect_count); parser.last_chunk_received.set(true); if !parser.suspended.get() { parser.parse_sync(); } //TODO only submit if this is the current document resource self.submit_resource_timing(); } fn resource_timing_mut(&mut self) -> &mut ResourceFetchTiming { &mut self.resource_timing } fn resource_timing(&self) -> &ResourceFetchTiming { &self.resource_timing } // store a PerformanceNavigationTiming entry in the globalscope's Performance buffer fn submit_resource_timing(&mut self) { let parser = match self.parser.as_ref() { Some(parser) => parser.root(), None => return, }; if parser.aborted.get() { return; } let document = &parser.document; //TODO nav_start and nav_start_precise let performance_entry = PerformanceNavigationTiming::new(&document.global(), 0, 0, &document); document .global() .performance() .queue_entry(performance_entry.upcast::<PerformanceEntry>(), true); } } impl PreInvoke for ParserContext {} pub struct FragmentContext<'a> { pub context_elem: &'a Node, pub form_elem: Option<&'a Node>, } #[allow(unrooted_must_root)] fn insert(parent: &Node, reference_child: Option<&Node>, child: NodeOrText<Dom<Node>>) { match child { NodeOrText::AppendNode(n) => { parent.InsertBefore(&n, reference_child).unwrap(); }, NodeOrText::AppendText(t) => { let text = reference_child .and_then(Node::GetPreviousSibling) .or_else(|| parent.GetLastChild()) .and_then(DomRoot::downcast::<Text>); if let Some(text) = text { text.upcast::<CharacterData>().append_data(&t); } else { let text = Text::new(String::from(t).into(), &parent.owner_doc()); parent.InsertBefore(text.upcast(), reference_child).unwrap(); } }, } } #[derive(JSTraceable, MallocSizeOf)] #[must_root] pub struct Sink { base_url: ServoUrl, document: Dom<Document>, current_line: u64, script: MutNullableDom<HTMLScriptElement>, parsing_algorithm: ParsingAlgorithm, } impl Sink { fn same_tree(&self, x: &Dom<Node>, y: &Dom<Node>) -> bool { let x = x.downcast::<Element>().expect("Element node expected"); let y = y.downcast::<Element>().expect("Element node expected"); x.is_in_same_home_subtree(y) } fn has_parent_node(&self, node: &Dom<Node>) -> bool { node.GetParentNode().is_some() } } #[allow(unrooted_must_root)] // FIXME: really? 
impl TreeSink for Sink { type Output = Self; fn finish(self) -> Self { self } type Handle = Dom<Node>; fn get_document(&mut self) -> Dom<Node> { Dom::from_ref(self.document.upcast()) } fn get_template_contents(&mut self, target: &Dom<Node>) -> Dom<Node> { let template = target .downcast::<HTMLTemplateElement>() .expect("tried to get template contents of non-HTMLTemplateElement in HTML parsing"); Dom::from_ref(template.Content().upcast()) } fn same_node(&self, x: &Dom<Node>, y: &Dom<Node>) -> bool { x == y } fn elem_name<'a>(&self, target: &'a Dom<Node>) -> ExpandedName<'a> { let elem = target .downcast::<Element>() .expect("tried to get name of non-Element in HTML parsing"); ExpandedName { ns: elem.namespace(), local: elem.local_name(), } } fn create_element( &mut self, name: QualName, attrs: Vec<Attribute>, _flags: ElementFlags, ) -> Dom<Node> { let attrs = attrs .into_iter() .map(|attr| ElementAttribute::new(attr.name, DOMString::from(String::from(attr.value)))) .collect(); let element = create_element_for_token( name, attrs, &*self.document, ElementCreator::ParserCreated(self.current_line), self.parsing_algorithm, ); Dom::from_ref(element.upcast()) } fn create_comment(&mut self, text: StrTendril) -> Dom<Node> { let comment = Comment::new(DOMString::from(String::from(text)), &*self.document); Dom::from_ref(comment.upcast()) } fn create_pi(&mut self, target: StrTendril, data: StrTendril) -> Dom<Node> { let doc = &*self.document; let pi = ProcessingInstruction::new( DOMString::from(String::from(target)), DOMString::from(String::from(data)), doc, ); Dom::from_ref(pi.upcast()) } fn associate_with_form( &mut self, target: &Dom<Node>, form: &Dom<Node>, nodes: (&Dom<Node>, Option<&Dom<Node>>), ) { let (element, prev_element) = nodes; let tree_node = prev_element.map_or(element, |prev| { if self.has_parent_node(element) { element } else { prev } }); if !self.same_tree(tree_node, form) { return; } let node = target; let form = DomRoot::downcast::<HTMLFormElement>(DomRoot::from_ref(&**form)) .expect("Owner must be a form element"); let elem = node.downcast::<Element>(); let control = elem.and_then(|e| e.as_maybe_form_control()); if let Some(control) = control { control.set_form_owner_from_parser(&form); } else { // TODO remove this code when keygen is implemented. 
assert_eq!( node.NodeName(), "KEYGEN", "Unknown form-associatable element" ); } } fn append_before_sibling(&mut self, sibling: &Dom<Node>, new_node: NodeOrText<Dom<Node>>) { let parent = sibling .GetParentNode() .expect("append_before_sibling called on node without parent"); insert(&parent, Some(&*sibling), new_node); } fn parse_error(&mut self, msg: Cow<'static, str>) { debug!("Parse error: {}", msg); } fn set_quirks_mode(&mut self, mode: QuirksMode) { let mode = match mode { QuirksMode::Quirks => ServoQuirksMode::Quirks, QuirksMode::LimitedQuirks => ServoQuirksMode::LimitedQuirks, QuirksMode::NoQuirks => ServoQuirksMode::NoQuirks, }; self.document.set_quirks_mode(mode); } fn append(&mut self, parent: &Dom<Node>, child: NodeOrText<Dom<Node>>) { insert(&parent, None, child); } fn append_based_on_parent_node( &mut self, elem: &Dom<Node>, prev_elem: &Dom<Node>, child: NodeOrText<Dom<Node>>, ) { if self.has_parent_node(elem) { self.append_before_sibling(elem, child); } else { self.append(prev_elem, child); } } fn append_doctype_to_document( &mut self, name: StrTendril, public_id: StrTendril, system_id: StrTendril, ) { let doc = &*self.document; let doctype = DocumentType::new( DOMString::from(String::from(name)), Some(DOMString::from(String::from(public_id))), Some(DOMString::from(String::from(system_id))), doc, ); doc.upcast::<Node>() .AppendChild(doctype.upcast()) .expect("Appending failed"); } fn add_attrs_if_missing(&mut self, target: &Dom<Node>, attrs: Vec<Attribute>) { let elem = target .downcast::<Element>() .expect("tried to set attrs on non-Element in HTML parsing"); for attr in attrs { elem.set_attribute_from_parser( attr.name, DOMString::from(String::from(attr.value)), None, ); } } fn remove_from_parent(&mut self, target: &Dom<Node>) { if let Some(ref parent) = target.GetParentNode() { parent.RemoveChild(&*target).unwrap(); } } fn mark_script_already_started(&mut self, node: &Dom<Node>) { let script = node.downcast::<HTMLScriptElement>(); script.map(|script| script.set_already_started(true)); } fn complete_script(&mut self, node: &Dom<Node>) -> NextParserState { if let Some(script) = node.downcast() { self.script.set(Some(script)); NextParserState::Suspend } else { NextParserState::Continue } } fn reparent_children(&mut self, node: &Dom<Node>, new_parent: &Dom<Node>) { while let Some(ref child) = node.GetFirstChild() { new_parent.AppendChild(&child).unwrap(); } } /// <https://html.spec.whatwg.org/multipage/#html-integration-point> /// Specifically, the <annotation-xml> cases. fn is_mathml_annotation_xml_integration_point(&self, handle: &Dom<Node>) -> bool { let elem = handle.downcast::<Element>().unwrap(); elem.get_attribute(&ns!(), &local_name!("encoding")) .map_or(false, |attr| { attr.value().eq_ignore_ascii_case("text/html") || attr.value().eq_ignore_ascii_case("application/xhtml+xml") }) } fn set_current_line(&mut self, line_number: u64) { self.current_line = line_number; } fn pop(&mut self, node: &Dom<Node>) { let node = DomRoot::from_ref(&**node); vtable_for(&node).pop(); } } /// https://html.spec.whatwg.org/multipage/#create-an-element-for-the-token fn create_element_for_token( name: QualName, attrs: Vec<ElementAttribute>, document: &Document, creator: ElementCreator, parsing_algorithm: ParsingAlgorithm, ) -> DomRoot<Element> { // Step 3. let is = attrs .iter() .find(|attr| attr.name.local.eq_str_ignore_ascii_case("is")) .map(|attr| LocalName::from(&*attr.value)); // Step 4. 
let definition = document.lookup_custom_element_definition(&name.ns, &name.local, is.as_ref()); // Step 5. let will_execute_script = definition.is_some() && parsing_algorithm != ParsingAlgorithm::Fragment; // Step 6. if will_execute_script { // Step 6.1. document.increment_throw_on_dynamic_markup_insertion_counter(); // Step 6.2 if is_execution_stack_empty() { document .window() .upcast::<GlobalScope>() .perform_a_microtask_checkpoint(); } // Step 6.3 ScriptThread::push_new_element_queue() } // Step 7. let creation_mode = if will_execute_script { CustomElementCreationMode::Synchronous } else { CustomElementCreationMode::Asynchronous }; let element = Element::create(name, is, document, creator, creation_mode); // Step 8. for attr in attrs { element.set_attribute_from_parser(attr.name, attr.value, None); } // Step 9. if will_execute_script { // Steps 9.1 - 9.2. ScriptThread::pop_current_element_queue(); // Step 9.3. document.decrement_throw_on_dynamic_markup_insertion_counter(); } // TODO: Step 10. // TODO: Step 11. // Step 12 is handled in `associate_with_form`. // Step 13. element } #[derive(JSTraceable, MallocSizeOf)] struct NetworkDecoder { #[ignore_malloc_size_of = "Defined in tendril"] decoder: LossyDecoder<NetworkSink>, } impl NetworkDecoder { fn new(encoding: &'static Encoding) -> Self { Self { decoder: LossyDecoder::new_encoding_rs(encoding, Default::default()), } } fn decode(&mut self, chunk: Vec<u8>) -> StrTendril { self.decoder.process(ByteTendril::from(&*chunk)); mem::replace( &mut self.decoder.inner_sink_mut().output, Default::default(), ) } fn finish(self) -> StrTendril { self.decoder.finish() } } #[derive(Default, JSTraceable)] struct NetworkSink { output: StrTendril, } impl TendrilSink<UTF8> for NetworkSink { type Output = StrTendril; fn process(&mut self, t: StrTendril) { if self.output.is_empty() { self.output = t; } else { self.output.push_tendril(&t); } } fn error(&mut self, _desc: Cow<'static, str>) {} fn finish(self) -> Self::Output { self.output } }<|fim▁end|>
let root_element = document.GetDocumentElement().expect("no document element"); FragmentParsingResult { inner: root_element.upcast::<Node>().children(),
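Each row in this dump is a fill-in-the-middle (FIM) pair: the prompt is a source file prefixed with a `<|file_name|>...<|end_file_name|>` header, containing a single `<|fim▁hole|>` marker and terminated by `<|fim▁end|>`, and the row's second field (the text following the prompt, as in the Rust row above) is the completion that belongs in the hole. A minimal Python sketch of how one row splices back into a full source file, assuming the markers are exactly the literal strings shown here and each prompt carries one hole:

FIM_BEGIN = "<|fim▁begin|>"
FIM_HOLE = "<|fim▁hole|>"
FIM_END = "<|fim▁end|>"

def splice_fim_row(prompt: str, completion: str) -> str:
    """Rebuild the original source file from one prompt/completion row."""
    # Drop the <|file_name|>...<|end_file_name|> header by cutting at the
    # begin marker, then fill the hole and strip the end-of-file marker.
    body = prompt.split(FIM_BEGIN, 1)[-1]
    body = body.replace(FIM_HOLE, completion, 1)
    return body.replace(FIM_END, "", 1)

For the Rust row above, this reinserts the `FragmentParsingResult` construction at the point the `<|fim▁hole|>` marker occupies in the prompt.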
<|file_name|>test_colour_operation.py<|end_file_name|><|fim▁begin|>import unittest import numpy as np from bsym import ColourOperation, Configuration from unittest.mock import patch<|fim▁hole|> def test_symmetry_operation_is_initialised_from_a_matrix( self ): matrix = np.array( [ [ 1, 0 ], [ 0, 1 ] ] ) mapping = [ { 1: 0, 0: 1 }, { 1: 1, 0: 0 } ] co = ColourOperation( matrix, colour_mapping=mapping ) np.testing.assert_array_equal( co.matrix, matrix ) self.assertEqual( co.colour_mapping, mapping ) def test_from_vector( self ): vector = [ 2, 3, 1 ] mapping = [ { 1: 0, 0: 1 }, { 1: 1, 0: 0 }, { 1: 1, 0: 0 } ] co = ColourOperation.from_vector( vector, mapping ) np.testing.assert_array_equal( co.matrix, np.array( [ [ 0, 0, 1 ], [ 1, 0, 0 ], [ 0, 1, 0 ] ] ) ) self.assertEqual( co.colour_mapping, mapping ) def test_from_vector_with_label( self ): vector = [ 2, 3, 1 ] mapping = [ { 1: 0, 0: 1 }, { 1: 1, 0: 0 } ] label = 'A' co = ColourOperation.from_vector( vector, mapping, label=label ) np.testing.assert_array_equal( co.matrix, np.array( [ [ 0, 0, 1 ], [ 1, 0, 0 ], [ 0, 1, 0 ] ] ) ) self.assertEqual( co.label, label ) self.assertEqual( co.colour_mapping, mapping ) def test_symmetry_operation_is_initialised_with_label( self ): matrix = np.array( [ [ 1, 0 ], [ 0, 1 ] ] ) label = 'E' mapping = [ { 1: 0, 0: 1 }, { 1: 1, 0: 0 } ] co = ColourOperation( matrix, mapping, label=label ) self.assertEqual( co.label, label ) self.assertEqual( co.colour_mapping, mapping ) def test_from_vector_counting_from_zero( self ): vector = [ 1, 2, 0 ] mapping = [ { 1: 0, 0: 1 }, { 1: 1, 0: 0 } ] co = ColourOperation.from_vector( vector, mapping, count_from_zero=True ) np.testing.assert_array_equal( co.matrix, np.array( [ [ 0, 0, 1 ], [ 1, 0, 0 ], [ 0, 1, 0 ] ] ) ) self.assertEqual( co.colour_mapping, mapping ) def test_operate_on( self ): matrix = np.array( [ [ 0, 1, 0 ], [ 0, 0, 1 ], [ 1, 0, 0 ] ] ) colour_mapping = [ { 1:1, 2:2, 3:3 }, { 1:2, 2:3, 3:1 }, { 1:3, 2:2, 3:1 } ] co = ColourOperation( matrix, colour_mapping ) configuration = Configuration( [ 1, 2, 3 ] ) co.operate_on( configuration ) np.testing.assert_array_equal( co.operate_on( configuration ).vector, np.array( [ 2, 1, 3 ] ) ) def test_mul( self ): matrix_a = np.array( [ [ 1, 0 ], [ 0, 1 ] ] ) colour_mapping_a = [ { 0:1, 1:0 }, { 0:1, 1:0 } ] matrix_b = np.array( [ [ 0, 1 ], [ 1, 0 ] ] ) colour_mapping_b = [ { 0:1, 1:0 }, { 0:1, 1:0 } ] co_a = ColourOperation( matrix_a, colour_mapping_a ) co_b = ColourOperation( matrix_b, colour_mapping_b ) co_c = co_a * co_b np.testing.assert_array_equal( co_c.matrix , np.array( [ [ 0, 1 ], [ 1, 0 ] ] ) ) self.assertEqual( co_c.colour_mapping, [ { 0:0, 1:1 }, { 0:0, 1:1 } ] ) if __name__ == '__main__': unittest.main()<|fim▁end|>
class ColourOperationTestCase( unittest.TestCase ): """Tests for colour operation methods"""
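The assertions in this row fully pin down the behaviour being trained on: `from_vector` turns a 1-indexed permutation vector into a permutation matrix, and `operate_on` first permutes the configuration vector and then relabels each site through that site's colour-mapping dict. A short illustrative reimplementation of exactly what the tests assert (bsym's actual implementation is not shown in this row, so the internals here are an assumption):

import numpy as np

def matrix_from_vector(vector):
    # vector[j] = v (1-indexed) puts a 1 in row v-1 of column j,
    # matching the matrix asserted in test_from_vector above.
    n = len(vector)
    m = np.zeros((n, n), dtype=int)
    for j, v in enumerate(vector):
        m[v - 1, j] = 1
    return m

def operate_on(matrix, colour_mapping, vector):
    # Permute the site occupations, then recolour each site through
    # its own mapping dict.
    permuted = matrix.dot(np.asarray(vector))
    return [colour_mapping[i][v] for i, v in enumerate(permuted)]

# Reproduces the expectations asserted in the tests above:
assert matrix_from_vector([2, 3, 1]).tolist() == [[0, 0, 1], [1, 0, 0], [0, 1, 0]]
mapping = [{1: 1, 2: 2, 3: 3}, {1: 2, 2: 3, 3: 1}, {1: 3, 2: 2, 3: 1}]
matrix = np.array([[0, 1, 0], [0, 0, 1], [1, 0, 0]])
assert operate_on(matrix, mapping, [1, 2, 3]) == [2, 1, 3]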
<|file_name|>main.py<|end_file_name|><|fim▁begin|>"""
Main file that launches the program
<|fim▁hole|>game = Game()
game.play()<|fim▁end|>
""" from Game import *
<|file_name|>eventarc_client.ts<|end_file_name|><|fim▁begin|>// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // // ** This file is automatically generated by gapic-generator-typescript. ** // ** https://github.com/googleapis/gapic-generator-typescript ** // ** All changes to this file may be overwritten. ** /* global window */ import * as gax from 'google-gax'; import { Callback, CallOptions, Descriptors, ClientOptions, LROperation, PaginationCallback, GaxCall, } from 'google-gax'; import {Transform} from 'stream'; import {RequestType} from 'google-gax/build/src/apitypes'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); /** * Client JSON configuration object, loaded from * `src/v1/eventarc_client_config.json`. * This file defines retry strategy and timeouts for all API methods in this library. */ import * as gapicConfig from './eventarc_client_config.json'; import {operationsProtos} from 'google-gax'; const version = require('../../../package.json').version; /** * Eventarc allows users to subscribe to various events that are provided by * Google Cloud services and forward them to supported destinations. * @class * @memberof v1 */ export class EventarcClient { private _terminated = false; private _opts: ClientOptions; private _providedCustomServicePath: boolean; private _gaxModule: typeof gax | typeof gax.fallback; private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; private _protos: {}; private _defaults: {[method: string]: gax.CallSettings}; auth: gax.GoogleAuth; descriptors: Descriptors = { page: {}, stream: {}, longrunning: {}, batching: {}, }; warn: (code: string, message: string, warnType?: string) => void; innerApiCalls: {[name: string]: Function}; pathTemplates: {[name: string]: gax.PathTemplate}; operationsClient: gax.OperationsClient; eventarcStub?: Promise<{[name: string]: Function}>; /** * Construct an instance of EventarcClient. * * @param {object} [options] - The configuration object. * The options accepted by the constructor are described in detail * in [this document](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#creating-the-client-instance). * The common options are: * @param {object} [options.credentials] - Credentials object. * @param {string} [options.credentials.client_email] * @param {string} [options.credentials.private_key] * @param {string} [options.email] - Account email address. Required when * using a .pem or .p12 keyFilename. * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or * .p12 key downloaded from the Google Developers Console. If you provide * a path to a JSON file, the projectId option below is not necessary. * NOTE: .pem and .p12 require you to specify options.email as well. * @param {number} [options.port] - The port on which to connect to * the remote host. * @param {string} [options.projectId] - The project ID from the Google * Developer's Console, e.g. 'grape-spaceship-123'. 
We will also check * the environment variable GCLOUD_PROJECT for your project ID. If your * app is running in an environment which supports * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, * your project ID will be detected automatically. * @param {string} [options.apiEndpoint] - The domain name of the * API remote host. * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. * Follows the structure of {@link gapicConfig}. * @param {boolean} [options.fallback] - Use HTTP fallback mode. * In fallback mode, a special browser-compatible transport implementation is used * instead of gRPC transport. In browser context (if the `window` object is defined) * the fallback mode is enabled automatically; set `options.fallback` to `false` * if you need to override this behavior. */ constructor(opts?: ClientOptions) { // Ensure that options include all the required fields. const staticMembers = this.constructor as typeof EventarcClient; const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; this._providedCustomServicePath = !!( opts?.servicePath || opts?.apiEndpoint ); const port = opts?.port || staticMembers.port; const clientConfig = opts?.clientConfig ?? {}; const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { opts['scopes'] = staticMembers.scopes; } // Choose either gRPC or proto-over-HTTP implementation of google-gax. this._gaxModule = opts.fallback ? gax.fallback : gax; // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. this._gaxGrpc = new this._gaxModule.GrpcClient(opts); // Save options to use in initialize() method. this._opts = opts; // Save the auth object to the client, for use by other methods. this.auth = this._gaxGrpc.auth as gax.GoogleAuth; // Set useJWTAccessWithScope on the auth object. this.auth.useJWTAccessWithScope = true; // Set defaultServicePath on the auth object. this.auth.defaultServicePath = staticMembers.servicePath; // Set the default scopes in auth client if needed. if (servicePath === staticMembers.servicePath) { this.auth.defaultScopes = staticMembers.scopes; } // Determine the client header string. const clientHeader = [`gax/${this._gaxModule.version}`, `gapic/${version}`]; if (typeof process !== 'undefined' && 'versions' in process) { clientHeader.push(`gl-node/${process.versions.node}`); } else { clientHeader.push(`gl-web/${this._gaxModule.version}`); } if (!opts.fallback) { clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); } else if (opts.fallback === 'rest') { clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); } if (opts.libName && opts.libVersion) { clientHeader.push(`${opts.libName}/${opts.libVersion}`); } // Load the applicable protos. this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); // This API contains "path templates"; forward-slash-separated // identifiers to uniquely identify resources within the API. // Create useful helper objects for these. 
this.pathTemplates = { channelPathTemplate: new this._gaxModule.PathTemplate( 'projects/{project}/locations/{location}/channels/{channel}' ), channelConnectionPathTemplate: new this._gaxModule.PathTemplate( 'projects/{project}/locations/{location}/channelConnections/{channel_connection}' ), locationPathTemplate: new this._gaxModule.PathTemplate( 'projects/{project}/locations/{location}' ), projectPathTemplate: new this._gaxModule.PathTemplate( 'projects/{project}' ), triggerPathTemplate: new this._gaxModule.PathTemplate( 'projects/{project}/locations/{location}/triggers/{trigger}' ), }; // Some of the methods on this service return "paged" results, // (e.g. 50 results at a time, with tokens to get subsequent // pages). Denote the keys used for pagination and results. this.descriptors.page = { listTriggers: new this._gaxModule.PageDescriptor( 'pageToken', 'nextPageToken', 'triggers' ), listChannels: new this._gaxModule.PageDescriptor( 'pageToken', 'nextPageToken', 'channels' ), listChannelConnections: new this._gaxModule.PageDescriptor( 'pageToken', 'nextPageToken', 'channelConnections' ), }; const protoFilesRoot = this._gaxModule.protobuf.Root.fromJSON(jsonProtos); // This API contains "long-running operations", which return a // an Operation object that allows for tracking of the operation, // rather than holding a request open. this.operationsClient = this._gaxModule .lro({ auth: this.auth, grpc: 'grpc' in this._gaxGrpc ? this._gaxGrpc.grpc : undefined, }) .operationsClient(opts); const createTriggerResponse = protoFilesRoot.lookup( '.google.cloud.eventarc.v1.Trigger' ) as gax.protobuf.Type; const createTriggerMetadata = protoFilesRoot.lookup( '.google.cloud.eventarc.v1.OperationMetadata' ) as gax.protobuf.Type; const updateTriggerResponse = protoFilesRoot.lookup( '.google.cloud.eventarc.v1.Trigger' ) as gax.protobuf.Type; const updateTriggerMetadata = protoFilesRoot.lookup( '.google.cloud.eventarc.v1.OperationMetadata' ) as gax.protobuf.Type; const deleteTriggerResponse = protoFilesRoot.lookup( '.google.cloud.eventarc.v1.Trigger' ) as gax.protobuf.Type; const deleteTriggerMetadata = protoFilesRoot.lookup( '.google.cloud.eventarc.v1.OperationMetadata' ) as gax.protobuf.Type; const createChannelResponse = protoFilesRoot.lookup( '.google.cloud.eventarc.v1.Channel' ) as gax.protobuf.Type; const createChannelMetadata = protoFilesRoot.lookup( '.google.cloud.eventarc.v1.OperationMetadata' ) as gax.protobuf.Type; const updateChannelResponse = protoFilesRoot.lookup( '.google.cloud.eventarc.v1.Channel' ) as gax.protobuf.Type; const updateChannelMetadata = protoFilesRoot.lookup( '.google.cloud.eventarc.v1.OperationMetadata' ) as gax.protobuf.Type; const deleteChannelResponse = protoFilesRoot.lookup( '.google.cloud.eventarc.v1.Channel' ) as gax.protobuf.Type; const deleteChannelMetadata = protoFilesRoot.lookup( '.google.cloud.eventarc.v1.OperationMetadata' ) as gax.protobuf.Type; const createChannelConnectionResponse = protoFilesRoot.lookup( '.google.cloud.eventarc.v1.ChannelConnection' ) as gax.protobuf.Type; const createChannelConnectionMetadata = protoFilesRoot.lookup( '.google.cloud.eventarc.v1.OperationMetadata' ) as gax.protobuf.Type; const deleteChannelConnectionResponse = protoFilesRoot.lookup( '.google.cloud.eventarc.v1.ChannelConnection' ) as gax.protobuf.Type; const deleteChannelConnectionMetadata = protoFilesRoot.lookup( '.google.cloud.eventarc.v1.OperationMetadata' ) as gax.protobuf.Type; this.descriptors.longrunning = { createTrigger: new this._gaxModule.LongrunningDescriptor( 
this.operationsClient, createTriggerResponse.decode.bind(createTriggerResponse), createTriggerMetadata.decode.bind(createTriggerMetadata) ), updateTrigger: new this._gaxModule.LongrunningDescriptor( this.operationsClient, updateTriggerResponse.decode.bind(updateTriggerResponse), updateTriggerMetadata.decode.bind(updateTriggerMetadata) ), deleteTrigger: new this._gaxModule.LongrunningDescriptor( this.operationsClient, deleteTriggerResponse.decode.bind(deleteTriggerResponse), deleteTriggerMetadata.decode.bind(deleteTriggerMetadata) ), createChannel: new this._gaxModule.LongrunningDescriptor( this.operationsClient, createChannelResponse.decode.bind(createChannelResponse), createChannelMetadata.decode.bind(createChannelMetadata) ), updateChannel: new this._gaxModule.LongrunningDescriptor( this.operationsClient, updateChannelResponse.decode.bind(updateChannelResponse), updateChannelMetadata.decode.bind(updateChannelMetadata) ), deleteChannel: new this._gaxModule.LongrunningDescriptor( this.operationsClient, deleteChannelResponse.decode.bind(deleteChannelResponse), deleteChannelMetadata.decode.bind(deleteChannelMetadata) ), createChannelConnection: new this._gaxModule.LongrunningDescriptor( this.operationsClient, createChannelConnectionResponse.decode.bind( createChannelConnectionResponse ), createChannelConnectionMetadata.decode.bind( createChannelConnectionMetadata ) ), deleteChannelConnection: new this._gaxModule.LongrunningDescriptor( this.operationsClient, deleteChannelConnectionResponse.decode.bind( deleteChannelConnectionResponse ), deleteChannelConnectionMetadata.decode.bind( deleteChannelConnectionMetadata ) ), }; // Put together the default options sent with requests. this._defaults = this._gaxGrpc.constructSettings( 'google.cloud.eventarc.v1.Eventarc', gapicConfig as gax.ClientConfig, opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')} ); // Set up a dictionary of "inner API calls"; the core implementation // of calling the API is handled in `google-gax`, with this code // merely providing the destination and request information. this.innerApiCalls = {}; // Add a warn function to the client constructor so it can be easily tested. this.warn = gax.warn; } /** * Initialize the client. * Performs asynchronous operations (such as authentication) and prepares the client. * This function will be called automatically when any class method is called for the * first time, but if you need to initialize it before calling an actual method, * feel free to call initialize() directly. * * You can await on this method if you want to make sure the client is initialized. * * @returns {Promise} A promise that resolves to an authenticated service stub. */ initialize() { // If the client stub promise is already initialized, return immediately. if (this.eventarcStub) { return this.eventarcStub; } // Put together the "service stub" for // google.cloud.eventarc.v1.Eventarc. this.eventarcStub = this._gaxGrpc.createStub( this._opts.fallback ? (this._protos as protobuf.Root).lookupService( 'google.cloud.eventarc.v1.Eventarc' ) : // eslint-disable-next-line @typescript-eslint/no-explicit-any (this._protos as any).google.cloud.eventarc.v1.Eventarc, this._opts, this._providedCustomServicePath ) as Promise<{[method: string]: Function}>; // Iterate over each of the methods that the service provides // and create an API call method for each. 
const eventarcStubMethods = [ 'getTrigger', 'listTriggers', 'createTrigger', 'updateTrigger', 'deleteTrigger', 'getChannel', 'listChannels', 'createChannel', 'updateChannel', 'deleteChannel', 'getChannelConnection', 'listChannelConnections', 'createChannelConnection', 'deleteChannelConnection', ]; for (const methodName of eventarcStubMethods) { const callPromise = this.eventarcStub.then( stub => (...args: Array<{}>) => { if (this._terminated) { return Promise.reject('The client has already been closed.'); } const func = stub[methodName]; return func.apply(stub, args); }, (err: Error | null | undefined) => () => { throw err; } ); const descriptor = this.descriptors.page[methodName] || this.descriptors.longrunning[methodName] || undefined; const apiCall = this._gaxModule.createApiCall( callPromise, this._defaults[methodName], descriptor ); this.innerApiCalls[methodName] = apiCall; } return this.eventarcStub; } /** * The DNS address for this API service. * @returns {string} The DNS address for this service. */ static get servicePath() { return 'eventarc.googleapis.com'; } /** * The DNS address for this API service - same as servicePath(), * exists for compatibility reasons. * @returns {string} The DNS address for this service. */ static get apiEndpoint() { return 'eventarc.googleapis.com'; } /** * The port for this API service. * @returns {number} The default port for this service. */ static get port() { return 443; } /** * The scopes needed to make gRPC calls for every method defined * in this service. * @returns {string[]} List of default scopes. */ static get scopes() { return ['https://www.googleapis.com/auth/cloud-platform']; } getProjectId(): Promise<string>; getProjectId(callback: Callback<string, undefined, undefined>): void; /** * Return the project ID used by this class. * @returns {Promise} A promise that resolves to string containing the project ID. */ getProjectId( callback?: Callback<string, undefined, undefined> ): Promise<string> | void { if (callback) { this.auth.getProjectId(callback); return; } return this.auth.getProjectId(); } // ------------------- // -- Service calls -- // ------------------- /** * Get a single trigger. * * @param {Object} request * The request object that will be sent. * @param {string} request.name * Required. The name of the trigger to get. * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. * The first element of the array is an object representing [Trigger]{@link google.cloud.eventarc.v1.Trigger}. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) * for more details and examples. 
* @example <caption>include:samples/generated/v1/eventarc.get_trigger.js</caption> * region_tag:eventarc_v1_generated_Eventarc_GetTrigger_async */ getTrigger( request?: protos.google.cloud.eventarc.v1.IGetTriggerRequest, options?: CallOptions ): Promise< [ protos.google.cloud.eventarc.v1.ITrigger, protos.google.cloud.eventarc.v1.IGetTriggerRequest | undefined, {} | undefined ] >; getTrigger( request: protos.google.cloud.eventarc.v1.IGetTriggerRequest, options: CallOptions, callback: Callback< protos.google.cloud.eventarc.v1.ITrigger, protos.google.cloud.eventarc.v1.IGetTriggerRequest | null | undefined, {} | null | undefined > ): void; getTrigger( request: protos.google.cloud.eventarc.v1.IGetTriggerRequest, callback: Callback< protos.google.cloud.eventarc.v1.ITrigger, protos.google.cloud.eventarc.v1.IGetTriggerRequest | null | undefined, {} | null | undefined > ): void; getTrigger( request?: protos.google.cloud.eventarc.v1.IGetTriggerRequest, optionsOrCallback?: | CallOptions | Callback< protos.google.cloud.eventarc.v1.ITrigger, protos.google.cloud.eventarc.v1.IGetTriggerRequest | null | undefined, {} | null | undefined >, callback?: Callback< protos.google.cloud.eventarc.v1.ITrigger, protos.google.cloud.eventarc.v1.IGetTriggerRequest | null | undefined, {} | null | undefined > ): Promise< [ protos.google.cloud.eventarc.v1.ITrigger, protos.google.cloud.eventarc.v1.IGetTriggerRequest | undefined, {} | undefined ] > | void { request = request || {}; let options: CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; options = {}; } else { options = optionsOrCallback as CallOptions; } options = options || {}; options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ name: request.name || '', }); this.initialize(); return this.innerApiCalls.getTrigger(request, options, callback); } /** * Get a single Channel. * * @param {Object} request * The request object that will be sent. * @param {string} request.name * Required. The name of the channel to get. * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. * The first element of the array is an object representing [Channel]{@link google.cloud.eventarc.v1.Channel}. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) * for more details and examples. 
* @example <caption>include:samples/generated/v1/eventarc.get_channel.js</caption> * region_tag:eventarc_v1_generated_Eventarc_GetChannel_async */ getChannel( request?: protos.google.cloud.eventarc.v1.IGetChannelRequest, options?: CallOptions ): Promise< [ protos.google.cloud.eventarc.v1.IChannel, protos.google.cloud.eventarc.v1.IGetChannelRequest | undefined, {} | undefined ] >; getChannel( request: protos.google.cloud.eventarc.v1.IGetChannelRequest, options: CallOptions, callback: Callback< protos.google.cloud.eventarc.v1.IChannel, protos.google.cloud.eventarc.v1.IGetChannelRequest | null | undefined, {} | null | undefined > ): void; getChannel( request: protos.google.cloud.eventarc.v1.IGetChannelRequest, callback: Callback< protos.google.cloud.eventarc.v1.IChannel, protos.google.cloud.eventarc.v1.IGetChannelRequest | null | undefined, {} | null | undefined > ): void; getChannel( request?: protos.google.cloud.eventarc.v1.IGetChannelRequest, optionsOrCallback?: | CallOptions | Callback< protos.google.cloud.eventarc.v1.IChannel, protos.google.cloud.eventarc.v1.IGetChannelRequest | null | undefined, {} | null | undefined >, callback?: Callback< protos.google.cloud.eventarc.v1.IChannel, protos.google.cloud.eventarc.v1.IGetChannelRequest | null | undefined, {} | null | undefined > ): Promise< [ protos.google.cloud.eventarc.v1.IChannel, protos.google.cloud.eventarc.v1.IGetChannelRequest | undefined, {} | undefined ] > | void { request = request || {}; let options: CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; options = {}; } else { options = optionsOrCallback as CallOptions; } options = options || {}; options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ name: request.name || '', }); this.initialize(); return this.innerApiCalls.getChannel(request, options, callback); } /** * Get a single ChannelConnection. * * @param {Object} request * The request object that will be sent. * @param {string} request.name * Required. The name of the channel connection to get. * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. * The first element of the array is an object representing [ChannelConnection]{@link google.cloud.eventarc.v1.ChannelConnection}. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) * for more details and examples. 
* @example <caption>include:samples/generated/v1/eventarc.get_channel_connection.js</caption> * region_tag:eventarc_v1_generated_Eventarc_GetChannelConnection_async */ getChannelConnection( request?: protos.google.cloud.eventarc.v1.IGetChannelConnectionRequest, options?: CallOptions ): Promise< [ protos.google.cloud.eventarc.v1.IChannelConnection, protos.google.cloud.eventarc.v1.IGetChannelConnectionRequest | undefined, {} | undefined ] >; getChannelConnection( request: protos.google.cloud.eventarc.v1.IGetChannelConnectionRequest, options: CallOptions, callback: Callback< protos.google.cloud.eventarc.v1.IChannelConnection, | protos.google.cloud.eventarc.v1.IGetChannelConnectionRequest | null | undefined, {} | null | undefined > ): void; getChannelConnection( request: protos.google.cloud.eventarc.v1.IGetChannelConnectionRequest, callback: Callback< protos.google.cloud.eventarc.v1.IChannelConnection, | protos.google.cloud.eventarc.v1.IGetChannelConnectionRequest | null | undefined, {} | null | undefined > ): void; getChannelConnection( request?: protos.google.cloud.eventarc.v1.IGetChannelConnectionRequest, optionsOrCallback?: | CallOptions | Callback< protos.google.cloud.eventarc.v1.IChannelConnection, | protos.google.cloud.eventarc.v1.IGetChannelConnectionRequest | null | undefined, {} | null | undefined >, callback?: Callback< protos.google.cloud.eventarc.v1.IChannelConnection, | protos.google.cloud.eventarc.v1.IGetChannelConnectionRequest | null | undefined, {} | null | undefined > ): Promise< [ protos.google.cloud.eventarc.v1.IChannelConnection, protos.google.cloud.eventarc.v1.IGetChannelConnectionRequest | undefined, {} | undefined ] > | void { request = request || {}; let options: CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; options = {}; } else { options = optionsOrCallback as CallOptions; } options = options || {}; options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ name: request.name || '', }); this.initialize(); return this.innerApiCalls.getChannelConnection(request, options, callback); } /** * Create a new trigger in a particular project and location. * * @param {Object} request * The request object that will be sent. * @param {string} request.parent * Required. The parent collection in which to add this trigger. * @param {google.cloud.eventarc.v1.Trigger} request.trigger * Required. The trigger to create. * @param {string} request.triggerId * Required. The user-provided ID to be assigned to the trigger. * @param {boolean} request.validateOnly * Required. If set, validate the request and preview the review, but do not * post it. * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. * The first element of the array is an object representing * a long running operation. Its `promise()` method returns a promise * you can `await` for. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) * for more details and examples. 
* @example <caption>include:samples/generated/v1/eventarc.create_trigger.js</caption> * region_tag:eventarc_v1_generated_Eventarc_CreateTrigger_async */ createTrigger( request?: protos.google.cloud.eventarc.v1.ICreateTriggerRequest, options?: CallOptions ): Promise< [ LROperation< protos.google.cloud.eventarc.v1.ITrigger, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | undefined, {} | undefined ] >; createTrigger( request: protos.google.cloud.eventarc.v1.ICreateTriggerRequest, options: CallOptions, callback: Callback< LROperation< protos.google.cloud.eventarc.v1.ITrigger, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined > ): void; createTrigger( request: protos.google.cloud.eventarc.v1.ICreateTriggerRequest, callback: Callback< LROperation< protos.google.cloud.eventarc.v1.ITrigger, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined > ): void; createTrigger( request?: protos.google.cloud.eventarc.v1.ICreateTriggerRequest, optionsOrCallback?: | CallOptions | Callback< LROperation< protos.google.cloud.eventarc.v1.ITrigger, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined >, callback?: Callback< LROperation< protos.google.cloud.eventarc.v1.ITrigger, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined > ): Promise< [ LROperation< protos.google.cloud.eventarc.v1.ITrigger, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | undefined, {} | undefined ] > | void { request = request || {}; let options: CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; options = {}; } else { options = optionsOrCallback as CallOptions; } options = options || {}; options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ parent: request.parent || '', }); this.initialize(); return this.innerApiCalls.createTrigger(request, options, callback); } /** * Check the status of the long running operation returned by `createTrigger()`. * @param {String} name * The operation name that will be passed. * @returns {Promise} - The promise which resolves to an object. * The decoded operation object has result and metadata field to get information from. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) * for more details and examples. 
* @example <caption>include:samples/generated/v1/eventarc.create_trigger.js</caption> * region_tag:eventarc_v1_generated_Eventarc_CreateTrigger_async */ async checkCreateTriggerProgress( name: string ): Promise< LROperation< protos.google.cloud.eventarc.v1.Trigger, protos.google.cloud.eventarc.v1.OperationMetadata > > { const request = new operationsProtos.google.longrunning.GetOperationRequest( {name} ); const [operation] = await this.operationsClient.getOperation(request); const decodeOperation = new gax.Operation( operation, this.descriptors.longrunning.createTrigger, gax.createDefaultBackoffSettings() ); return decodeOperation as LROperation< protos.google.cloud.eventarc.v1.Trigger, protos.google.cloud.eventarc.v1.OperationMetadata >; } /** * Update a single trigger. * * @param {Object} request * The request object that will be sent. * @param {google.cloud.eventarc.v1.Trigger} request.trigger * The trigger to be updated. * @param {google.protobuf.FieldMask} request.updateMask * The fields to be updated; only fields explicitly provided are updated. * If no field mask is provided, all provided fields in the request are * updated. To update all fields, provide a field mask of "*". * @param {boolean} request.allowMissing * If set to true, and the trigger is not found, a new trigger will be * created. In this situation, `update_mask` is ignored. * @param {boolean} request.validateOnly * Required. If set, validate the request and preview the review, but do not * post it. * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. * The first element of the array is an object representing * a long running operation. Its `promise()` method returns a promise * you can `await` for. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) * for more details and examples. 
* @example <caption>include:samples/generated/v1/eventarc.update_trigger.js</caption> * region_tag:eventarc_v1_generated_Eventarc_UpdateTrigger_async */ updateTrigger( request?: protos.google.cloud.eventarc.v1.IUpdateTriggerRequest, options?: CallOptions ): Promise< [ LROperation< protos.google.cloud.eventarc.v1.ITrigger, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | undefined, {} | undefined ] >; updateTrigger( request: protos.google.cloud.eventarc.v1.IUpdateTriggerRequest, options: CallOptions, callback: Callback< LROperation< protos.google.cloud.eventarc.v1.ITrigger, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined > ): void; updateTrigger( request: protos.google.cloud.eventarc.v1.IUpdateTriggerRequest, callback: Callback< LROperation< protos.google.cloud.eventarc.v1.ITrigger, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined > ): void; updateTrigger( request?: protos.google.cloud.eventarc.v1.IUpdateTriggerRequest, optionsOrCallback?: | CallOptions | Callback< LROperation< protos.google.cloud.eventarc.v1.ITrigger, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined >, callback?: Callback< LROperation< protos.google.cloud.eventarc.v1.ITrigger, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined > ): Promise< [ LROperation< protos.google.cloud.eventarc.v1.ITrigger, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | undefined, {} | undefined ] > | void { request = request || {}; let options: CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; options = {}; } else { options = optionsOrCallback as CallOptions; } options = options || {}; options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ 'trigger.name': request.trigger!.name || '', }); this.initialize(); return this.innerApiCalls.updateTrigger(request, options, callback); } /** * Check the status of the long running operation returned by `updateTrigger()`. * @param {String} name * The operation name that will be passed. * @returns {Promise} - The promise which resolves to an object. * The decoded operation object has result and metadata field to get information from. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) * for more details and examples. 
* @example <caption>include:samples/generated/v1/eventarc.update_trigger.js</caption> * region_tag:eventarc_v1_generated_Eventarc_UpdateTrigger_async */ async checkUpdateTriggerProgress( name: string ): Promise< LROperation< protos.google.cloud.eventarc.v1.Trigger, protos.google.cloud.eventarc.v1.OperationMetadata > > { const request = new operationsProtos.google.longrunning.GetOperationRequest( {name} ); const [operation] = await this.operationsClient.getOperation(request); const decodeOperation = new gax.Operation( operation, this.descriptors.longrunning.updateTrigger, gax.createDefaultBackoffSettings() ); return decodeOperation as LROperation< protos.google.cloud.eventarc.v1.Trigger, protos.google.cloud.eventarc.v1.OperationMetadata >; } /** * Delete a single trigger. * * @param {Object} request * The request object that will be sent. * @param {string} request.name * Required. The name of the trigger to be deleted. * @param {string} request.etag * If provided, the trigger will only be deleted if the etag matches the * current etag on the resource. * @param {boolean} request.allowMissing * If set to true, and the trigger is not found, the request will succeed * but no action will be taken on the server. * @param {boolean} request.validateOnly * Required. If set, validate the request and preview the review, but do not * post it. * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. * The first element of the array is an object representing * a long running operation. Its `promise()` method returns a promise * you can `await` for. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) * for more details and examples. 
* @example <caption>include:samples/generated/v1/eventarc.delete_trigger.js</caption> * region_tag:eventarc_v1_generated_Eventarc_DeleteTrigger_async */ deleteTrigger( request?: protos.google.cloud.eventarc.v1.IDeleteTriggerRequest, options?: CallOptions ): Promise< [ LROperation< protos.google.cloud.eventarc.v1.ITrigger, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | undefined, {} | undefined ] >; deleteTrigger( request: protos.google.cloud.eventarc.v1.IDeleteTriggerRequest, options: CallOptions, callback: Callback< LROperation< protos.google.cloud.eventarc.v1.ITrigger, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined > ): void; deleteTrigger( request: protos.google.cloud.eventarc.v1.IDeleteTriggerRequest, callback: Callback< LROperation< protos.google.cloud.eventarc.v1.ITrigger, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined > ): void; deleteTrigger( request?: protos.google.cloud.eventarc.v1.IDeleteTriggerRequest, optionsOrCallback?: | CallOptions | Callback< LROperation< protos.google.cloud.eventarc.v1.ITrigger, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined >, callback?: Callback< LROperation< protos.google.cloud.eventarc.v1.ITrigger, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined > ): Promise< [ LROperation< protos.google.cloud.eventarc.v1.ITrigger, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | undefined, {} | undefined ] > | void { request = request || {}; let options: CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; options = {}; } else { options = optionsOrCallback as CallOptions; } options = options || {}; options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ name: request.name || '', }); this.initialize(); return this.innerApiCalls.deleteTrigger(request, options, callback); } /** * Check the status of the long running operation returned by `deleteTrigger()`. * @param {String} name * The operation name that will be passed. * @returns {Promise} - The promise which resolves to an object. * The decoded operation object has result and metadata field to get information from. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) * for more details and examples. 
* @example <caption>include:samples/generated/v1/eventarc.delete_trigger.js</caption> * region_tag:eventarc_v1_generated_Eventarc_DeleteTrigger_async */ async checkDeleteTriggerProgress( name: string ): Promise< LROperation< protos.google.cloud.eventarc.v1.Trigger, protos.google.cloud.eventarc.v1.OperationMetadata > > { const request = new operationsProtos.google.longrunning.GetOperationRequest( {name} ); const [operation] = await this.operationsClient.getOperation(request); const decodeOperation = new gax.Operation( operation, this.descriptors.longrunning.deleteTrigger, gax.createDefaultBackoffSettings() ); return decodeOperation as LROperation< protos.google.cloud.eventarc.v1.Trigger, protos.google.cloud.eventarc.v1.OperationMetadata >; } /** * Create a new channel in a particular project and location. * * @param {Object} request * The request object that will be sent. * @param {string} request.parent * Required. The parent collection in which to add this channel. * @param {google.cloud.eventarc.v1.Channel} request.channel * Required. The channel to create. * @param {string} request.channelId * Required. The user-provided ID to be assigned to the channel. * @param {boolean} request.validateOnly * Required. If set, validate the request and preview the review, but do not * post it. * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. * The first element of the array is an object representing * a long running operation. Its `promise()` method returns a promise * you can `await` for. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) * for more details and examples. 
* @example <caption>include:samples/generated/v1/eventarc.create_channel.js</caption> * region_tag:eventarc_v1_generated_Eventarc_CreateChannel_async */ createChannel( request?: protos.google.cloud.eventarc.v1.ICreateChannelRequest, options?: CallOptions ): Promise< [ LROperation< protos.google.cloud.eventarc.v1.IChannel, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | undefined, {} | undefined ] >; createChannel( request: protos.google.cloud.eventarc.v1.ICreateChannelRequest, options: CallOptions, callback: Callback< LROperation< protos.google.cloud.eventarc.v1.IChannel, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined > ): void; createChannel( request: protos.google.cloud.eventarc.v1.ICreateChannelRequest, callback: Callback< LROperation< protos.google.cloud.eventarc.v1.IChannel, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined > ): void; createChannel( request?: protos.google.cloud.eventarc.v1.ICreateChannelRequest, optionsOrCallback?: | CallOptions | Callback< LROperation< protos.google.cloud.eventarc.v1.IChannel, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined >, callback?: Callback< LROperation< protos.google.cloud.eventarc.v1.IChannel, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined > ): Promise< [ LROperation< protos.google.cloud.eventarc.v1.IChannel, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | undefined, {} | undefined ] > | void { request = request || {}; let options: CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; options = {}; } else { options = optionsOrCallback as CallOptions; } options = options || {}; options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ parent: request.parent || '', }); this.initialize(); return this.innerApiCalls.createChannel(request, options, callback); } /** * Check the status of the long running operation returned by `createChannel()`. * @param {String} name * The operation name that will be passed. * @returns {Promise} - The promise which resolves to an object. * The decoded operation object has result and metadata field to get information from. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) * for more details and examples. 
* @example <caption>include:samples/generated/v1/eventarc.create_channel.js</caption> * region_tag:eventarc_v1_generated_Eventarc_CreateChannel_async */ async checkCreateChannelProgress( name: string ): Promise< LROperation< protos.google.cloud.eventarc.v1.Channel, protos.google.cloud.eventarc.v1.OperationMetadata > > { const request = new operationsProtos.google.longrunning.GetOperationRequest( {name} ); const [operation] = await this.operationsClient.getOperation(request); const decodeOperation = new gax.Operation( operation, this.descriptors.longrunning.createChannel, gax.createDefaultBackoffSettings() ); return decodeOperation as LROperation< protos.google.cloud.eventarc.v1.Channel, protos.google.cloud.eventarc.v1.OperationMetadata >; } /** * Update a single channel. * * @param {Object} request * The request object that will be sent. * @param {google.cloud.eventarc.v1.Channel} request.channel * The channel to be updated. * @param {google.protobuf.FieldMask} request.updateMask * The fields to be updated; only fields explicitly provided are updated. * If no field mask is provided, all provided fields in the request are * updated. To update all fields, provide a field mask of "*". * @param {boolean} request.validateOnly * Required. If set, validate the request and preview the review, but do not * post it. * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. * The first element of the array is an object representing * a long running operation. Its `promise()` method returns a promise * you can `await` for. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) * for more details and examples. 
* @example <caption>include:samples/generated/v1/eventarc.update_channel.js</caption> * region_tag:eventarc_v1_generated_Eventarc_UpdateChannel_async */ updateChannel( request?: protos.google.cloud.eventarc.v1.IUpdateChannelRequest, options?: CallOptions ): Promise< [ LROperation< protos.google.cloud.eventarc.v1.IChannel, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | undefined, {} | undefined ] >; updateChannel( request: protos.google.cloud.eventarc.v1.IUpdateChannelRequest, options: CallOptions, callback: Callback< LROperation< protos.google.cloud.eventarc.v1.IChannel, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined > ): void; updateChannel( request: protos.google.cloud.eventarc.v1.IUpdateChannelRequest, callback: Callback< LROperation< protos.google.cloud.eventarc.v1.IChannel, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined > ): void; updateChannel( request?: protos.google.cloud.eventarc.v1.IUpdateChannelRequest, optionsOrCallback?: | CallOptions | Callback< LROperation< protos.google.cloud.eventarc.v1.IChannel, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined >, callback?: Callback< LROperation< protos.google.cloud.eventarc.v1.IChannel, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined > ): Promise< [ LROperation< protos.google.cloud.eventarc.v1.IChannel, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | undefined, {} | undefined ] > | void { request = request || {}; let options: CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; options = {}; } else { options = optionsOrCallback as CallOptions; } options = options || {}; options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ 'channel.name': request.channel!.name || '', }); this.initialize(); return this.innerApiCalls.updateChannel(request, options, callback); } /** * Check the status of the long running operation returned by `updateChannel()`. * @param {String} name * The operation name that will be passed. * @returns {Promise} - The promise which resolves to an object. * The decoded operation object has result and metadata field to get information from. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) * for more details and examples. 
* @example <caption>include:samples/generated/v1/eventarc.update_channel.js</caption> * region_tag:eventarc_v1_generated_Eventarc_UpdateChannel_async */ async checkUpdateChannelProgress( name: string ): Promise< LROperation< protos.google.cloud.eventarc.v1.Channel, protos.google.cloud.eventarc.v1.OperationMetadata > > { const request = new operationsProtos.google.longrunning.GetOperationRequest( {name} ); const [operation] = await this.operationsClient.getOperation(request); const decodeOperation = new gax.Operation( operation, this.descriptors.longrunning.updateChannel, gax.createDefaultBackoffSettings() ); return decodeOperation as LROperation< protos.google.cloud.eventarc.v1.Channel, protos.google.cloud.eventarc.v1.OperationMetadata >; } /** * Delete a single channel. * * @param {Object} request * The request object that will be sent. * @param {string} request.name * Required. The name of the channel to be deleted. * @param {boolean} request.validateOnly * Required. If set, validate the request and preview the review, but do not * post it. * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. * The first element of the array is an object representing * a long running operation. Its `promise()` method returns a promise * you can `await` for. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) * for more details and examples. * @example <caption>include:samples/generated/v1/eventarc.delete_channel.js</caption> * region_tag:eventarc_v1_generated_Eventarc_DeleteChannel_async */ deleteChannel( request?: protos.google.cloud.eventarc.v1.IDeleteChannelRequest, options?: CallOptions ): Promise< [ LROperation< protos.google.cloud.eventarc.v1.IChannel, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | undefined, {} | undefined ] >; deleteChannel( request: protos.google.cloud.eventarc.v1.IDeleteChannelRequest, options: CallOptions, callback: Callback< LROperation< protos.google.cloud.eventarc.v1.IChannel, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined > ): void; deleteChannel( request: protos.google.cloud.eventarc.v1.IDeleteChannelRequest, callback: Callback< LROperation< protos.google.cloud.eventarc.v1.IChannel, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined > ): void; deleteChannel( request?: protos.google.cloud.eventarc.v1.IDeleteChannelRequest, optionsOrCallback?: | CallOptions | Callback< LROperation< protos.google.cloud.eventarc.v1.IChannel, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined >, callback?: Callback< LROperation< protos.google.cloud.eventarc.v1.IChannel, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined > ): Promise< [ LROperation< protos.google.cloud.eventarc.v1.IChannel, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | undefined, {} | undefined ] > | void { request = request || {}; let options: CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; 
options = {}; } else { options = optionsOrCallback as CallOptions; } options = options || {}; options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ name: request.name || '', }); this.initialize(); return this.innerApiCalls.deleteChannel(request, options, callback); } /** * Check the status of the long running operation returned by `deleteChannel()`. * @param {String} name * The operation name that will be passed. * @returns {Promise} - The promise which resolves to an object. * The decoded operation object has result and metadata field to get information from. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) * for more details and examples. * @example <caption>include:samples/generated/v1/eventarc.delete_channel.js</caption> * region_tag:eventarc_v1_generated_Eventarc_DeleteChannel_async */ async checkDeleteChannelProgress( name: string ): Promise< LROperation< protos.google.cloud.eventarc.v1.Channel, protos.google.cloud.eventarc.v1.OperationMetadata > > { const request = new operationsProtos.google.longrunning.GetOperationRequest( {name} ); const [operation] = await this.operationsClient.getOperation(request); const decodeOperation = new gax.Operation( operation, this.descriptors.longrunning.deleteChannel, gax.createDefaultBackoffSettings() ); return decodeOperation as LROperation< protos.google.cloud.eventarc.v1.Channel, protos.google.cloud.eventarc.v1.OperationMetadata >; } /** * Create a new ChannelConnection in a particular project and location. * * @param {Object} request * The request object that will be sent. * @param {string} request.parent * Required. The parent collection in which to add this channel connection. * @param {google.cloud.eventarc.v1.ChannelConnection} request.channelConnection * Required. Channel connection to create. * @param {string} request.channelConnectionId * Required. The user-provided ID to be assigned to the channel connection. * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. * The first element of the array is an object representing * a long running operation. Its `promise()` method returns a promise * you can `await` for. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) * for more details and examples. 
* @example <caption>include:samples/generated/v1/eventarc.create_channel_connection.js</caption> * region_tag:eventarc_v1_generated_Eventarc_CreateChannelConnection_async */ createChannelConnection( request?: protos.google.cloud.eventarc.v1.ICreateChannelConnectionRequest, options?: CallOptions ): Promise< [ LROperation< protos.google.cloud.eventarc.v1.IChannelConnection, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | undefined, {} | undefined ] >; createChannelConnection( request: protos.google.cloud.eventarc.v1.ICreateChannelConnectionRequest, options: CallOptions, callback: Callback< LROperation< protos.google.cloud.eventarc.v1.IChannelConnection, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined > ): void; createChannelConnection( request: protos.google.cloud.eventarc.v1.ICreateChannelConnectionRequest, callback: Callback< LROperation< protos.google.cloud.eventarc.v1.IChannelConnection, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined > ): void; createChannelConnection( request?: protos.google.cloud.eventarc.v1.ICreateChannelConnectionRequest, optionsOrCallback?: | CallOptions | Callback< LROperation< protos.google.cloud.eventarc.v1.IChannelConnection, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined >, callback?: Callback< LROperation< protos.google.cloud.eventarc.v1.IChannelConnection, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined > ): Promise< [ LROperation< protos.google.cloud.eventarc.v1.IChannelConnection, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | undefined, {} | undefined ] > | void { request = request || {}; let options: CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; options = {}; } else { options = optionsOrCallback as CallOptions; } options = options || {}; options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ parent: request.parent || '', }); this.initialize(); return this.innerApiCalls.createChannelConnection( request, options, callback ); } /** * Check the status of the long running operation returned by `createChannelConnection()`. * @param {String} name * The operation name that will be passed. * @returns {Promise} - The promise which resolves to an object. * The decoded operation object has result and metadata field to get information from. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) * for more details and examples. 
* @example <caption>include:samples/generated/v1/eventarc.create_channel_connection.js</caption> * region_tag:eventarc_v1_generated_Eventarc_CreateChannelConnection_async */ async checkCreateChannelConnectionProgress( name: string ): Promise< LROperation< protos.google.cloud.eventarc.v1.ChannelConnection, protos.google.cloud.eventarc.v1.OperationMetadata > > { const request = new operationsProtos.google.longrunning.GetOperationRequest( {name} ); const [operation] = await this.operationsClient.getOperation(request); const decodeOperation = new gax.Operation( operation, this.descriptors.longrunning.createChannelConnection, gax.createDefaultBackoffSettings() ); return decodeOperation as LROperation< protos.google.cloud.eventarc.v1.ChannelConnection, protos.google.cloud.eventarc.v1.OperationMetadata >; } /** * Delete a single ChannelConnection. * * @param {Object} request * The request object that will be sent. * @param {string} request.name * Required. The name of the channel connection to delete. * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. * The first element of the array is an object representing * a long running operation. Its `promise()` method returns a promise * you can `await` for. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) * for more details and examples. * @example <caption>include:samples/generated/v1/eventarc.delete_channel_connection.js</caption> * region_tag:eventarc_v1_generated_Eventarc_DeleteChannelConnection_async */ deleteChannelConnection( request?: protos.google.cloud.eventarc.v1.IDeleteChannelConnectionRequest, options?: CallOptions ): Promise< [ LROperation< protos.google.cloud.eventarc.v1.IChannelConnection, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | undefined, {} | undefined ] >; deleteChannelConnection( request: protos.google.cloud.eventarc.v1.IDeleteChannelConnectionRequest, options: CallOptions, callback: Callback< LROperation< protos.google.cloud.eventarc.v1.IChannelConnection, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined > ): void; deleteChannelConnection( request: protos.google.cloud.eventarc.v1.IDeleteChannelConnectionRequest, callback: Callback< LROperation< protos.google.cloud.eventarc.v1.IChannelConnection, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined > ): void; deleteChannelConnection( request?: protos.google.cloud.eventarc.v1.IDeleteChannelConnectionRequest, optionsOrCallback?: | CallOptions | Callback< LROperation< protos.google.cloud.eventarc.v1.IChannelConnection, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined >, callback?: Callback< LROperation< protos.google.cloud.eventarc.v1.IChannelConnection, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined > ): Promise< [ LROperation< protos.google.cloud.eventarc.v1.IChannelConnection, protos.google.cloud.eventarc.v1.IOperationMetadata >, protos.google.longrunning.IOperation | undefined, {} | undefined ] > | void { request = request || {}; let options: 
CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; options = {}; } else { options = optionsOrCallback as CallOptions; } options = options || {}; options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ name: request.name || '', }); this.initialize(); return this.innerApiCalls.deleteChannelConnection( request, options, callback ); } /** * Check the status of the long running operation returned by `deleteChannelConnection()`. * @param {String} name * The operation name that will be passed. * @returns {Promise} - The promise which resolves to an object. * The decoded operation object has result and metadata field to get information from. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) * for more details and examples. * @example <caption>include:samples/generated/v1/eventarc.delete_channel_connection.js</caption> * region_tag:eventarc_v1_generated_Eventarc_DeleteChannelConnection_async */ async checkDeleteChannelConnectionProgress( name: string ): Promise< LROperation< protos.google.cloud.eventarc.v1.ChannelConnection, protos.google.cloud.eventarc.v1.OperationMetadata > > { const request = new operationsProtos.google.longrunning.GetOperationRequest( {name} ); const [operation] = await this.operationsClient.getOperation(request); const decodeOperation = new gax.Operation( operation, this.descriptors.longrunning.deleteChannelConnection, gax.createDefaultBackoffSettings() ); return decodeOperation as LROperation< protos.google.cloud.eventarc.v1.ChannelConnection, protos.google.cloud.eventarc.v1.OperationMetadata >; } /** * List triggers. * * @param {Object} request * The request object that will be sent. * @param {string} request.parent * Required. The parent collection to list triggers on. * @param {number} request.pageSize * The maximum number of triggers to return on each page. * Note: The service may send fewer. * @param {string} request.pageToken * The page token; provide the value from the `next_page_token` field in a * previous `ListTriggers` call to retrieve the subsequent page. * * When paginating, all other parameters provided to `ListTriggers` must match * the call that provided the page token. * @param {string} request.orderBy * The sorting order of the resources returned. Value should be a * comma-separated list of fields. The default sorting order is ascending. To * specify descending order for a field, append a `desc` suffix; for example: * `name desc, trigger_id`. * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. * The first element of the array is Array of [Trigger]{@link google.cloud.eventarc.v1.Trigger}. * The client library will perform auto-pagination by default: it will call the API as many * times as needed and will merge results from all the pages into this array. * Note that it can affect your quota. * We recommend using `listTriggersAsync()` * method described below for async iteration which you can stop as needed. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) * for more details and examples. 
*/ listTriggers( request?: protos.google.cloud.eventarc.v1.IListTriggersRequest, options?: CallOptions ): Promise< [ protos.google.cloud.eventarc.v1.ITrigger[], protos.google.cloud.eventarc.v1.IListTriggersRequest | null, protos.google.cloud.eventarc.v1.IListTriggersResponse ] >; listTriggers( request: protos.google.cloud.eventarc.v1.IListTriggersRequest, options: CallOptions, callback: PaginationCallback< protos.google.cloud.eventarc.v1.IListTriggersRequest, protos.google.cloud.eventarc.v1.IListTriggersResponse | null | undefined, protos.google.cloud.eventarc.v1.ITrigger > ): void; listTriggers( request: protos.google.cloud.eventarc.v1.IListTriggersRequest, callback: PaginationCallback< protos.google.cloud.eventarc.v1.IListTriggersRequest, protos.google.cloud.eventarc.v1.IListTriggersResponse | null | undefined, protos.google.cloud.eventarc.v1.ITrigger > ): void; listTriggers( request?: protos.google.cloud.eventarc.v1.IListTriggersRequest, optionsOrCallback?: | CallOptions | PaginationCallback< protos.google.cloud.eventarc.v1.IListTriggersRequest, | protos.google.cloud.eventarc.v1.IListTriggersResponse | null | undefined, protos.google.cloud.eventarc.v1.ITrigger >, callback?: PaginationCallback< protos.google.cloud.eventarc.v1.IListTriggersRequest, protos.google.cloud.eventarc.v1.IListTriggersResponse | null | undefined, protos.google.cloud.eventarc.v1.ITrigger > ): Promise< [ protos.google.cloud.eventarc.v1.ITrigger[], protos.google.cloud.eventarc.v1.IListTriggersRequest | null, protos.google.cloud.eventarc.v1.IListTriggersResponse ] > | void { request = request || {}; let options: CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; options = {}; } else { options = optionsOrCallback as CallOptions; } options = options || {}; options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ parent: request.parent || '', }); this.initialize(); return this.innerApiCalls.listTriggers(request, options, callback); } /** * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. * @param {Object} request * The request object that will be sent. * @param {string} request.parent * Required. The parent collection to list triggers on. * @param {number} request.pageSize * The maximum number of triggers to return on each page. * Note: The service may send fewer. * @param {string} request.pageToken * The page token; provide the value from the `next_page_token` field in a * previous `ListTriggers` call to retrieve the subsequent page. * * When paginating, all other parameters provided to `ListTriggers` must match * the call that provided the page token. * @param {string} request.orderBy * The sorting order of the resources returned. Value should be a * comma-separated list of fields. The default sorting order is ascending. To * specify descending order for a field, append a `desc` suffix; for example: * `name desc, trigger_id`. * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Stream} * An object stream which emits an object representing [Trigger]{@link google.cloud.eventarc.v1.Trigger} on 'data' event. * The client library will perform auto-pagination by default: it will call the API as many * times as needed. Note that it can affect your quota. 
* We recommend using `listTriggersAsync()` * method described below for async iteration which you can stop as needed. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) * for more details and examples. */ listTriggersStream( request?: protos.google.cloud.eventarc.v1.IListTriggersRequest, options?: CallOptions ): Transform { request = request || {}; options = options || {}; options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ parent: request.parent || '', }); const defaultCallSettings = this._defaults['listTriggers']; const callSettings = defaultCallSettings.merge(options); this.initialize(); return this.descriptors.page.listTriggers.createStream( this.innerApiCalls.listTriggers as gax.GaxCall, request, callSettings ); } /** * Equivalent to `listTriggers`, but returns an iterable object. * * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. * @param {Object} request * The request object that will be sent. * @param {string} request.parent * Required. The parent collection to list triggers on. * @param {number} request.pageSize * The maximum number of triggers to return on each page. * Note: The service may send fewer. * @param {string} request.pageToken * The page token; provide the value from the `next_page_token` field in a * previous `ListTriggers` call to retrieve the subsequent page. * * When paginating, all other parameters provided to `ListTriggers` must match * the call that provided the page token. * @param {string} request.orderBy * The sorting order of the resources returned. Value should be a * comma-separated list of fields. The default sorting order is ascending. To * specify descending order for a field, append a `desc` suffix; for example: * `name desc, trigger_id`. * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Object} * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). * When you iterate the returned iterable, each element will be an object representing * [Trigger]{@link google.cloud.eventarc.v1.Trigger}. The API will be called under the hood as needed, once per the page, * so you can stop the iteration when you don't need more results. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) * for more details and examples. 
* @example <caption>include:samples/generated/v1/eventarc.list_triggers.js</caption> * region_tag:eventarc_v1_generated_Eventarc_ListTriggers_async */ listTriggersAsync( request?: protos.google.cloud.eventarc.v1.IListTriggersRequest, options?: CallOptions ): AsyncIterable<protos.google.cloud.eventarc.v1.ITrigger> { request = request || {}; options = options || {}; options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ parent: request.parent || '', }); const defaultCallSettings = this._defaults['listTriggers']; const callSettings = defaultCallSettings.merge(options); this.initialize(); return this.descriptors.page.listTriggers.asyncIterate( this.innerApiCalls['listTriggers'] as GaxCall, request as unknown as RequestType, callSettings ) as AsyncIterable<protos.google.cloud.eventarc.v1.ITrigger>; } /** * List channels. * * @param {Object} request * The request object that will be sent. * @param {string} request.parent * Required. The parent collection to list channels on. * @param {number} request.pageSize * The maximum number of channels to return on each page. * Note: The service may send fewer. * @param {string} request.pageToken * The page token; provide the value from the `next_page_token` field in a * previous `ListChannels` call to retrieve the subsequent page. * * When paginating, all other parameters provided to `ListChannels` must * match the call that provided the page token. * @param {string} request.orderBy * The sorting order of the resources returned. Value should be a * comma-separated list of fields. The default sorting order is ascending. To * specify descending order for a field, append a `desc` suffix; for example: * `name desc, channel_id`. * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. * The first element of the array is Array of [Channel]{@link google.cloud.eventarc.v1.Channel}. * The client library will perform auto-pagination by default: it will call the API as many * times as needed and will merge results from all the pages into this array. * Note that it can affect your quota. * We recommend using `listChannelsAsync()` * method described below for async iteration which you can stop as needed. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) * for more details and examples. 
*/ listChannels( request?: protos.google.cloud.eventarc.v1.IListChannelsRequest, options?: CallOptions ): Promise< [ protos.google.cloud.eventarc.v1.IChannel[], protos.google.cloud.eventarc.v1.IListChannelsRequest | null, protos.google.cloud.eventarc.v1.IListChannelsResponse ] >; listChannels( request: protos.google.cloud.eventarc.v1.IListChannelsRequest, options: CallOptions, callback: PaginationCallback< protos.google.cloud.eventarc.v1.IListChannelsRequest, protos.google.cloud.eventarc.v1.IListChannelsResponse | null | undefined, protos.google.cloud.eventarc.v1.IChannel > ): void; listChannels( request: protos.google.cloud.eventarc.v1.IListChannelsRequest, callback: PaginationCallback< protos.google.cloud.eventarc.v1.IListChannelsRequest, protos.google.cloud.eventarc.v1.IListChannelsResponse | null | undefined, protos.google.cloud.eventarc.v1.IChannel > ): void; listChannels( request?: protos.google.cloud.eventarc.v1.IListChannelsRequest, optionsOrCallback?: | CallOptions | PaginationCallback< protos.google.cloud.eventarc.v1.IListChannelsRequest, | protos.google.cloud.eventarc.v1.IListChannelsResponse | null | undefined, protos.google.cloud.eventarc.v1.IChannel >, callback?: PaginationCallback< protos.google.cloud.eventarc.v1.IListChannelsRequest, protos.google.cloud.eventarc.v1.IListChannelsResponse | null | undefined, protos.google.cloud.eventarc.v1.IChannel > ): Promise< [ protos.google.cloud.eventarc.v1.IChannel[], protos.google.cloud.eventarc.v1.IListChannelsRequest | null, protos.google.cloud.eventarc.v1.IListChannelsResponse ] > | void { request = request || {}; let options: CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; options = {}; } else { options = optionsOrCallback as CallOptions; }<|fim▁hole|> options = options || {}; options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ parent: request.parent || '', }); this.initialize(); return this.innerApiCalls.listChannels(request, options, callback); } /** * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. * @param {Object} request * The request object that will be sent. * @param {string} request.parent * Required. The parent collection to list channels on. * @param {number} request.pageSize * The maximum number of channels to return on each page. * Note: The service may send fewer. * @param {string} request.pageToken * The page token; provide the value from the `next_page_token` field in a * previous `ListChannels` call to retrieve the subsequent page. * * When paginating, all other parameters provided to `ListChannels` must * match the call that provided the page token. * @param {string} request.orderBy * The sorting order of the resources returned. Value should be a * comma-separated list of fields. The default sorting order is ascending. To * specify descending order for a field, append a `desc` suffix; for example: * `name desc, channel_id`. * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Stream} * An object stream which emits an object representing [Channel]{@link google.cloud.eventarc.v1.Channel} on 'data' event. * The client library will perform auto-pagination by default: it will call the API as many * times as needed. Note that it can affect your quota. 
* We recommend using `listChannelsAsync()` * method described below for async iteration which you can stop as needed. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) * for more details and examples. */ listChannelsStream( request?: protos.google.cloud.eventarc.v1.IListChannelsRequest, options?: CallOptions ): Transform { request = request || {}; options = options || {}; options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ parent: request.parent || '', }); const defaultCallSettings = this._defaults['listChannels']; const callSettings = defaultCallSettings.merge(options); this.initialize(); return this.descriptors.page.listChannels.createStream( this.innerApiCalls.listChannels as gax.GaxCall, request, callSettings ); } /** * Equivalent to `listChannels`, but returns an iterable object. * * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. * @param {Object} request * The request object that will be sent. * @param {string} request.parent * Required. The parent collection to list channels on. * @param {number} request.pageSize * The maximum number of channels to return on each page. * Note: The service may send fewer. * @param {string} request.pageToken * The page token; provide the value from the `next_page_token` field in a * previous `ListChannels` call to retrieve the subsequent page. * * When paginating, all other parameters provided to `ListChannels` must * match the call that provided the page token. * @param {string} request.orderBy * The sorting order of the resources returned. Value should be a * comma-separated list of fields. The default sorting order is ascending. To * specify descending order for a field, append a `desc` suffix; for example: * `name desc, channel_id`. * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Object} * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). * When you iterate the returned iterable, each element will be an object representing * [Channel]{@link google.cloud.eventarc.v1.Channel}. The API will be called under the hood as needed, once per the page, * so you can stop the iteration when you don't need more results. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) * for more details and examples. 
   * @example <caption>include:samples/generated/v1/eventarc.list_channels.js</caption>
   * region_tag:eventarc_v1_generated_Eventarc_ListChannels_async
   */
  listChannelsAsync(
    request?: protos.google.cloud.eventarc.v1.IListChannelsRequest,
    options?: CallOptions
  ): AsyncIterable<protos.google.cloud.eventarc.v1.IChannel> {
    request = request || {};
    options = options || {};
    options.otherArgs = options.otherArgs || {};
    options.otherArgs.headers = options.otherArgs.headers || {};
    options.otherArgs.headers['x-goog-request-params'] =
      gax.routingHeader.fromParams({
        parent: request.parent || '',
      });
    const defaultCallSettings = this._defaults['listChannels'];
    const callSettings = defaultCallSettings.merge(options);
    this.initialize();
    return this.descriptors.page.listChannels.asyncIterate(
      this.innerApiCalls['listChannels'] as GaxCall,
      request as unknown as RequestType,
      callSettings
    ) as AsyncIterable<protos.google.cloud.eventarc.v1.IChannel>;
  }
  /**
   * List channel connections.
   *
   * @param {Object} request
   *   The request object that will be sent.
   * @param {string} request.parent
   *   Required. The parent collection from which to list channel connections.
   * @param {number} request.pageSize
   *   The maximum number of channel connections to return on each page.
   *   Note: The service may send fewer responses.
   * @param {string} request.pageToken
   *   The page token; provide the value from the `next_page_token` field in a
   *   previous `ListChannelConnections` call to retrieve the subsequent page.
   *
   *   When paginating, all other parameters provided to `ListChannelConnections`
   *   must match the call that provided the page token.
   * @param {object} [options]
   *   Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details.
   * @returns {Promise} - The promise which resolves to an array.
   *   The first element of the array is Array of [ChannelConnection]{@link google.cloud.eventarc.v1.ChannelConnection}.
   *   The client library will perform auto-pagination by default: it will call the API as many
   *   times as needed and will merge results from all the pages into this array.
   *   Note that it can affect your quota.
   *   We recommend using `listChannelConnectionsAsync()`
   *   method described below for async iteration which you can stop as needed.
   *   Please see the
   *   [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination)
   *   for more details and examples.
   */
  listChannelConnections(
    request?: protos.google.cloud.eventarc.v1.IListChannelConnectionsRequest,
    options?: CallOptions
  ): Promise<
    [
      protos.google.cloud.eventarc.v1.IChannelConnection[],
      protos.google.cloud.eventarc.v1.IListChannelConnectionsRequest | null,
      protos.google.cloud.eventarc.v1.IListChannelConnectionsResponse
    ]
  >;
  listChannelConnections(
    request: protos.google.cloud.eventarc.v1.IListChannelConnectionsRequest,
    options: CallOptions,
    callback: PaginationCallback<
      protos.google.cloud.eventarc.v1.IListChannelConnectionsRequest,
      | protos.google.cloud.eventarc.v1.IListChannelConnectionsResponse
      | null
      | undefined,
      protos.google.cloud.eventarc.v1.IChannelConnection
    >
  ): void;
  listChannelConnections(
    request: protos.google.cloud.eventarc.v1.IListChannelConnectionsRequest,
    callback: PaginationCallback<
      protos.google.cloud.eventarc.v1.IListChannelConnectionsRequest,
      | protos.google.cloud.eventarc.v1.IListChannelConnectionsResponse
      | null
      | undefined,
      protos.google.cloud.eventarc.v1.IChannelConnection
    >
  ): void;
  listChannelConnections(
    request?: protos.google.cloud.eventarc.v1.IListChannelConnectionsRequest,
    optionsOrCallback?:
      | CallOptions
      | PaginationCallback<
          protos.google.cloud.eventarc.v1.IListChannelConnectionsRequest,
          | protos.google.cloud.eventarc.v1.IListChannelConnectionsResponse
          | null
          | undefined,
          protos.google.cloud.eventarc.v1.IChannelConnection
        >,
    callback?: PaginationCallback<
      protos.google.cloud.eventarc.v1.IListChannelConnectionsRequest,
      | protos.google.cloud.eventarc.v1.IListChannelConnectionsResponse
      | null
      | undefined,
      protos.google.cloud.eventarc.v1.IChannelConnection
    >
  ): Promise<
    [
      protos.google.cloud.eventarc.v1.IChannelConnection[],
      protos.google.cloud.eventarc.v1.IListChannelConnectionsRequest | null,
      protos.google.cloud.eventarc.v1.IListChannelConnectionsResponse
    ]
  > | void {
    request = request || {};
    let options: CallOptions;
    if (typeof optionsOrCallback === 'function' && callback === undefined) {
      callback = optionsOrCallback;
      options = {};
    } else {
      options = optionsOrCallback as CallOptions;
    }
    options = options || {};
    options.otherArgs = options.otherArgs || {};
    options.otherArgs.headers = options.otherArgs.headers || {};
    options.otherArgs.headers['x-goog-request-params'] =
      gax.routingHeader.fromParams({
        parent: request.parent || '',
      });
    this.initialize();
    return this.innerApiCalls.listChannelConnections(
      request,
      options,
      callback
    );
  }
  /**
   * Equivalent to `listChannelConnections`, but returns a NodeJS Stream object.
   * @param {Object} request
   *   The request object that will be sent.
   * @param {string} request.parent
   *   Required. The parent collection from which to list channel connections.
   * @param {number} request.pageSize
   *   The maximum number of channel connections to return on each page.
   *   Note: The service may send fewer responses.
   * @param {string} request.pageToken
   *   The page token; provide the value from the `next_page_token` field in a
   *   previous `ListChannelConnections` call to retrieve the subsequent page.
   *
   *   When paginating, all other parameters provided to `ListChannelConnections`
   *   must match the call that provided the page token.
   * @param {object} [options]
   *   Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details.
   * @returns {Stream}
   *   An object stream which emits an object representing [ChannelConnection]{@link google.cloud.eventarc.v1.ChannelConnection} on 'data' event.
   *   The client library will perform auto-pagination by default: it will call the API as many
   *   times as needed. Note that it can affect your quota.
   *   We recommend using `listChannelConnectionsAsync()`
   *   method described below for async iteration which you can stop as needed.
   *   Please see the
   *   [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination)
   *   for more details and examples.
   */
  listChannelConnectionsStream(
    request?: protos.google.cloud.eventarc.v1.IListChannelConnectionsRequest,
    options?: CallOptions
  ): Transform {
    request = request || {};
    options = options || {};
    options.otherArgs = options.otherArgs || {};
    options.otherArgs.headers = options.otherArgs.headers || {};
    options.otherArgs.headers['x-goog-request-params'] =
      gax.routingHeader.fromParams({
        parent: request.parent || '',
      });
    const defaultCallSettings = this._defaults['listChannelConnections'];
    const callSettings = defaultCallSettings.merge(options);
    this.initialize();
    return this.descriptors.page.listChannelConnections.createStream(
      this.innerApiCalls.listChannelConnections as gax.GaxCall,
      request,
      callSettings
    );
  }
  /**
   * Equivalent to `listChannelConnections`, but returns an iterable object.
   *
   * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand.
   * @param {Object} request
   *   The request object that will be sent.
   * @param {string} request.parent
   *   Required. The parent collection from which to list channel connections.
   * @param {number} request.pageSize
   *   The maximum number of channel connections to return on each page.
   *   Note: The service may send fewer responses.
   * @param {string} request.pageToken
   *   The page token; provide the value from the `next_page_token` field in a
   *   previous `ListChannelConnections` call to retrieve the subsequent page.
   *
   *   When paginating, all other parameters provided to `ListChannelConnections`
   *   must match the call that provided the page token.
   * @param {object} [options]
   *   Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details.
   * @returns {Object}
   *   An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols).
   *   When you iterate the returned iterable, each element will be an object representing
   *   [ChannelConnection]{@link google.cloud.eventarc.v1.ChannelConnection}. The API will be called under the hood as needed, once per the page,
   *   so you can stop the iteration when you don't need more results.
   *   Please see the
   *   [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination)
   *   for more details and examples.
* @example <caption>include:samples/generated/v1/eventarc.list_channel_connections.js</caption> * region_tag:eventarc_v1_generated_Eventarc_ListChannelConnections_async */ listChannelConnectionsAsync( request?: protos.google.cloud.eventarc.v1.IListChannelConnectionsRequest, options?: CallOptions ): AsyncIterable<protos.google.cloud.eventarc.v1.IChannelConnection> { request = request || {}; options = options || {}; options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ parent: request.parent || '', }); const defaultCallSettings = this._defaults['listChannelConnections']; const callSettings = defaultCallSettings.merge(options); this.initialize(); return this.descriptors.page.listChannelConnections.asyncIterate( this.innerApiCalls['listChannelConnections'] as GaxCall, request as unknown as RequestType, callSettings ) as AsyncIterable<protos.google.cloud.eventarc.v1.IChannelConnection>; } // -------------------- // -- Path templates -- // -------------------- /** * Return a fully-qualified channel resource name string. * * @param {string} project * @param {string} location * @param {string} channel * @returns {string} Resource name string. */ channelPath(project: string, location: string, channel: string) { return this.pathTemplates.channelPathTemplate.render({ project: project, location: location, channel: channel, }); } /** * Parse the project from Channel resource. * * @param {string} channelName * A fully-qualified path representing Channel resource. * @returns {string} A string representing the project. */ matchProjectFromChannelName(channelName: string) { return this.pathTemplates.channelPathTemplate.match(channelName).project; } /** * Parse the location from Channel resource. * * @param {string} channelName * A fully-qualified path representing Channel resource. * @returns {string} A string representing the location. */ matchLocationFromChannelName(channelName: string) { return this.pathTemplates.channelPathTemplate.match(channelName).location; } /** * Parse the channel from Channel resource. * * @param {string} channelName * A fully-qualified path representing Channel resource. * @returns {string} A string representing the channel. */ matchChannelFromChannelName(channelName: string) { return this.pathTemplates.channelPathTemplate.match(channelName).channel; } /** * Return a fully-qualified channelConnection resource name string. * * @param {string} project * @param {string} location * @param {string} channel_connection * @returns {string} Resource name string. */ channelConnectionPath( project: string, location: string, channelConnection: string ) { return this.pathTemplates.channelConnectionPathTemplate.render({ project: project, location: location, channel_connection: channelConnection, }); } /** * Parse the project from ChannelConnection resource. * * @param {string} channelConnectionName * A fully-qualified path representing ChannelConnection resource. * @returns {string} A string representing the project. */ matchProjectFromChannelConnectionName(channelConnectionName: string) { return this.pathTemplates.channelConnectionPathTemplate.match( channelConnectionName ).project; } /** * Parse the location from ChannelConnection resource. * * @param {string} channelConnectionName * A fully-qualified path representing ChannelConnection resource. * @returns {string} A string representing the location. 
*/ matchLocationFromChannelConnectionName(channelConnectionName: string) { return this.pathTemplates.channelConnectionPathTemplate.match( channelConnectionName ).location; } /** * Parse the channel_connection from ChannelConnection resource. * * @param {string} channelConnectionName * A fully-qualified path representing ChannelConnection resource. * @returns {string} A string representing the channel_connection. */ matchChannelConnectionFromChannelConnectionName( channelConnectionName: string ) { return this.pathTemplates.channelConnectionPathTemplate.match( channelConnectionName ).channel_connection; } /** * Return a fully-qualified location resource name string. * * @param {string} project * @param {string} location * @returns {string} Resource name string. */ locationPath(project: string, location: string) { return this.pathTemplates.locationPathTemplate.render({ project: project, location: location, }); } /** * Parse the project from Location resource. * * @param {string} locationName * A fully-qualified path representing Location resource. * @returns {string} A string representing the project. */ matchProjectFromLocationName(locationName: string) { return this.pathTemplates.locationPathTemplate.match(locationName).project; } /** * Parse the location from Location resource. * * @param {string} locationName * A fully-qualified path representing Location resource. * @returns {string} A string representing the location. */ matchLocationFromLocationName(locationName: string) { return this.pathTemplates.locationPathTemplate.match(locationName).location; } /** * Return a fully-qualified project resource name string. * * @param {string} project * @returns {string} Resource name string. */ projectPath(project: string) { return this.pathTemplates.projectPathTemplate.render({ project: project, }); } /** * Parse the project from Project resource. * * @param {string} projectName * A fully-qualified path representing Project resource. * @returns {string} A string representing the project. */ matchProjectFromProjectName(projectName: string) { return this.pathTemplates.projectPathTemplate.match(projectName).project; } /** * Return a fully-qualified trigger resource name string. * * @param {string} project * @param {string} location * @param {string} trigger * @returns {string} Resource name string. */ triggerPath(project: string, location: string, trigger: string) { return this.pathTemplates.triggerPathTemplate.render({ project: project, location: location, trigger: trigger, }); } /** * Parse the project from Trigger resource. * * @param {string} triggerName * A fully-qualified path representing Trigger resource. * @returns {string} A string representing the project. */ matchProjectFromTriggerName(triggerName: string) { return this.pathTemplates.triggerPathTemplate.match(triggerName).project; } /** * Parse the location from Trigger resource. * * @param {string} triggerName * A fully-qualified path representing Trigger resource. * @returns {string} A string representing the location. */ matchLocationFromTriggerName(triggerName: string) { return this.pathTemplates.triggerPathTemplate.match(triggerName).location; } /** * Parse the trigger from Trigger resource. * * @param {string} triggerName * A fully-qualified path representing Trigger resource. * @returns {string} A string representing the trigger. */ matchTriggerFromTriggerName(triggerName: string) { return this.pathTemplates.triggerPathTemplate.match(triggerName).trigger; } /** * Terminate the gRPC channel and close the client. 
   *
   * The client will no longer be usable and all future behavior is undefined.
   * @returns {Promise} A promise that resolves when the client is closed.
   */
  close(): Promise<void> {
    if (this.eventarcStub && !this._terminated) {
      return this.eventarcStub.then(stub => {
        this._terminated = true;
        stub.close();
        this.operationsClient.close();
      });
    }
    return Promise.resolve();
  }
}<|fim▁end|>
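// A minimal consumer-side sketch for the generated client above — hedged: it
// assumes the published package name '@google-cloud/eventarc', and the project
// and location values are placeholders. It illustrates the pagination and
// shutdown surface documented above; it is not part of the dataset row.
import {EventarcClient} from '@google-cloud/eventarc';

async function showTriggers(projectId: string, location: string): Promise<void> {
  const client = new EventarcClient();
  // locationPath() renders 'projects/{project}/locations/{location}'.
  const parent = client.locationPath(projectId, location);
  // listTriggersAsync() fetches one page per iteration step; breaking out of
  // the loop stops further API calls, unlike the eager listTriggers().
  for await (const trigger of client.listTriggersAsync({parent})) {
    console.log(trigger.name);
  }
  // close() terminates the gRPC channel, as documented above.
  await client.close();
}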
<|file_name|>videomega.py<|end_file_name|><|fim▁begin|># coding: utf-8
from __future__ import unicode_literals

import re

from .common import InfoExtractor
from ..compat import compat_urllib_request


class VideoMegaIE(InfoExtractor):
    _VALID_URL = r'(?:videomega:|https?://(?:www\.)?videomega\.tv/(?:(?:view|iframe|cdn)\.php)?\?ref=)(?P<id>[A-Za-z0-9]+)'
    _TESTS = [{
        'url': 'http://videomega.tv/cdn.php?ref=AOSQBJYKIDDIKYJBQSOA',
        'md5': 'cc1920a58add3f05c6a93285b84fb3aa',
        'info_dict': {
            'id': 'AOSQBJYKIDDIKYJBQSOA',<|fim▁hole|>
            'thumbnail': 're:^https?://.*\.jpg$',
        }
    }, {
        'url': 'http://videomega.tv/cdn.php?ref=AOSQBJYKIDDIKYJBQSOA&width=1070&height=600',
        'only_matching': True,
    }, {
        'url': 'http://videomega.tv/view.php?ref=090051111052065112106089103052052103089106112065052111051090',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        video_id = self._match_id(url)

        iframe_url = 'http://videomega.tv/cdn.php?ref=%s' % video_id
        req = compat_urllib_request.Request(iframe_url)
        req.add_header('Referer', url)
        req.add_header('Cookie', 'noadvtday=0')
        webpage = self._download_webpage(req, video_id)

        title = self._html_search_regex(
            r'<title>(.+?)</title>', webpage, 'title')
        title = re.sub(
            r'(?:^[Vv]ideo[Mm]ega\.tv\s-\s*|\s*-\svideomega\.tv$)', '', title)
        thumbnail = self._search_regex(
            r'<video[^>]+?poster="([^"]+)"', webpage, 'thumbnail', fatal=False)
        video_url = self._search_regex(
            r'<source[^>]+?src="([^"]+)"', webpage, 'video URL')

        return {
            'id': video_id,
            'title': title,
            'url': video_url,
            'thumbnail': thumbnail,
            'http_headers': {
                'Referer': iframe_url,
            },
        }<|fim▁end|>
            'ext': 'mp4',
            'title': '1254207',
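# A hedged usage sketch for the extractor in the row above: it assumes the
# youtube_dl package with this VideoMegaIE registered in its extractor list;
# the URL comes from the sample's own _TESTS block. Illustration only.
from youtube_dl import YoutubeDL

def fetch_videomega_metadata():
    url = 'http://videomega.tv/cdn.php?ref=AOSQBJYKIDDIKYJBQSOA'
    # download=False exercises _real_extract() only, which is what the
    # _TESTS info_dict (including the completed 'ext' and 'title') verifies.
    with YoutubeDL({'skip_download': True}) as ydl:
        info = ydl.extract_info(url, download=False)
    return info['id'], info['title'], info['ext']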
<|file_name|>get_satellite.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
'''
Get satellite data according to input file.
'''
from random import shuffle,random
import os,json
from utils.mapbox_static import MapboxStatic
from utils.coordinate_converter import CoordConvert
from modules.getFeatures import latLon,getBBox
from libs.foldernames import satDataFolder,testDataFolder


def get_satellite(inputFile,mapboxtoken=None,count=1000,zoomLevel=17,
                  outputFolder='data',xpixel=480,ypixel=360,epsg=None,elements=None,
                  randomImages=False):
    '''
    Get satellite data in order to input GIS information.

    Parameters:
    'inputFile': Input file (GeoJSON format or parsed into GeoJSON)
    'mapboxtoken': Access token for Mapbox (go to mapbox.com to create one)
    'count': Number of satellite images to be downloaded
    'zoomLevel': Zoom level (see libs/zoomLevel.csv for resolutions)
    'outputFolder': Folder to store output data in
    'xpixel': Number of pixels of satellite images (width)
    'ypixel': Number of pixels of satellite images (height)
    'epsg': EPSG code for coordinate system in GIS data (will try to find
        automatically if not provided)
    'elements': GIS data can also be input directly
    'randomImages': Get center of random polygons (False) or within
        Boundary Box of data (True)
    '''
    if (not inputFile) and (not elements):
        print "Error: Provide input file."
        exit()
    if not mapboxtoken:
        print "Error: Provide mapbox token (more information on www.mapbox.com)."
        exit()
    #parser.add_argument('--sport',
    #                    type=str, default='baseball',
    #                    help='Sport tag, for example: baseball, tennis, or soccer.')
    # We need the elements
    if not elements:
        print 'Loading %s...' % inputFile
        with open(inputFile, 'r') as f:
            elements = json.load(f)
    #get coordinate system
    myCoordConvert = CoordConvert()
    code=myCoordConvert.getCoordSystem(elements,epsg)
    #create folders
    subpath=outputFolder+"/"+os.path.split(inputFile)[-1][:-5]
    if not os.path.isdir(subpath):
        os.mkdir(subpath)
        print 'Directory',subpath,'created'
    if not os.path.isdir(subpath+satDataFolder):<|fim▁hole|>
        os.mkdir(subpath+testDataFolder)
        print 'Directory',subpath+testDataFolder,'created'
    #Write metadata
    with open(subpath+satDataFolder+"meta.csv","a+") as f:
        f.write("ZoomLevel,,"+str(zoomLevel)+"\n")
    #get bbox if set to random
    if randomImages:
        xlist=[]
        ylist=[]
        for element in elements['features']:
            minxe,maxxe,minye,maxye=getBBox(element)
            xlist.append(minxe)
            xlist.append(maxxe)
            ylist.append(minye)
            ylist.append(maxye)
        minx=min(xlist)
        maxx=max(xlist)
        miny=min(ylist)
        maxy=max(ylist)
    element_list = []
    index_list = range(len(elements['features'])) #feature map
    # Randomize elements list to make sure we don't download all pics from the
    shuffle(index_list)
    for i in index_list:
        element_list.append(elements['features'][i]) #feature map
    # Now we're gonna download the satellite images for these locations
    namespace= os.path.split(inputFile)[-1][:-5] #get input file name as namespace
    mapbox_static = MapboxStatic(
        namespace=namespace,
        root_folder=subpath+satDataFolder[0:-1])
    total_downloaded = 0
    c = 0
    print "------------------- Getting Satellite data -------------------"
    for element in element_list:
        if randomImages:
            randomValue=random()
            av_lon=minx+((maxx-minx)*randomValue)
            av_lat=miny+((maxy-miny)*randomValue)
            element_id_str=1000000+c #1000000 indicates random value
            with open(subpath+satDataFolder+"meta.csv","a+") as f:
                f.write(str(element_id_str)+","+str(av_lon)+","+str(av_lat)+"\n")
        else:
            element_id_str = index_list[c]
            #figure out center of polygon
            av_lon,av_lat=latLon(element)
        #Convert to standard format
        if code != 4326: # if not already in wgs84 (EPSG:4326) standard format
            lotlan= myCoordConvert.convert(av_lon,av_lat)
            longitude=lotlan[0]
            latitude=lotlan[1]
        else: #if already in wgs84 format
            latitude= av_lat
            longitude= av_lon
        #get url
        print "Coordinates WGS84: "+str(longitude)+','+str(latitude)
        if (av_lon != longitude) and (av_lat != latitude):
            print "Coordinates Native: "+str(av_lon)+','+str(av_lat)
        url = mapbox_static.get_url(
            latitude=latitude,
            longitude=longitude,
            mapbox_zoom=zoomLevel,
            access_token=mapboxtoken,
            width=xpixel,
            height=ypixel)
        #download data
        success = mapbox_static.download_tile(
            element_id=element_id_str,
            url=url,verbose=True)
        if success:
            total_downloaded += 1
        print total_downloaded,'/',count
        c += 1
        if total_downloaded >= count:
            break<|fim▁end|>
        os.mkdir(subpath+satDataFolder)
        print 'Directory',subpath+satDataFolder,'created'
    if not os.path.isdir(subpath+testDataFolder):
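# The MapboxStatic helper imported above is project-local and not shown in this
# row; the sketch below reconstructs the URL that get_url() presumably builds,
# based on Mapbox's documented Static Images API. The endpoint layout and the
# satellite style id are assumptions, not taken from the row.
def mapbox_static_url(longitude, latitude, zoom, width, height, access_token,
                      style='mapbox/satellite-v9'):
    # One centered, unannotated satellite image per (longitude, latitude) pair.
    return ('https://api.mapbox.com/styles/v1/%s/static/%f,%f,%d/%dx%d'
            '?access_token=%s'
            % (style, longitude, latitude, zoom, width, height, access_token))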
<|file_name|>forms.py<|end_file_name|><|fim▁begin|>from operator import and_
from functools import reduce

from django import forms
from django.db.models import Q
from django.utils.six import PY3
from django.utils.translation import ugettext_lazy as _

from api.dc.domain.views import dc_domain
from api.dns.domain.views import dns_domain
from api.dns.record.views import dns_record_list, dns_record
from api.vm.utils import get_owners
from gui.forms import SerializerForm
from gui.fields import ArrayField
from gui.widgets import NumberInput
from pdns.models import Domain, Record

TEXT_INPUT_ATTRS = {'class': 'input-transparent narrow', 'required': 'required'}
SELECT_ATTRS = {'class': 'narrow input-select2'}

if PY3:
    t_long = int
else:
    t_long = long  # noqa: F821


class DcDomainForm(SerializerForm):
    """
    Create or remove DC<->DNS Domain link by calling dc_domain.
    """
    _api_call = dc_domain

    name = forms.ChoiceField(label=_('Domain'), required=True,
                             widget=forms.Select(attrs={'class': 'input-select2 narrow disable_created2'}))

    def __init__(self, request, domains, *args, **kwargs):
        super(DcDomainForm, self).__init__(request, None, *args, **kwargs)
        self.fields['name'].choices = domains.values_list('name', 'name')

    def _final_data(self, data=None):
        return {}


class AdminDomainForm(SerializerForm):
    """
    Create DNS domain by calling dns_domain.
    """
    _api_call = dns_domain

    dc_bound = forms.BooleanField(label=_('DC-bound?'), required=False,
                                  widget=forms.CheckboxInput(attrs={'class': 'normal-check'}))
    name = forms.CharField(label=_('Name'), max_length=255, required=True,
                           widget=forms.TextInput(attrs={'class': 'input-transparent narrow disable_created',
                                                         'required': 'required',
                                                         'pattern': '[A-Za-z0-9._-]+'}))
    owner = forms.ChoiceField(label=_('Owner'), required=False,
                              widget=forms.Select(attrs=SELECT_ATTRS))
    access = forms.TypedChoiceField(label=_('Access'), required=False, coerce=int, choices=Domain.ACCESS,
                                    widget=forms.Select(attrs=SELECT_ATTRS))
    type = forms.ChoiceField(label=_('Type'), required=False, choices=Domain.TYPE_MASTER,
                             widget=forms.Select(attrs=SELECT_ATTRS),
                             help_text=_('PowerDNS domain type. '
                                         'MASTER - use DNS protocol messages to communicate changes '
                                         'with slaves. NATIVE - use database replication '
                                         'between master DNS server and slave DNS servers.'))
    desc = forms.CharField(label=_('Description'), max_length=128, required=False,
                           widget=forms.TextInput(attrs={'class': 'input-transparent wide', 'required': ''}))
    tsig_keys = forms.CharField(label=_('TSIG Key(s)'), max_length=1000, required=False,
                                widget=forms.TextInput(attrs={'class': 'input-transparent', 'required': ''}),
                                help_text=_('TSIG DNS keys for external zone transfers. Zone transfers to '
                                            'external DNS slaves will only be allowed using this key. '
                                            'For more info on how to generate the key see Danube Cloud docs.'))

    def __init__(self, request, domain, *args, **kwargs):<|fim▁hole|>
            self.fields['dc_bound'].widget.attrs['disabled'] = 'disabled'

    def _initial_data(self, request, obj):
        return obj.web_data

    def _final_data(self, data=None):
        data = super(AdminDomainForm, self)._final_data(data=data)

        if self.action == 'create':
            # Add dc parameter when doing POST (required by api.db.utils.get_virt_object)
            data['dc'] = self._request.dc.name

        return data


class DnsRecordFilterForm(forms.Form):
    """
    Filter DNS records for a domain.
    """
    all = forms.BooleanField(widget=forms.HiddenInput(attrs={'class': 'always-include-navigation'}), required=False)
    domain = forms.ChoiceField(label=_('Domain'), required=False,
                               widget=forms.Select(attrs={'class': 'fill-up input-navigation select-transparent '
                                                                   'always-include-navigation'}))
    type = forms.ChoiceField(label=_('Type'), required=False,
                             choices=(('', _('Type (all)')),) + Record.TYPE_USED,
                             widget=forms.Select(attrs={'class': 'fill-up input-navigation select-transparent'}))
    name = forms.CharField(label=_('Name'), required=False,
                           widget=forms.TextInput(attrs={'class': 'fill-up input-navigation input-transparent',
                                                         'placeholder': _('Search by name')}))
    content = forms.CharField(label=_('Content'), required=False,
                              widget=forms.TextInput(attrs={'class': 'fill-up input-navigation input-transparent',
                                                            'placeholder': _('Search by content')}))
    changed_since = forms.DateField(label=_('Changed since'), required=False, input_formats=('%Y-%m-%d',),
                                    widget=forms.DateInput(format='%Y-%m-%d',
                                                           attrs={'placeholder': _('Changed since'),
                                                                  'class': 'fill-up input-navigation '
                                                                           'input-transparent input-date'}))

    def __init__(self, request, data, _all=False, **kwargs):
        super(DnsRecordFilterForm, self).__init__(data, **kwargs)
        domains = Domain.objects.order_by('name')
        user, dc = request.user, request.dc

        if request.GET.get('deleted', False):
            domains = domains.exclude(access=Domain.INTERNAL)
        else:
            domains = domains.exclude(access__in=Domain.INVISIBLE)

        if user.is_staff and _all:
            domain_choices = [(d.name, d.name) for d in domains]
        else:
            dc_domain_ids = list(dc.domaindc_set.values_list('domain_id', flat=True))
            domains = domains.filter(Q(id__in=dc_domain_ids) | Q(user=user.id))
            domain_choices = [(d.name, d.name) for d in domains
                              if (user.is_staff or d.user == user.id or d.dc_bound == dc.id)]

        self.fields['domain'].choices = domain_choices

    def get_filters(self):
        data = self.cleaned_data
        query = []

        _type = data.get('type')
        if _type:
            query.append(Q(type=_type))

        name = data.get('name')
        if name:
            query.append(Q(name__icontains=name))

        content = data.get('content')
        if content:
            query.append(Q(content__icontains=content))

        changed_since = data.get('changed_since')
        if changed_since:
            query.append(Q(change_date__gte=changed_since.strftime('%s')))

        if query:
            return reduce(and_, query)
        else:
            return None


class DnsRecordForm(SerializerForm):
    """
    Create, update or delete network DNS record.
    """
    _ip = None
    _api_call = dns_record
    template = 'gui/dc/domain_record_form.html'

    id = forms.IntegerField(label=_('ID'), required=True, widget=forms.HiddenInput())
    name = forms.CharField(label=_('Name'), required=True,
                           help_text=_('The full URI the DNS server should pick up on.'),
                           widget=forms.TextInput(attrs=TEXT_INPUT_ATTRS))
    content = forms.CharField(label=_('Content'), required=False,
                              # help_text=_('The answer of the DNS query.'),
                              widget=forms.TextInput(attrs={'class': 'input-transparent narrow'}))
    type = forms.ChoiceField(label=_('Type'), required=True, choices=Record.TYPE_USED,
                             widget=forms.Select(attrs=SELECT_ATTRS))
    ttl = forms.IntegerField(label=_('TTL'), required=False,
                             help_text=_('How long the DNS client is allowed to remember this record.'),
                             widget=NumberInput(attrs={'class': 'input-transparent narrow'}))
    prio = forms.IntegerField(label=_('Priority'), required=False,
                              # help_text=_('Priority used by some record types.'),
                              widget=NumberInput(attrs={'class': 'input-transparent narrow'}))
    disabled = forms.BooleanField(label=_('Disabled?'), required=False,
                                  help_text=_('If set to true, this record is hidden from DNS clients.'),
                                  widget=forms.CheckboxInput(attrs={'class': 'normal-check'}))

    def __init__(self, request, domain, record, *args, **kwargs):
        self.domain = domain
        super(DnsRecordForm, self).__init__(request, record, *args, **kwargs)

    def _initial_data(self, request, obj):
        return obj.web_data

    def api_call_args(self, domain_name):
        if self.action == 'create':
            return domain_name,
        else:
            return domain_name, self.cleaned_data['id']


class MultiDnsRecordForm(SerializerForm):
    """
    Delete multiple DNS records at once.
    """
    _api_call = dns_record_list
    template = 'gui/dc/domain_records_form.html'

    records = ArrayField(required=True, widget=forms.HiddenInput())

    def __init__(self, request, domain, record, *args, **kwargs):
        self.domain = domain
        super(MultiDnsRecordForm, self).__init__(request, record, *args, **kwargs)

    @staticmethod
    def api_call_args(domain_name):
        return domain_name,<|fim▁end|>
super(AdminDomainForm, self).__init__(request, domain, *args, **kwargs) self.fields['owner'].choices = get_owners(request).values_list('username', 'username') if not request.user.is_staff:
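One detail of the forms.py sample worth calling out is DnsRecordFilterForm.get_filters, which collects optional Q filters and ANDs them together with reduce. A minimal, self-contained sketch of the same pattern (the field names here are illustrative, not part of the sample):

from functools import reduce
from operator import and_

from django.db.models import Q


def build_filters(cleaned_data):
    """Collect one Q per non-empty form field and AND them all together."""
    query = []
    if cleaned_data.get('type'):
        query.append(Q(type=cleaned_data['type']))
    if cleaned_data.get('name'):
        query.append(Q(name__icontains=cleaned_data['name']))
    # reduce(and_, [q1, q2]) == q1 & q2; None signals "no filtering at all".
    return reduce(and_, query) if query else None

# Hypothetical usage with a Record-like model:
#   filters = build_filters(form.cleaned_data)
#   qs = Record.objects.filter(filters) if filters is not None else Record.objects.all()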
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>from django.db.models import Q
from links.models import Post
from comments.models import ThreadedComment as comments
from django.utils import timezone
from datetime import datetime, timedelta
from django.contrib import messages

<|fim▁hole|>KARMA_MEDIUM = 1000
KARMA_HIGH = 5000
INTERVAL_LOW = 3600
INTERVAL_MEDIUM = 360
INTERVAL_HIGH = 36
COMMENT_PER_INTERVAL = 20
COMMENT_MAX = 80


def allowed_to_comment(user):
    karma = user.userprofile.karma
    now = timezone.now()
    time_threshold = now - timedelta(seconds=3600)
    # Q objects must be combined with `&`, not `and` (which evaluates to the
    # right-hand Q only and silently drops the user filter).
    comments_number = comments.objects.filter(
        Q(user=user) & Q(submit_date__gt=time_threshold)).count()
    if karma < KARMA_HIGH:
        if comments_number > COMMENT_PER_INTERVAL:
            return False
        else:
            return True
    else:
        if comments_number > COMMENT_MAX:
            return False
        else:
            return True


def allowed_to_post(request, user):
    karma = user.userprofile.karma
    print karma
    now = timezone.now()
    try:
        posted = Post.objects.filter(post__submitter__exact=user).latest('submit_date')
        diff = now - posted.submit_date
        diff = diff.seconds
    except Post.DoesNotExist:
        # No previous post: allow posting by faking an old-enough interval.
        diff = INTERVAL_LOW + 1
    print diff
    if karma < KARMA_LOW:
        result = diff > INTERVAL_LOW
        if not result:
            messages.success(request, 'Please try in an hour!')
        return result
    elif karma < KARMA_HIGH:
        result = diff > INTERVAL_MEDIUM
        if not result:
            messages.success(request, 'Please try in ten minutes!')
        return result
    else:
        result = diff > INTERVAL_HIGH
        if not result:
            messages.warning(request, 'Please try in 30 sec')
        return result


def get_client_ip(request):
    x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
    if x_forwarded_for:
        ip = x_forwarded_for.split(',')[0]
    else:
        ip = request.META.get('REMOTE_ADDR')
    return ip<|fim▁end|>
KARMA_LOW = 100
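One pitfall illustrated (and fixed) in the utils.py sample above: Django Q objects must be combined with the bitwise operators & and |, never with Python's `and`/`or`, which simply evaluate to one of the operands. A short sketch with placeholder values:

from datetime import datetime, timedelta

from django.db.models import Q

time_threshold = datetime.utcnow() - timedelta(hours=1)

recent = Q(submit_date__gt=time_threshold)
mine = Q(user_id=42)          # the user id here is a placeholder

# `mine and recent` evaluates to just `recent`, silently dropping the user
# condition, because `and` is plain boolean short-circuiting.
broken = mine and recent

# `&` builds a combined lookup node that applies both conditions.
combined = mine & recent
# comments.objects.filter(combined).count()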
<|file_name|>test_base.py<|end_file_name|><|fim▁begin|># -----------------------------------------------------------------------------
# Copyright (c) 2014--, The Qiita Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------

from unittest import TestCase, main

from qiita_core.exceptions import IncompetentQiitaDeveloperError
from qiita_core.util import qiita_test_checker
from qiita_core.qiita_settings import qiita_config
import qiita_db as qdb


@qiita_test_checker()
class QiitaBaseTest(TestCase):
    """Tests that the base class functions act correctly"""

    def setUp(self):
        # We need an actual subclass in order to test the equality functions
        self.tester = qdb.artifact.Artifact(1)<|fim▁hole|>
        self.portal = qiita_config.portal

    def tearDown(self):
        qiita_config.portal = self.portal

    def test_init_base_error(self):
        """Raises an error when instantiating a base class directly"""
        with self.assertRaises(IncompetentQiitaDeveloperError):
            qdb.base.QiitaObject(1)

    def test_init_error_inexistent(self):
        """Raises an error when instantiating an object that does not exist"""
        with self.assertRaises(qdb.exceptions.QiitaDBUnknownIDError):
            qdb.artifact.Artifact(10)

    def test_check_subclass(self):
        """Nothing happens if check_subclass called from a subclass"""
        self.tester._check_subclass()

    def test_check_subclass_error(self):
        """check_subclass raises an error if called from a base class"""
        # Checked through the __init__ call
        with self.assertRaises(IncompetentQiitaDeveloperError):
            qdb.base.QiitaObject(1)
        with self.assertRaises(IncompetentQiitaDeveloperError):
            qdb.base.QiitaStatusObject(1)

    def test_check_id(self):
        """Correctly checks if an id exists on the database"""
        self.assertTrue(self.tester._check_id(1))
        self.assertFalse(self.tester._check_id(100))

    def test_check_portal(self):
        """Correctly checks if object is accessible in portal given"""
        qiita_config.portal = 'QIITA'
        tester = qdb.analysis.Analysis(1)
        self.assertTrue(tester._check_portal(1))
        qiita_config.portal = 'EMP'
        self.assertFalse(tester._check_portal(1))
        self.assertTrue(self.tester._check_portal(1))

    def test_equal_self(self):
        """Equality works with the same object"""
        self.assertEqual(self.tester, self.tester)

    def test_equal(self):
        """Equality works with two objects pointing to the same instance"""
        new = qdb.artifact.Artifact(1)
        self.assertEqual(self.tester, new)

    def test_not_equal(self):
        """Not equals works with object of the same type"""
        sp1 = qdb.study.StudyPerson(1)
        sp2 = qdb.study.StudyPerson(2)
        self.assertNotEqual(sp1, sp2)

    def test_not_equal_type(self):
        """Not equals works with object of different type"""
        new = qdb.study.Study(1)
        self.assertNotEqual(self.tester, new)


@qiita_test_checker()
class QiitaStatusObjectTest(TestCase):
    """Tests that the QiitaStatusObject class functions act correctly"""

    def setUp(self):
        # We need an actual subclass in order to test the equality functions
        self.tester = qdb.analysis.Analysis(1)

    def test_status(self):
        """Correctly returns the status of the object"""
        self.assertEqual(self.tester.status, "in_construction")

    def test_check_status_single(self):
        """check_status works passing a single status"""
        self.assertTrue(self.tester.check_status(["in_construction"]))
        self.assertFalse(self.tester.check_status(["queued"]))

    def test_check_status_exclude_single(self):
        """check_status works passing a single status and the exclude flag"""
        self.assertTrue(self.tester.check_status(["public"], exclude=True))
        self.assertFalse(self.tester.check_status(["in_construction"],
                                                  exclude=True))

    def test_check_status_list(self):
        """check_status works passing a list of status"""
        self.assertTrue(self.tester.check_status(
            ["in_construction", "queued"]))
        self.assertFalse(self.tester.check_status(
            ["public", "queued"]))

    def test_check_status_exclude_list(self):
        """check_status works passing a list of status and the exclude flag"""
        self.assertTrue(self.tester.check_status(
            ["public", "queued"], exclude=True))
        self.assertFalse(self.tester.check_status(
            ["in_construction", "queued"], exclude=True))

    def test_check_status_unknown_status(self):
        """check_status raises an error if an invalid status is provided"""
        with self.assertRaises(ValueError):
            self.tester.check_status(["foo"])

        with self.assertRaises(ValueError):
            self.tester.check_status(["foo"], exclude=True)

    def test_check_status_unknown_status_list(self):
        """check_status raises an error if an invalid status list is provided
        """
        with self.assertRaises(ValueError):
            self.tester.check_status(["foo", "bar"])

        with self.assertRaises(ValueError):
            self.tester.check_status(["foo", "bar"], exclude=True)


if __name__ == '__main__':
    main()<|fim▁end|>
<|file_name|>hb-subset-glyf.hh<|end_file_name|><|fim▁begin|>/*
 * Copyright © 2018 Google, Inc.
 *
 * This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Google Author(s): Garret Rieger<|fim▁hole|>
 */

#ifndef HB_SUBSET_GLYF_HH
#define HB_SUBSET_GLYF_HH

#include "hb.hh"

#include "hb-subset.hh"

HB_INTERNAL bool
hb_subset_glyf_and_loca (hb_subset_plan_t *plan,
                         bool *use_short_loca, /* OUT */
                         hb_blob_t **glyf_prime /* OUT */,
                         hb_blob_t **loca_prime /* OUT */);

#endif /* HB_SUBSET_GLYF_HH */<|fim▁end|>
<|file_name|>oauth_scopes.py<|end_file_name|><|fim▁begin|>""" Define a set of scopes to be used by COS Internal OAuth implementation, specifically tailored to work with APIv2. List of scopes, nomenclature, and rationale can be found in the relevant "Login as OSF- phase 2" proposal document """ from collections import namedtuple from website import settings # Public scopes are described with 3 pieces of information: list of constituent scopes, a description, and whether or # not this scope is available to be requested by the general public class scope(namedtuple('scope', ['parts_', 'description', 'is_public'])): """ Patch to add `ALWAYS_PUBLIC` scope to every selectable scope, ensuring that public endpoints are accessible with any token. """ @property def parts(self): return frozenset((CoreScopes.ALWAYS_PUBLIC, )).union(self.parts_) class CoreScopes(object): """ The smallest units of permission that can be granted- all other scopes are built out of these. Each named constant is a single string.""" # IMPORTANT: All views should be based on the smallest number of Core scopes required to describe # the data in that view USERS_READ = 'users_read' USERS_WRITE = 'users_write' USERS_CREATE = 'users_create' USER_SETTINGS_READ = 'user.settings_read' USER_SETTINGS_WRITE = 'user.settings_write' USER_EMAIL_READ = 'users.email_read' USER_ADDON_READ = 'users.addon_read' SUBSCRIPTIONS_READ = 'subscriptions_read' SUBSCRIPTIONS_WRITE = 'subscriptions_write' MEETINGS_READ = 'meetings.base_read' NODE_BASE_READ = 'nodes.base_read' NODE_BASE_WRITE = 'nodes.base_write' NODE_CHILDREN_READ = 'nodes.children_read' NODE_CHILDREN_WRITE = 'nodes.children_write' NODE_FORKS_READ = 'nodes.forks_read' NODE_FORKS_WRITE = 'nodes.forks_write' NODE_CONTRIBUTORS_READ = 'nodes.contributors_read' NODE_CONTRIBUTORS_WRITE = 'nodes.contributors_write' PREPRINT_CONTRIBUTORS_READ = 'preprints.contributors_read' PREPRINT_CONTRIBUTORS_WRITE = 'preprints.contributors_write' NODE_FILE_READ = 'nodes.files_read' NODE_FILE_WRITE = 'nodes.files_write' PREPRINT_FILE_READ = 'preprints.files_read' PREPRINT_FILE_WRITE = 'preprints.files_write' NODE_ADDON_READ = 'nodes.addon_read' NODE_ADDON_WRITE = 'nodes.addon_write' NODE_LINKS_READ = 'nodes.links_read' NODE_LINKS_WRITE = 'nodes.links_write' NODE_VIEW_ONLY_LINKS_READ = 'node.view_only_links_read' NODE_VIEW_ONLY_LINKS_WRITE = 'node.view_only_links_write' NODE_PREPRINTS_READ = 'node.preprints_read' NODE_PREPRINTS_WRITE = 'node.preprints_write' PREPRINTS_READ = 'preprint.preprints_read' PREPRINTS_WRITE = 'preprint.preprints_write' REGISTRATION_VIEW_ONLY_LINKS_READ = 'registration.view_only_links_read' REGISTRATION_VIEW_ONLY_LINKS_WRITE = 'registration.view_only_links_write' SCHEMA_READ = 'schemas.read' NODE_DRAFT_REGISTRATIONS_READ = 'nodes.draft_registrations_read' NODE_DRAFT_REGISTRATIONS_WRITE = 'nodes.draft_registrations_write' NODE_REGISTRATIONS_READ = 'nodes.registrations_read' NODE_REGISTRATIONS_WRITE = 'nodes.registrations_write' NODE_CITATIONS_READ = 'nodes.citations_read' NODE_CITATIONS_WRITE = 'nodes.citations_write' PREPRINT_CITATIONS_READ = 'preprints.citations_read' PREPRINT_CITATIONS_WRITE = 'preprints.citations_write' NODE_COMMENTS_READ = 'comments.data_read' NODE_COMMENTS_WRITE = 'comments.data_write' LICENSE_READ = 'license.data_read' COMMENT_REPORTS_READ = 'comments.reports_read' COMMENT_REPORTS_WRITE = 'comments.reports_write' APPLICATIONS_READ = 'applications_read' APPLICATIONS_WRITE = 'applications_write' NODE_LOG_READ = 'nodes.logs_read' TOKENS_READ = 'tokens_read' 
TOKENS_WRITE = 'tokens_write' ALERTS_READ = 'alerts_read' ALERTS_WRITE = 'alerts_write' INSTITUTION_READ = 'institutions_read' SCOPES_READ = 'scopes_read' SEARCH = 'search_read' ACTIONS_READ = 'actions_read' ACTIONS_WRITE = 'actions_write' MODERATORS_READ = 'moderators_read' MODERATORS_WRITE = 'moderators_write' NODE_REQUESTS_READ = 'node_requests_read' NODE_REQUESTS_WRITE = 'node_requests_write' NODE_SETTINGS_READ = 'node_settings_read' NODE_SETTINGS_WRITE = 'node_settings_write' PREPRINT_REQUESTS_READ = 'preprint_requests_read' PREPRINT_REQUESTS_WRITE = 'preprint_requests_write' PROVIDERS_WRITE = 'providers_write' CHRONOS_SUBMISSION_READ = 'chronos_submission_read' CHRONOS_SUBMISSION_WRITE = 'chronos_submission_write' WAFFLE_READ = 'waffle_read' NULL = 'null' # NOTE: Use with extreme caution. # This should NEVER be assigned to endpoints: # - with mutable data, # - that might contain *anything* that could be personally-identifiable, # - as a write scope ALWAYS_PUBLIC = 'always_public' ORGANIZER_COLLECTIONS_BASE_READ = 'collections.base_read' ORGANIZER_COLLECTIONS_BASE_WRITE = 'collections.base_write' COLLECTED_META_READ = 'collected_meta_read' COLLECTED_META_WRITE = 'collected_meta_write' GUIDS_READ = 'guids.base_read' WIKI_BASE_READ = 'wikis.base_read' WIKI_BASE_WRITE = 'wikis.base_write' IDENTIFIERS_READ = 'identifiers.data_read' IDENTIFIERS_WRITE = 'identifiers.data_write' METRICS_BASIC = 'metrics_basic' METRICS_RESTRICTED = 'metrics_restricted' class ComposedScopes(object): """ Composed scopes, listed in increasing order of access (most restrictive first). Each named constant is a tuple. """ # IMPORTANT: Composed scopes exist only as an internal implementation detail. # All views should be based on selections from CoreScopes, above # Users collection USERS_READ = (CoreScopes.USERS_READ, CoreScopes.SUBSCRIPTIONS_READ, CoreScopes.ALERTS_READ, CoreScopes.USER_SETTINGS_READ) USERS_WRITE = USERS_READ + (CoreScopes.USERS_WRITE, CoreScopes.SUBSCRIPTIONS_WRITE, CoreScopes.ALERTS_WRITE, CoreScopes.USER_SETTINGS_WRITE) USERS_CREATE = USERS_READ + (CoreScopes.USERS_CREATE, ) # User extensions USER_EMAIL_READ = (CoreScopes.USER_EMAIL_READ, ) # Applications collection APPLICATIONS_READ = (CoreScopes.APPLICATIONS_READ, ) APPLICATIONS_WRITE = APPLICATIONS_READ + (CoreScopes.APPLICATIONS_WRITE,) # Tokens collection TOKENS_READ = (CoreScopes.TOKENS_READ,) TOKENS_WRITE = TOKENS_READ + (CoreScopes.TOKENS_WRITE,) # Guid redirect view GUIDS_READ = (CoreScopes.GUIDS_READ, ) # Metaschemas collection METASCHEMAS_READ = (CoreScopes.SCHEMA_READ, ) # Draft registrations DRAFT_READ = (CoreScopes.NODE_DRAFT_REGISTRATIONS_READ, ) DRAFT_WRITE = (CoreScopes.NODE_DRAFT_REGISTRATIONS_WRITE, ) # Identifier views IDENTIFIERS_READ = (CoreScopes.IDENTIFIERS_READ, ) IDENTIFIERS_WRITE = (CoreScopes.IDENTIFIERS_WRITE, ) # Comment reports collection COMMENT_REPORTS_READ = (CoreScopes.COMMENT_REPORTS_READ,)<|fim▁hole|> COMMENT_REPORTS_WRITE = COMMENT_REPORTS_READ + (CoreScopes.COMMENT_REPORTS_WRITE,) # Nodes collection. # Base node data includes node metadata, links, children, and preprints. 
NODE_METADATA_READ = (CoreScopes.NODE_BASE_READ, CoreScopes.NODE_CHILDREN_READ, CoreScopes.NODE_LINKS_READ, CoreScopes.NODE_CITATIONS_READ, CoreScopes.NODE_COMMENTS_READ, CoreScopes.NODE_LOG_READ, CoreScopes.NODE_FORKS_READ, CoreScopes.WIKI_BASE_READ, CoreScopes.LICENSE_READ, CoreScopes.IDENTIFIERS_READ, CoreScopes.NODE_PREPRINTS_READ, CoreScopes.PREPRINT_REQUESTS_READ) NODE_METADATA_WRITE = NODE_METADATA_READ + \ (CoreScopes.NODE_BASE_WRITE, CoreScopes.NODE_CHILDREN_WRITE, CoreScopes.NODE_LINKS_WRITE, CoreScopes.IDENTIFIERS_WRITE, CoreScopes.NODE_CITATIONS_WRITE, CoreScopes.NODE_COMMENTS_WRITE, CoreScopes.NODE_FORKS_WRITE, CoreScopes.NODE_PREPRINTS_WRITE, CoreScopes.PREPRINT_REQUESTS_WRITE, CoreScopes.WIKI_BASE_WRITE) # Preprints collection # TODO: Move Metrics scopes to their own restricted composed scope once the Admin app can manage scopes on tokens/apps PREPRINT_METADATA_READ = (CoreScopes.PREPRINTS_READ, CoreScopes.PREPRINT_CITATIONS_READ, CoreScopes.IDENTIFIERS_READ, CoreScopes.METRICS_BASIC,) PREPRINT_METADATA_WRITE = PREPRINT_METADATA_READ + (CoreScopes.PREPRINTS_WRITE, CoreScopes.PREPRINT_CITATIONS_WRITE, CoreScopes.METRICS_RESTRICTED,) # Organizer Collections collection # Using Organizer Collections and the node links they collect. Reads Node Metadata. ORGANIZER_READ = (CoreScopes.ORGANIZER_COLLECTIONS_BASE_READ, CoreScopes.COLLECTED_META_READ,) + NODE_METADATA_READ ORGANIZER_WRITE = ORGANIZER_READ + (CoreScopes.ORGANIZER_COLLECTIONS_BASE_WRITE, CoreScopes.NODE_LINKS_WRITE, CoreScopes.COLLECTED_META_WRITE) # Privileges relating to editing content uploaded under that node NODE_DATA_READ = (CoreScopes.NODE_FILE_READ, CoreScopes.WIKI_BASE_READ) NODE_DATA_WRITE = NODE_DATA_READ + \ (CoreScopes.NODE_FILE_WRITE, CoreScopes.WIKI_BASE_WRITE) # Privileges relating to editing content uploaded under that preprint PREPRINT_DATA_READ = (CoreScopes.PREPRINT_FILE_READ,) PREPRINT_DATA_WRITE = PREPRINT_DATA_READ + \ (CoreScopes.PREPRINT_FILE_WRITE,) # Privileges relating to who can access a node (via contributors or registrations) NODE_ACCESS_READ = (CoreScopes.NODE_CONTRIBUTORS_READ, CoreScopes.NODE_REGISTRATIONS_READ, CoreScopes.NODE_VIEW_ONLY_LINKS_READ, CoreScopes.REGISTRATION_VIEW_ONLY_LINKS_READ, CoreScopes.NODE_REQUESTS_READ, CoreScopes.NODE_SETTINGS_READ) NODE_ACCESS_WRITE = NODE_ACCESS_READ + \ (CoreScopes.NODE_CONTRIBUTORS_WRITE, CoreScopes.NODE_REGISTRATIONS_WRITE, CoreScopes.NODE_VIEW_ONLY_LINKS_WRITE, CoreScopes.REGISTRATION_VIEW_ONLY_LINKS_WRITE, CoreScopes.NODE_REQUESTS_WRITE, CoreScopes.NODE_SETTINGS_WRITE) # Privileges relating to who can access a preprint via contributors PREPRINT_ACCESS_READ = (CoreScopes.PREPRINT_CONTRIBUTORS_READ,) PREPRINT_ACCESS_WRITE = PREPRINT_ACCESS_READ + \ (CoreScopes.PREPRINT_CONTRIBUTORS_WRITE,) # Combine all sets of node permissions into one convenience level NODE_ALL_READ = NODE_METADATA_READ + NODE_DATA_READ + NODE_ACCESS_READ NODE_ALL_WRITE = NODE_ALL_READ + NODE_METADATA_WRITE + NODE_DATA_WRITE + NODE_ACCESS_WRITE # Combine preprint permissions PREPRINT_ALL_READ = PREPRINT_METADATA_READ + PREPRINT_ACCESS_READ + PREPRINT_DATA_READ PREPRINT_ALL_WRITE = PREPRINT_ALL_READ + PREPRINT_METADATA_WRITE + PREPRINT_ACCESS_WRITE + PREPRINT_DATA_WRITE # Reviews REVIEWS_READ = (CoreScopes.ACTIONS_READ, CoreScopes.MODERATORS_READ) REVIEWS_WRITE = (CoreScopes.ACTIONS_WRITE, CoreScopes.MODERATORS_WRITE, CoreScopes.PROVIDERS_WRITE) # Full permissions: all routes intended to be exposed to third party API users FULL_READ = NODE_ALL_READ + USERS_READ + 
ORGANIZER_READ + GUIDS_READ + METASCHEMAS_READ + DRAFT_READ + REVIEWS_READ + PREPRINT_ALL_READ + (CoreScopes.MEETINGS_READ, CoreScopes.INSTITUTION_READ, CoreScopes.SEARCH, CoreScopes.SCOPES_READ) FULL_WRITE = FULL_READ + NODE_ALL_WRITE + USERS_WRITE + ORGANIZER_WRITE + DRAFT_WRITE + REVIEWS_WRITE + PREPRINT_ALL_WRITE # Admin permissions- includes functionality not intended for third-party use ADMIN_LEVEL = FULL_WRITE + APPLICATIONS_WRITE + TOKENS_WRITE + COMMENT_REPORTS_WRITE + USERS_CREATE + REVIEWS_WRITE +\ (CoreScopes.USER_EMAIL_READ, CoreScopes.USER_ADDON_READ, CoreScopes.NODE_ADDON_READ, CoreScopes.NODE_ADDON_WRITE, CoreScopes.WAFFLE_READ, ) # List of all publicly documented scopes, mapped to composed scopes defined above. # Return as sets to enable fast comparisons of provided scopes vs those required by a given node # These are the ***only*** scopes that will be recognized from CAS public_scopes = { 'osf.full_read': scope(parts_=frozenset(ComposedScopes.FULL_READ), description='View all information associated with this account, including for ' 'private projects.', is_public=True), 'osf.full_write': scope(parts_=frozenset(ComposedScopes.FULL_WRITE), description='View and edit all information associated with this account, including for ' 'private projects.', is_public=True), 'osf.users.profile_read': scope(parts_=frozenset(ComposedScopes.USERS_READ), description='Read your profile data.', is_public=True), 'osf.users.email_read': scope(parts_=frozenset(ComposedScopes.USER_EMAIL_READ), description='Read your primary email address.', is_public=True), } if settings.DEV_MODE: public_scopes.update({ 'osf.users.profile_write': scope(parts_=frozenset(ComposedScopes.USERS_WRITE), description='Read and edit your profile data.', is_public=True), 'osf.nodes.metadata_read': scope(parts_=frozenset(ComposedScopes.NODE_METADATA_READ), description='Read a list of all public and private nodes accessible to this ' 'account, and view associated metadata such as project descriptions ' 'and titles.', is_public=True), 'osf.nodes.metadata_write': scope(parts_=frozenset(ComposedScopes.NODE_METADATA_WRITE), description='Read a list of all public and private nodes accessible to this ' 'account, and view and edit associated metadata such as project ' 'descriptions and titles.', is_public=True), 'osf.nodes.data_read': scope(parts_=frozenset(ComposedScopes.NODE_DATA_READ), description='List and view files associated with any public or private projects ' 'accessible to this account.', is_public=True), 'osf.nodes.data_write': scope(parts_=frozenset(ComposedScopes.NODE_DATA_WRITE), description='List, view, and update files associated with any public or private ' 'projects accessible to this account.', is_public=True), 'osf.nodes.access_read': scope(parts_=frozenset(ComposedScopes.NODE_ACCESS_READ), description='View the contributors list and any established registrations ' 'associated with public or private projects.', is_public=True), 'osf.nodes.access_write': scope(parts_=frozenset(ComposedScopes.NODE_ACCESS_WRITE), description='View and edit the contributors list associated with public or ' 'private projects accessible to this account. Also view and create ' 'registrations.', is_public=True), # TODO: Language: Does registrations endpoint allow creation of registrations? Is that planned? 
'osf.nodes.full_read': scope(parts_=frozenset(ComposedScopes.NODE_ALL_READ), description='View all metadata, files, and access rights associated with all public ' 'and private projects accessible to this account.', is_public=True), 'osf.nodes.full_write': scope(parts_=frozenset(ComposedScopes.NODE_ALL_WRITE), description='View and edit all metadata, files, and access rights associated with ' 'all public and private projects accessible to this account.', is_public=True), # Undocumented scopes that can not be requested by third parties (per CAS restriction) 'osf.users.create': scope(parts_=frozenset(ComposedScopes.USERS_CREATE), description='This permission should only be granted to OSF collaborators. Allows a site to ' 'programmatically create new users with this account.', is_public=False), 'osf.admin': scope(parts_=frozenset(ComposedScopes.ADMIN_LEVEL), description='This permission should only be granted to OSF administrators. Allows a site to ' 'create, read, edit, and delete all information associated with this account.', is_public=False), }) def normalize_scopes(scopes): """ Given a list of public-facing scope names from a CAS token, return the list of internal scopes This is useful for converting a single broad scope name (from CAS) into the small constituent parts (as used by views) :param list scopes: a list public facing scopes """ all_scopes = set() for sc in scopes: try: scope_tuple = public_scopes[sc] all_scopes |= scope_tuple.parts except KeyError: pass return all_scopes if __name__ == '__main__': # Print some data to console, to help audit what views/core scopes map to a given public/composed scope # Although represented internally as a set, print as a sorted list for readability. from pprint import pprint as pp pp({k: sorted(v.parts) for k, v in public_scopes.items()})<|fim▁end|>
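A quick sanity check of how normalize_scopes in the sample above expands a public CAS scope name into core scopes; this sketch assumes it runs in (or imports) the module shown:

granted = normalize_scopes(['osf.users.email_read'])
# 'osf.users.email_read' is built from USER_EMAIL_READ plus the implicit
# ALWAYS_PUBLIC part added by the patched `scope` namedtuple:
assert granted == {'users.email_read', 'always_public'}

# Unknown scope names are skipped (the KeyError is swallowed), not raised:
assert normalize_scopes(['no.such.scope']) == set()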
<|file_name|>services.py<|end_file_name|><|fim▁begin|>"""
sprintkit.services
==================

Implementations of most Developer Sandbox Services.

:Copyright: (c) 2011 by Sprint.
:License: MIT, see LICENSE for more details.

"""

from ConfigParser import SafeConfigParser
from datetime import datetime
from hashlib import md5
import json
import os
import time
import urlparse
import uuid

from restkit import Resource
from restkit.errors import RequestError, RequestTimeout, ResourceError

from sprintkit import errors
from sprintkit.gps import Coordinates, Gps2dFix


class Config(dict):
    '''Reads configuration information for the Sandbox API gateway.

    :Parameters:
        * path (string) - The path to your config file (default=None).

    :class:`Config` is a sub-classed version of dict and therefore can be
    used just like a dict to store Sandbox configuration information.

    If you do not specify a `path`, it will first try to read a config file
    named `sprintkit.conf` from the current working directory; next it will
    try to read from the default path: `$HOME/.sprintkit.conf`

    The default config file is in ini format. Here is a sample config file::

        [sprintkit]
        key = <sprint_developer_key>
        secret = <sprint_developer_secret>
        protocol = http
        host = test.sprintdevelopersandbox.com
        path = /developerSandbox/resources/v1

    :class:`Config` will also try to read the Sandbox Key and Sandbox Secret
    from the environment variables `SPRINTKEY` and `SPRINTSECRET`. It will
    try these last so they can be used to override values stored in the
    configuration file.

    :class:`Config` does not provide file writing capabilities, so any
    changes made to a config instance programmatically will need to be also
    made in the config file in order to make the changes permanent.

    '''

    def __init__(self, path=None):
        if path == None:
            home_dir = os.path.expanduser('~')
            run_dir = os.getcwd()
            default_runpath = os.path.join(run_dir, "sprintkit.conf")
            default_homepath = os.path.join(home_dir, ".sprintkit.conf")
            if os.path.exists(default_runpath):
                self.path = default_runpath
            else:
                self.path = default_homepath
        else:
            self.path = path

    def load(self):
        """Read the configuration file from path stored in `self.path`.

        :Raises: (:class:`sprintkit.errors.SprintkitError`) - If config file
            could not be found.

        """
        if os.path.exists(self.path):
            config = {}
            config_file = open(self.path, 'r')
            parser = SafeConfigParser()
            parser.readfp(config_file)
            config.update(parser.items('sprintkit'))
            if os.environ.has_key('SPRINTKEY') and os.environ.has_key('SPRINTSECRET'):
                config['key'] = os.environ['SPRINTKEY']
                config['secret'] = os.environ['SPRINTSECRET']
            self.update(config)
        else:
            raise errors.SprintkitError('Could not find configuration file: %s' % self.path)
        return self


class SandboxResource(Resource):
    """A class that manages connections to Sandbox Resources.

    Sub-class this to add support for new Sandbox resources not yet
    available in SprintKit. SandboxResource is a sub-class of a restkit
    Resource, so it accepts all its parameters.

    """

    def __init__(self, config=None, **kwargs):
        if config is None:
            self.config = Config()
            """A :class:`Config` instance for storing Sandbox credentials."""
            self.config.load()
        else:
            self.config = config
        self.api_url = urlparse.urlunparse((self.config['protocol'],
            self.config['host'], self.config['path'], '', '', ''))
        super(SandboxResource, self).__init__(self.api_url,
                follow_redirect=True, max_follow_redirect=10, **kwargs)

    def parse_response(self, response):
        """Parse a restkit Response payload into a json data dict.

        :Parameters: response (:class:`restkit.wrappers.Response`) - Response

        :Returns: (dict) - The raw Sandbox JSON data.

        :Raises: :class:`sprintkit.errors.ParsingError`

        """
        try:
            body = response.body_string()
            data = json.loads(body)
        except:
            raise errors.ParsingError("Malformed JSON data", body)
        return data

    def parse_errors(self, data):
        """Parse raw Sandbox JSON data looking for Sandbox thrown errors.

        :Parameters: data (dict) - The raw Sandbox JSON data.

        :Raises: :class:`sprintkit.errors.SandboxError`

        """
        if 'error' in data.keys():
            raise errors.SandboxError(data['error'])

    def sign_params(self, params, secret):
        """Build a dict of URL parameters and add a sig.

        :Parameters:
            * params (dict) - Dictionary of URL query param key/val pairs
            * secret (str) - The API Secret used to create signature.

        :Returns: (dict) - The parameters with a signature added.

        .. note::
            Read the documentation (http://goo.gl/Wu7T5) for details on
            generating the signature. Note, these parameters MUST NOT be url
            quoted before generating the signature.

        """
        #Update the timestamp if there is one
        if 'timestamp' in params.keys():
            params['timestamp'] = self.make_timestamp()
        #Stringify all values
        for key, val in params.items():
            params[key] = str(val)
        #Update the authentication signature if there is one
        if 'sig' in params.keys():
            del(params['sig'])
        #Make the authentication signature slug
        pairs = ["%s%s" % (key, params[key]) for key in sorted(params.keys())]
        #Sign it using our secret
        rawsig = "".join(pairs) + secret
        params['sig'] = md5(rawsig).hexdigest()
        return params

    def make_timestamp(self):
        """Generate an API timestamp.

        :Returns: (string) - The timestamp.

        .. note::
            The sandbox REST APIs require a timestamp parameter to help
            prevent replay attacks. This is a convenience utility to create
            those timestamps so that they are properly formatted. The
            timestamp should be the current time in the format:

                [YYYY]-[MM]-[DD]T[HH]:[MM]:[SS][ZZZ]

            [HH] refers to a zero-padded hour between 00 and 23 (where 00 is
            used to notate midnight at the start of a calendar day).

        """
        tnow = datetime.utcnow().replace(microsecond=0) #remove microseconds
        tzone = "UTC"
        timestamp = datetime.isoformat(tnow) + tzone
        return timestamp


class SMS(SandboxResource):
    """A Resource used to send SMS messages."""

    def send(self, mdns, msg):
        """Sends an SMS text message to a device or list of devices.

        :Parameters:
            * mdns (string) - The MDN(s) to send the message to.
            * msg (string) - The text message (160 characters).

        .. note::
            The `mdns` parameter must be a valid 10-digit MDN, or a comma
            separated list of mdns. For example::

                mdns = "0005551111"
                mdns = "0005551111,0005551212"

        :Returns: (dict) - The raw JSON Sandbox data.

        .. note::
            Here is a sample response for a successful transaction::

                {'MessagingResponse': [{'status': 'S', 'tranno': 'e6d6bd9',
                    'mdn': '9995551212', 'gcode': '1000'}]}

        :Raises:
            * :class:`sprintkit.errors.ConnectionError`
            * :class:`sprintkit.errors.ParsingError`
            * :class:`sprintkit.errors.SandboxError`

        """
        params = {'mdns': mdns,
                'msg': msg,
                'timestamp': True,
                'key': self.config['key'],
                'sig': True}
        params = self.sign_params(params, self.config['secret'])
        try:
            response = self.get('sms.json', params_dict=params)
        except (RequestError, RequestTimeout) as e:
            raise errors.ConnectionError(str(e))
        data = self.parse_response(response)
        self.parse_errors(data)
        #We only report the first error we find
        errs = [k for k in data.keys() if k != 'MessagingResponse']
        if errs:
            raise errors.SandboxError(errs[0])
        if 'MessagingResponse' not in data.keys():
            raise errors.ParsingError("Missing a MessagingResponse", response)
        return data


class Presence(SandboxResource):
    """A Resource to check if an MDN is reachable on the network.

    :Parameters: config (:class:`Config`) - The Sandbox configuration.

    """

    def get_presence(self, mdn):
        """Get the presence status of an MDN.

        :Parameters: mdn (string) - The MDN to check for reachability.

        :Returns: (dict) - The raw Sandbox JSON data.

        :Raises:
            * :class:`sprintkit.errors.ConnectionError`
            * :class:`sprintkit.errors.ParsingError`
            * :class:`sprintkit.errors.SandboxError`

        """
        params = {'mdn': mdn,
                'timestamp': True,
                'key': self.config['key'],
                'sig': True}
        params = self.sign_params(params, self.config['secret'])
        try:
            response = self.get('presence.json', params_dict=params)
        except (RequestError, RequestTimeout) as e:
            raise errors.ConnectionError(str(e))
        data = self.parse_response(response)
        self.parse_errors(data)
        return data

    def reachable(self, mdn):
        """Check if an MDN is reachable.

        :Parameters: mdn (string) - The MDN to check for reachability.

        :Returns: (bool) - True if the `mdn` is reachable.

        :Raises:
            * :class:`sprintkit.errors.ConnectionError`
            * :class:`sprintkit.errors.SandboxError`
            * :class:`sprintkit.errors.ParsingError`

        .. note::
            This is a convenience method. The same data can be extracted
            using the `get_presence()` method.

        """
        data = self.get_presence(mdn)
        try:
            status = data['status']
        except KeyError as e:
            #Report the missing key; `status` is unbound when this fires.
            raise errors.ParsingError("KeyError: '%s'." % e, data)
        if status != 'Reachable' and status != 'Unreachable':
            raise errors.ParsingError("ValueError: 'status' is incorrect.", data)
        return (status == 'Reachable')


class Location(SandboxResource):
    """A Resource for getting a location fix for an MDN.

    :Parameters: config (:class:`Config`) - The Sandbox configuration.

    """

    def get_location(self, mdn):
        """Get the location data for an `mdn`.

        :Parameters: mdn (string) - The MDN to get location fix for (10 digits).

        :Returns: (dict) - The raw Sandbox location data in JSON format.

        :Raises:
            * :class:`sprintkit.errors.ConnectionError`
            * :class:`sprintkit.errors.SandboxError`
            * :class:`sprintkit.errors.ParsingError`

        """
        params = {'mdn': mdn,
                'timestamp': True,
                'key': self.config['key'],
                'sig': True}
        params = self.sign_params(params, self.config['secret'])
        try:
            response = self.get('location.json', params_dict=params)
        except (RequestError, RequestTimeout) as e:
            raise errors.ConnectionError(str(e))
        data = self.parse_response(response)
        self.parse_errors(data)
        return data

    def locate(self, mdn):
        """Get the location data for an `mdn` (a convenience method).

        :Parameters: mdn (string) - The MDN to get location fix for (10 digits).

        :Returns: (:class:`sprintkit.gps.Gps2dFix`) - The Gps2dFix object

        :Raises:
            * :class:`sprintkit.errors.ConnectionError`
            * :class:`sprintkit.errors.SandboxError`
            * :class:`sprintkit.errors.ParsingError`

        .. note::
            This is a convenience method that returns an instance of
            :class:`sprintkit.gps.Gps2dFix` which contains all of the
            pertinent location information. To get the lat/lon use the
            coordinates attribute::

                lat = Gps2dFix.coordinates.latitude
                lon = Gps2dFix.coordinates.longitude
                (lat, lon) = Gps2dFix.coordinates

        """
        data = self.get_location(mdn)
        try:
            lat = float(data['lat'])
            lon = float(data['lon'])
            accuracy = int(data['accuracy'])
        except KeyError as e:
            raise errors.ParsingError("Missing %s" % e, data)
        except ValueError as e:
            raise errors.ParsingError(e, data)
        coord = Coordinates((lat,lon))
        return Gps2dFix(datetime.now(), coord, errors={'hepe':accuracy})


class Perimeter(SandboxResource):
    """A class used for checking if an mdn is within a geographic area
    specified by its Coordinates and a radius in meters.

    :Parameters:
        * coordinates (:class:`sprintkit.gps.Coordinates`, or tuple) - The
          center lat/lon of the perimeter.
        * radius (integer) - Radius of the perimeter in meters.

    .. note::
        The typical usage for Perimeter would be to create a perimeter based
        on a set of center coordinates and radius, then call its methods to
        check if devices are within the perimeter.

    """

    def __init__(self, coordinates, radius, config=None, **kwargs):
        self.coordinates = Coordinates(coordinates)
        self.radius = radius
        super(Perimeter, self).__init__(config, **kwargs)

    def get_perimeter(self, mdn):
        """Check if an mdn is inside this Perimeter.

        :Parameters: mdn (string): The mdn of the device to check

        :Returns: (dict) - The raw Sandbox JSON data.

        :Raises:
            * :class:`sprintkit.errors.ConnectionError`
            * :class:`sprintkit.errors.SandboxError`
            * :class:`sprintkit.errors.ParsingError`

        """
        lat = repr(self.coordinates.latitude)
        lon = repr(self.coordinates.longitude)
        rad = str(self.radius)
        params = {'mdn': mdn,
                'lat': lat,
                'long': lon,
                'rad': rad,
                'timestamp': True,
                'key': self.config['key'],
                'sig': True}
        params = self.sign_params(params, self.config['secret'])
        try:
            response = self.get('geofence/checkPerimeter.json', params_dict=params)
        except (RequestError, RequestTimeout) as e:
            raise errors.ConnectionError(str(e))
        data = self.parse_response(response)
        self.parse_errors(data)
        return data

    def inside(self, mdn):
        """Returns True if the mdn is inside this Perimeter.

        :Parameters: mdn (string): The mdn of the device to check the
            perimeter for.

        :Returns: (bool) - True if mdn is inside the perimeter, False otherwise.

        :Raises:
            * :class:`sprintkit.errors.ConnectionError`
            * :class:`sprintkit.errors.SandboxError`
            * :class:`sprintkit.errors.ParsingError`

        .. note::
            This method provides a simple perimeter check, if you also need
            to get the coordinates of the device at the same time as you
            check the perimeter, use the `get_perimeter` method instead.

        """
        data = self.get_perimeter(mdn)
        try:
            status = data['CurrentLocation']
        except KeyError as e:
            raise errors.ParsingError("Missing the CurrentLocation field", data)
        if status != 'INSIDE' and status != 'OUTSIDE':
            raise errors.ParsingError("ValueError for CurrentLocation", data)
        return (status == 'INSIDE')

    def check(self, mdn):
        """Check if an MDN is inside this Perimeter (a convenience method).

        :Parameters: mdn (string): The mdn of the device to check the
            perimeter for.

        :Returns: (tuple) - (bool, :class:`sprintkit.gps.Gps2dFix`)

        :Raises:
            * :class:`sprintkit.errors.ConnectionError`
            * :class:`sprintkit.errors.SandboxError`
            * :class:`sprintkit.errors.ParsingError`

        .. note::
            This convenience method returns a tuple (inside, fix). The
            boolean `inside` is True if the device is inside the fence and
            `fix` contains :class:`sprintkit.gps.Gps2dFix` which has all of
            the pertinent location information. To get the lat/lon use the
            coordinates attribute of Gps2dFix::

                lat = Gps2dFix.coordinates.latitude
                lon = Gps2dFix.coordinates.longitude
                (lat, lon) = Gps2dFix.coordinates

        """
        data = self.get_perimeter(mdn)
        timestamp = datetime.now()
        try:
            lat = float(data['Latitude'])
            lon = float(data['Longitude'])
            accuracy = float(data['Accuracy'])
            status = data['CurrentLocation']
        except KeyError as e:
            raise errors.ParsingError("Missing %s" % e, data)
        except ValueError as e:
            raise errors.ParsingError(e, data)
        coord = Coordinates((lat,lon))
        fix = Gps2dFix(timestamp, coord, errors={'hepe':accuracy})
        if status != 'INSIDE' and status != 'OUTSIDE':
            raise errors.ParsingError("ValueError for CurrentLocation", data)
        inside = (status == 'INSIDE')
        return (inside, fix)

    def distance_to(self, mdn):
        """Calculate the distance from the Perimeter to the `mdn`.

        :Parameters:
            * mdn (string) - The device MDN to calculate distance to.

        :Returns: (int) - The distance to the MDN in meters.

        :Raises:
            * :class:`sprintkit.errors.ConnectionError`
            * :class:`sprintkit.errors.ParsingError`
            * :class:`sprintkit.errors.SandboxError`

        .. note::
            This method does not use the Sandbox functions to calculate the
            distance, instead the location is first determined using the
            `locate()` method, then the distance to these coordinates is
            calculated using the haversine formula.

        """
        #Perimeter has no locate() of its own; use a Location resource that
        #shares this Perimeter's config to get the current fix.
        current_location = Location(self.config).locate(mdn).coordinates
        return self.coordinates - current_location


class Fence(SandboxResource):
    """A Sandbox Resource for modifying geofences.

    :Parameters:
        * config (:class:`Config`) - The Sandbox configuration.
        * fenceid (integer): A unique number for identifying the fence.
        * name (string): A text name for the fence.
        * coordinates (:class:`sprintkit.gps.Coordinates`): The coordinates
          for the center of the fence.
        * radius (integer): The radius of fence in meters.
        * days (string): The days of week to monitor fence [SMTWHFA].
        * start_time (string): The time when fence becomes active "HHMM".
        * end_time (string): The time when fence becomes inactive "HHMM".

    .. note::
        This object is not intended to be instantiated by the end user
        directly, instead it is returned when calling the GeoFence.fences()
        method.

    """

    def __init__(self, fenceid, name, coordinates, radius, days, start_time,
            end_time, status, config=None, **kwargs):
        self.fenceid = fenceid
        self.name = name
        self.coordinates = coordinates
        self.radius = radius
        self.days = days
        self.start_time = start_time
        self.end_time = end_time
        self.status = status
        super(Fence, self).__init__(config, **kwargs)

    def activate(self):
        """Activate this Fence.

        :Returns: (dict) - The raw Sandbox JSON data.

        :Raises:
            * :class:`sprintkit.errors.ConnectionError`
            * :class:`sprintkit.errors.ParsingError`
            * :class:`sprintkit.errors.SandboxError`

        """
        params = {'fenceId': self.fenceid,
                'timestamp': True,
                'key': self.config['key'],
                'sig': True}
        params = self.sign_params(params, self.config['secret'])
        try:
            response = self.get('geofence/activate.json', params_dict=params)
        except (RequestError, RequestTimeout) as e:
            raise errors.ConnectionError(str(e))
        data = self.parse_response(response)
        self.parse_errors(data)
        try:
            message = data['Message']
        except KeyError:
            raise errors.ParsingError("Missing a `Message` field.", data)
        if message == 'FENCE_ACTIVATED':
            self.status = 'active'
            return data
        else:
            raise errors.GeoFenceError(message)

    def deactivate(self):
        """De-activate this Fence.

        :Returns: (dict) - The raw Sandbox JSON data.

        :Raises:
            * :class:`sprintkit.errors.ConnectionError`
            * :class:`sprintkit.errors.ParsingError`
            * :class:`sprintkit.errors.SandboxError`

        """
        params = {'fenceId': self.fenceid,
                'timestamp': True,
                'key': self.config['key'],
                'sig': True}
        params = self.sign_params(params, self.config['secret'])
        try:
            response = self.get('geofence/deactivate.json', params_dict=params)
        except (RequestError, RequestTimeout) as e:
            raise errors.ConnectionError(str(e))
        data = self.parse_response(response)
        self.parse_errors(data)
        return data

    def get_devices(self):
        """Returns the devices associated with this fence.

        :Returns: (dict) - The raw Sandbox JSON data.

        :Raises:
            * :class:`sprintkit.errors.ConnectionError`
            * :class:`sprintkit.errors.ParsingError`
            * :class:`sprintkit.errors.SandboxError`

        """
        params = {'fenceId': self.fenceid,
                'timestamp': True,
                'key': self.config['key'],
                'sig': True}
        params = self.sign_params(params, self.config['secret'])
        try:
            response = self.get('geofence/listDevices.json', params_dict=params)
        except (RequestError, RequestTimeout) as e:
            raise errors.ConnectionError(str(e))
        data = self.parse_response(response)
        self.parse_errors(data)
        return data

    def devices(self):
        """Returns the devices associated with this fence (a convenience method).

        :Returns: (dict) - The devices associated with a fence.

        :Raises:
            * :class:`sprintkit.errors.ConnectionError`
            * :class:`sprintkit.errors.ParsingError`
            * :class:`sprintkit.errors.SandboxError`

        .. note::
            This method returns a dictionary that maps an mdn (string) to a
            deviceid (integer) for each device that is being monitored
            within this geofence. For example::

                devices = {"1115551212": 102}

        """
        result = self.get_devices()
        device_list = result['Device']
        devices = {}
        for device in device_list:
            if device.has_key('Message'):
                return devices
            devices[device['MDN']] = int(device['DeviceID'])
        return devices

    def add_device(self, mdn):
        """Add a device to be monitored inside this Fence.

        :Parameters: mdn (string) - The mdn of the device to be monitored.

        :Returns: (dict) - The raw Sandbox JSON data.

        :Raises:
            * :class:`sprintkit.errors.ConnectionError`
            * :class:`sprintkit.errors.ParsingError`
            * :class:`sprintkit.errors.SandboxError`

        """
        params = {'fenceId': self.fenceid,
                'mdn': mdn,
                'timestamp': True,
                'key': self.config['key'],
                'sig': True}
        params = self.sign_params(params, self.config['secret'])
        try:
            response = self.get('geofence/addDevice.json', params_dict=params)
        except (RequestError, RequestTimeout) as e:
            raise errors.ConnectionError(str(e))<|fim▁hole|>
        self.parse_errors(data)
        try:
            message = data['Message']
        except KeyError:
            raise errors.ParsingError("Missing a `Message` field.")
        if message != 'DEVICE_ADDED':
            raise errors.GeoFenceError(message)
        else:
            return data

    def delete_device(self, mdn):
        """Delete a device associated with this Fence.

        :Parameters: mdn (string) - The mdn of the device to be removed from
            monitoring.

        :Returns: (dict) - The raw Sandbox JSON data.

        :Raises:
            * :class:`sprintkit.errors.ConnectionError`
            * :class:`sprintkit.errors.ParsingError`
            * :class:`sprintkit.errors.SandboxError`

        .. note::
            The Sandbox does not provide a method to remove a device from a
            fence using its mdn, so we have to first make a call to
            get_devices() to get the deviceid associated with the mdn.

        """
        devices = self.devices()
        try:
            deviceid = devices[mdn]
        except KeyError:
            raise errors.GeoFenceError("DEVICE_NOTFOUND")
        params = {'deviceId': deviceid,
                'timestamp': True,
                'key': self.config['key'],
                'sig': True}
        params = self.sign_params(params, self.config['secret'])
        try:
            response = self.get('geofence/deleteDevice.json', params_dict=params)
        except (RequestError, RequestTimeout) as e:
            raise errors.ConnectionError(str(e))
        data = self.parse_response(response)
        self.parse_errors(data)
        try:
            message = data['Message']
        except KeyError:
            raise errors.ParsingError("Missing a `Message` field.")
        if message != 'DEVICE_DELETED':
            raise errors.GeoFenceError(message)
        else:
            return data

    def get_recipients(self):
        """Get the recipients of notification of geofence events.

        :Returns: (dict) - The raw Sandbox JSON data.

        :Raises:
            * :class:`sprintkit.errors.ConnectionError`
            * :class:`sprintkit.errors.ParsingError`
            * :class:`sprintkit.errors.SandboxError`

        """
        params = {'fenceId': self.fenceid,
                'timestamp': True,
                'key': self.config['key'],
                'sig': True}
        params = self.sign_params(params, self.config['secret'])
        try:
            response = self.get('geofence/listRecipients.json', params_dict=params)
        except (RequestError, RequestTimeout) as e:
            raise errors.ConnectionError(str(e))
        data = self.parse_response(response)
        self.parse_errors(data)
        return data

    def recipients(self):
        """Get the recipients of notification of geofence events. This is a
        convenience method.

        This method returns a dictionary that maps a recipient mdnurl
        (string) to a recipientid (integer). The mdnurl is either an MDN or
        a URL that events for this geofence will be sent to. Here is a
        sample::

            recipients = {"1115551212": 105}

        :Returns: (dict)

        :Raises:
            * :class:`sprintkit.errors.ConnectionError`
            * :class:`sprintkit.errors.ParsingError`
            * :class:`sprintkit.errors.SandboxError`

        """
        result = self.get_recipients()
        recipients = {}
        recipient_list = result['Recipient']
        for recipient in recipient_list:
            try:
                recipients[recipient['MDNURL']] = int(recipient['RecipientID'])
            except:
                return recipients
        return recipients

    def add_recipient(self, recipient):
        """Add a recipient for a Fence notification event.

        :Parameters: recipient (string) - Either an MDN or a URL

        :Returns: (dict) - The raw Sandbox JSON data.

        :Raises:
            * :class:`sprintkit.errors.ConnectionError`
            * :class:`sprintkit.errors.ParsingError`
            * :class:`sprintkit.errors.SandboxError`

        """
        params = {'fenceId': self.fenceid,
                'mdnURL': recipient,
                'timestamp': True,
                'key': self.config['key'],
                'sig': True}
        params = self.sign_params(params, self.config['secret'])
        try:
            response = self.get('geofence/addRecipient.json', params_dict=params)
        except (RequestError, RequestTimeout) as e:
            raise errors.ConnectionError(str(e))
        data = self.parse_response(response)
        self.parse_errors(data)
        return data

    def delete_recipient(self, recipient):
        """Delete a recipient of a geofence notification.

        :Parameters: recipient (string) - Either an MDN or a URL.

        :Returns: (dict) - The raw Sandbox JSON data.

        :Raises:
            * :class:`sprintkit.errors.ConnectionError`
            * :class:`sprintkit.errors.ParsingError`
            * :class:`sprintkit.errors.SandboxError`

        """
        recipients = self.recipients()
        if not recipients.has_key(recipient):
            raise errors.GeoFenceError("UNKNOWN_RECIPIENT")
        recipientid = recipients[recipient]
        params = {'recipientId': recipientid,
                'timestamp': True,
                'key': self.config['key'],
                'sig': True}
        params = self.sign_params(params, self.config['secret'])
        try:
            response = self.get('geofence/deleteRecipient.json', params_dict=params)
        except (RequestError, RequestTimeout) as e:
            raise errors.ConnectionError(str(e))
        data = self.parse_response(response)
        self.parse_errors(data)
        return data


class GeoFence(SandboxResource):
    """A SandboxResource to retrieve and create geofences.

    :Parameters: config (:class:`Config`) - The Sandbox configuration.

    """

    def get_fences(self):
        """Get all of the geofences associated with a Sandbox user account.

        :Returns: (dict) - The raw Sandbox JSON data.

        :Raises:
            * :class:`sprintkit.errors.ConnectionError`
            * :class:`sprintkit.errors.ParsingError`
            * :class:`sprintkit.errors.SandboxError`

        .. note::
            This method retrieves the list of geofences for this user
            account. Here is a sample of the data returned::

                {u'Fence': [{u'Status': u'Inactive', u'FenceID': u'139',
                    u'Name': u'test', u'Days': u'W',
                    u'Longitude': u'-94.1234', u'StartTime': u'1100',
                    u'Latitude': u'38.1234', u'LastMonitorTime': u'NEVER',
                    u'EndTime': u'2200', u'Dimensions': u'2000'}]}

        """
        params = {'timestamp': True,
                'key': self.config['key'],
                'sig': True}
        params = self.sign_params(params, self.config['secret'])
        try:
            response = self.get('geofence/list.json', params_dict=params)
        except (RequestError, RequestTimeout) as e:
            raise errors.ConnectionError(str(e))
        data = self.parse_response(response)
        self.parse_errors(data)
        return data

    def fences(self, match=None):
        """Get all of the geofences associated with a Sandbox user account.

        :Parameters:
            * match - (int or string) - The `fenceid` or `name` of a fence.

        :Returns: (list) - A List of Fence objects.

        :Raises:
            * :class:`sprintkit.errors.ConnectionError`
            * :class:`sprintkit.errors.ParsingError`
            * :class:`sprintkit.errors.SandboxError`

        .. note::
            This method returns a list of Fence objects. Each Fence
            represents a Sandbox geofence. With the Fence objects you can
            control the individual geofences. You can filter the list of
            Fence objects by supplying a `match` argument. This is either
            the name of the geofence (string) or the fenceid (an integer).
""" fences = [] data = self.get_fences() try: for fence in data['Fence']: if 'Message' in fence.keys(): return [] fenceid = int(fence['FenceID']) name = fence['Name'] lat = float(fence['Latitude']) lon = float(fence['Longitude']) coordinates = Coordinates((lat,lon)) radius = int(fence['Dimensions']) days = fence['Days'] start_time = fence['StartTime'] end_time = fence['EndTime'] status = fence['Status'].lower() if match: if isinstance(match, str) and match == name: fences.append(Fence(fenceid, name, coordinates, radius, days, start_time, end_time, status, self.config)) elif isinstance(match, int) and match == fenceid: fences.append(Fence(fenceid, name, coordinates, radius, days, start_time, end_time, status, self.config)) else: continue else: fences.append(Fence(fenceid, name, coordinates, radius, days, start_time, end_time, status, self.config)) except KeyError as e: raise errors.ParsingError("KeyError '%s'." % e, data) return fences def add_fence(self, name, start_time, end_time, coordinates, radius, interval, days, notify_event): """Add a fence to a Sandbox user account. :Parameters: * name (string) - A name to give this geofence. * start_name (string) - Time when the fence becomes active "HHMM" * end_time (string) - Time when the fence becomes inactive "HHMM" * coordinates (:class:`sprintkit.gps.Coordinates`) - The lat/lon center of the fence. * radius (integer) - Radius of fence in meters. * interval (integer) - How often to check the fence (in 5 minute increments). * days (string) - Days of week to check the fence. * notify_event (string) - What event triggers a notification. .. note:: The `days` parameter is a string that corresponds to which days of the week that a fence will be active. Each day of the week is represented by a letter, and these letters can be concatenated:: sunday = 'S' monday = 'M' tuesday = 'T' wednesday = 'W' thursday = 'H' friday = 'F' saturday = 'A' days = sunday + wednesday + friday days = "SWF" #Active on days Sunday, Wednesday and Friday The `start_time` and `end_time` parameters are strings that represent what time a fence will become active and what time it will become inactive. The string is in the format "HHMM"" where HH is the 24-hour time (00-23) where 00 is midnight. MM is the minutes (00-59). The `notify_event` parameter specifies whether the fence should notify on 'in', 'out' or 'both' events. :Returns: (:class:`sprintkit.services.Fence`) - The Fence that was added. :Raises: * :class:`sprintkit.errors.ConnectionError` * :class:`sprintkit.errors.ParsingError` * :class:`sprintkit.errors.SandboxError` """ coordinates = Coordinates(coordinates) params = {'name': name, 'strtTime': start_time, 'endTime': end_time, 'lat': repr(coordinates.latitude), 'long': repr(coordinates.longitude), 'dim': radius, 'interval': interval, 'days': days, 'notifyEvent': notify_event, 'timestamp': True, 'key': self.config['key'], 'sig': True} params = self.sign_params(params, self.config['secret']) try: response = self.get('geofence/add.json', params_dict=params) except (RequestError, RequestTimeout) as e: raise errors.ConnectionError(str(e)) data = self.parse_response(response) self.parse_errors(data) if data['message'] == 'FENCE_ADDED': fenceid = int(data['ID']) fence = [fence for fence in self.fences() if fence.fenceid == fenceid] if len(fence) == 1: return fence[0] else: raise errors.GeoFenceError("FENCE_NOTADDED") else: raise errors.GeoFenceError(data['message']) def delete_fence(self, fence): """Delete a geofence from this account. 

        :Parameters: (:class:`sprintkit.services.Fence`) - A Fence object.

        :Returns: (dict) - The raw Sandbox JSON data.

        :Raises:
            * :class:`sprintkit.errors.ConnectionError`
            * :class:`sprintkit.errors.ParsingError`
            * :class:`sprintkit.errors.SandboxError`

        """
        params = {'fenceId': fence.fenceid,
                'timestamp': True,
                'key': self.config['key'],
                'sig': True}
        params = self.sign_params(params, self.config['secret'])
        try:
            response = self.get('geofence/delete.json', params_dict=params)
        except (RequestError, RequestTimeout) as e:
            raise errors.ConnectionError(str(e))
        data = self.parse_response(response)
        self.parse_errors(data)
        return data


class Account(SandboxResource):
    """A class for configuring devices associated with a developer account.

    :Parameters:
        * config (dict) - A :class:`Config` instance (default=None).

    """

    def get_devices(self, status=None, mdn=None):
        """Retrieve devices associated with this developer account.

        :Optional Parameters:
            * status (string) - The authorization status criteria to filter on.
            * mdn (string) - The MDN to get status for.

        :Returns: (dict) - The raw JSON Sandbox Data.

        .. note::
            Retrieves all of the devices associated with this developer
            account. The devices to be returned can be filtered by
            specifying either a device `status` or an `mdn`.

            The `status` parameter filters the devices that are returned. It
            has the following permitted values: 'p' for devices that are
            `pending`, 'a' for devices that are `approved`, 'x' for devices
            that are `declined`, and 'd' for devices that have been
            `deleted`.

            The `mdn` parameter can be used to return authorization status
            for a single device instead of all devices associated with this
            account.

            The authorization data is returned as a dict. Here is an
            example::

                {"username": "your_username",
                 "devices": {
                     "approved": ["1115551212", "1115551213"],
                     "declined": [],
                     "pending": [],
                     "deleted": ["1115551234"]},
                 "authStatus": "Declined"}

        :Raises:
            * :class:`sprintkit.errors.ConnectionError`
            * :class:`sprintkit.errors.ParsingError`
            * :class:`sprintkit.errors.SandboxError`

        """
        params = {'key': self.config['key'],
                'timestamp': True,
                'sig': True}
        #The filter values must be passed under fixed parameter names,
        #not keyed by their own values.
        if status:
            params['status'] = status
        if mdn:
            params['mdn'] = mdn
        params = self.sign_params(params, self.config['secret'])
        try:
            response = self.get('devices.json', params_dict=params)
        except (RequestError, RequestTimeout) as e:
            raise errors.ConnectionError(str(e))
        data = self.parse_response(response)
        self.parse_errors(data)
        return data

    def add_device(self, mdn):
        """Add a device to this developer account.

        :Parameters: mdn (string) - The MDN to add to this account.

        :Returns: (dict) - The raw JSON Sandbox data.

        .. note::
            This method returns a dict containing the status of the add
            operation. On success it returns the following::

                {u'response': u'SUCCESS'}

            If the Sandbox could not add the device to the account it
            returns a failure message, for example::

                {u'response': u'FAILED'}

        :Raises:
            * :class:`sprintkit.errors.ConnectionError`
            * :class:`sprintkit.errors.ParsingError`
            * :class:`sprintkit.errors.SandboxError`

        """
        params = {'method': 'add',
                'mdn': mdn,
                'key': self.config['key'],
                'timestamp': True,
                'sig': True}
        params = self.sign_params(params, self.config['secret'])
        try:
            response = self.get('device.json', params_dict=params)
        except (RequestError, RequestTimeout) as e:
            raise errors.ConnectionError(str(e))
        data = self.parse_response(response)
        self.parse_errors(data)
        return data

    def delete_device(self, mdn):
        """Delete a device from this developer account.

        :Parameters: mdn (string) - The MDN to delete from this account.
:Returns: (dict) - The raw JSON sandbox data. .. note:: On success returns:: {u'response': u'SUCCESS'} :Raises: * :class:`sprintkit.errors.ConnectionError` * :class:`sprintkit.errors.ParsingError` * :class:`sprintkit.errors.SandboxError` """ params = {'method': 'delete', 'mdn': mdn, 'key': self.config['key'], 'timestamp': True, 'sig': True} params = self.sign_params(params, self.config['secret']) try: response = self.get('device.json', params_dict=params) except (RequestError, RequestTimeout) as e: raise errors.ConnectionError(str(e)) data = self.parse_response(response) self.parse_errors(data) return data<|fim▁end|>
data = self.parse_response(response)
<|file_name|>index.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
export * from './other-person-profile.component'
<|file_name|>LeaderboardPosition.js<|end_file_name|><|fim▁begin|>var BinaryWriter = require('./BinaryWriter'); function LeaderboardPosition(position) { this.place = position<|fim▁hole|>} module.exports = LeaderboardPosition; LeaderboardPosition.prototype.build = function() { var buf = new BinaryWriter(); buf.writeUInt8(0x30); buf.writeUInt16(this.place); return buf.toBuffer(); };<|fim▁end|>
<|file_name|>api.rs<|end_file_name|><|fim▁begin|>//!
//! Public API for bitcrust-db
//!
//!<|fim▁hole|>

use config;
use store;
use store::Store;
use block_add;

// Creates a store; mock interface
pub fn init() -> Store {

    let config = test_cfg!();
    let store = Store::new(&config);

    info!(store.logger, "Store initialized"; "dir" => config.root.to_str().unwrap());

    store
}

// Creates a store; mock interface
pub fn init_prs() -> Store {

    let config = config::Config::new("prs");
    let store = Store::new(&config);

    info!(store.logger, "Store initialized"; "dir" => config.root.to_str().unwrap());

    store
}

// This is a preliminary interface.
pub fn add_block(store: &mut store::Store, buffer: &[u8]) {

    block_add::add_block(store, buffer)
}

pub fn add_transaction(_: &[u8]) {
}

pub fn get_block(_: [u8; 32]) {
}

#[cfg(test)]
mod tests {

    use util::*;
    use super::*;

    #[test]
    pub fn test_add_block() {
        let hex = "0100000000000000000000000000000000000000000000000000000000000000\
                   000000003ba3edfd7a7b12b27ac72c3e67768f617fc81bc3888a51323a9fb8aa\
                   4b1e5e4a29ab5f49ffff001d1dac2b7c01010000000100000000000000000000\
                   00000000000000000000000000000000000000000000ffffffff4d04ffff001d\
                   0104455468652054696d65732030332f4a616e2f32303039204368616e63656c\
                   6c6f72206f6e206272696e6b206f66207365636f6e64206261696c6f75742066\
                   6f722062616e6b73ffffffff0100f2052a01000000434104678afdb0fe554827\
                   1967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4\
                   f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5fac00000000";

        let slice = &from_hex(hex);
        let mut store = init();

        add_block(&mut store, slice);
    }
}<|fim▁end|>
//! //!
<|file_name|>test_flipud.py<|end_file_name|><|fim▁begin|>import unittest <|fim▁hole|>from chainer import functions from chainer import gradient_check from chainer import testing from chainer.testing import attr @testing.parameterize(*testing.product({ 'shape': [(3,), (3, 4)], 'dtype': [numpy.float16, numpy.float32, numpy.float64], })) class TestFlipUD(unittest.TestCase): def setUp(self): self.x = numpy.random.uniform(0, 1, self.shape).astype(self.dtype) self.gy = numpy.random.uniform(0, 1, self.shape).astype(self.dtype) self.ggx = numpy.random.uniform(0, 1, self.shape).astype(self.dtype) def check_forward(self, x_data): x = chainer.Variable(x_data) y = functions.flipud(x) testing.assert_allclose(y.data, numpy.flipud(self.x)) def test_forward_cpu(self): self.check_forward(self.x) @attr.gpu def test_forward_gpu(self): self.check_forward(cuda.to_gpu(self.x)) def check_backward(self, x_data, y_grad): gradient_check.check_backward( functions.flipud, x_data, y_grad, dtype=numpy.float64) def test_backward_cpu(self): self.check_backward(self.x, self.gy) @attr.gpu def test_backward_gpu(self): self.check_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.gy)) def check_double_backward(self, x_data, y_grad, x_grad_grad): gradient_check.check_double_backward( functions.flipud, x_data, y_grad, x_grad_grad, dtype=numpy.float64, atol=5e-4, rtol=5e-3) def test_double_backward_cpu(self): self.check_double_backward(self.x, self.gy, self.ggx) @attr.gpu def test_double_backward_gpu(self): self.check_double_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.gy), cuda.to_gpu(self.ggx)) testing.run_module(__name__, __file__)<|fim▁end|>
import numpy import chainer from chainer.backends import cuda
<|file_name|>gyptest.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. __doc__ = """ gyptest.py -- test runner for GYP tests. """ import os import optparse import shlex import subprocess import sys class CommandRunner(object): """ Executor class for commands, including "commands" implemented by Python functions. """ verbose = True active = True def __init__(self, dictionary={}): self.subst_dictionary(dictionary) def subst_dictionary(self, dictionary): self._subst_dictionary = dictionary def subst(self, string, dictionary=None): """ Substitutes (via the format operator) the values in the specified dictionary into the specified command. The command can be an (action, string) tuple. In all cases, we perform substitution on strings and don't worry if something isn't a string. (It's probably a Python function to be executed.) """ if dictionary is None: dictionary = self._subst_dictionary if dictionary: try: string = string % dictionary except TypeError: pass return string def display(self, command, stdout=None, stderr=None): if not self.verbose: return if type(command) == type(()): func = command[0] args = command[1:] s = '%s(%s)' % (func.__name__, ', '.join(map(repr, args))) if type(command) == type([]): # TODO: quote arguments containing spaces # TODO: handle meta characters? s = ' '.join(command) else: s = self.subst(command) if not s.endswith('\n'): s += '\n' sys.stdout.write(s) sys.stdout.flush() def execute(self, command, stdout=None, stderr=None): """ Executes a single command. """ if not self.active: return 0 if type(command) == type(''): command = self.subst(command) cmdargs = shlex.split(command) if cmdargs[0] == 'cd': command = (os.chdir,) + tuple(cmdargs[1:]) if type(command) == type(()): func = command[0] args = command[1:] return func(*args) else: if stdout is sys.stdout: # Same as passing sys.stdout, except python2.4 doesn't fail on it. subout = None else: # Open pipe for anything else so Popen works on python2.4. subout = subprocess.PIPE if stderr is sys.stderr: # Same as passing sys.stderr, except python2.4 doesn't fail on it. suberr = None elif stderr is None: # Merge with stdout if stderr isn't specified. suberr = subprocess.STDOUT else: # Open pipe for anything else so Popen works on python2.4. suberr = subprocess.PIPE p = subprocess.Popen(command, shell=(sys.platform == 'win32'), stdout=subout, stderr=suberr) p.wait() if stdout is None: self.stdout = p.stdout.read() elif stdout is not sys.stdout: stdout.write(p.stdout.read()) if stderr not in (None, sys.stderr): stderr.write(p.stderr.read()) return p.returncode def run(self, command, display=None, stdout=None, stderr=None): """ Runs a single command, displaying it first. 
""" if display is None: display = command self.display(display) return self.execute(command, stdout, stderr) class Unbuffered(object): def __init__(self, fp): self.fp = fp def write(self, arg): self.fp.write(arg) self.fp.flush() def __getattr__(self, attr): return getattr(self.fp, attr) sys.stdout = Unbuffered(sys.stdout) sys.stderr = Unbuffered(sys.stderr) def is_test_name(f): return f.startswith('gyptest') and f.endswith('.py') def find_all_gyptest_files(directory): result = [] for root, dirs, files in os.walk(directory): if '.svn' in dirs: dirs.remove('.svn') result.extend([ os.path.join(root, f) for f in files if is_test_name(f) ]) result.sort() return result def main(argv=None):<|fim▁hole|> if argv is None: argv = sys.argv usage = "gyptest.py [-ahlnq] [-f formats] [test ...]" parser = optparse.OptionParser(usage=usage) parser.add_option("-a", "--all", action="store_true", help="run all tests") parser.add_option("-C", "--chdir", action="store", default=None, help="chdir to the specified directory") parser.add_option("-f", "--format", action="store", default='', help="run tests with the specified formats") parser.add_option("-G", '--gyp_option', action="append", default=[], help="Add -G options to the gyp command line") parser.add_option("-l", "--list", action="store_true", help="list available tests and exit") parser.add_option("-n", "--no-exec", action="store_true", help="no execute, just print the command line") parser.add_option("--passed", action="store_true", help="report passed tests") parser.add_option("--path", action="append", default=[], help="additional $PATH directory") parser.add_option("-q", "--quiet", action="store_true", help="quiet, don't print test command lines") opts, args = parser.parse_args(argv[1:]) if opts.chdir: os.chdir(opts.chdir) if opts.path: extra_path = [os.path.abspath(p) for p in opts.path] extra_path = os.pathsep.join(extra_path) os.environ['PATH'] = extra_path + os.pathsep + os.environ['PATH'] if not args: if not opts.all: sys.stderr.write('Specify -a to get all tests.\n') return 1 args = ['test'] tests = [] for arg in args: if os.path.isdir(arg): tests.extend(find_all_gyptest_files(os.path.normpath(arg))) else: if not is_test_name(os.path.basename(arg)): print >>sys.stderr, arg, 'is not a valid gyp test name.' 
sys.exit(1) tests.append(arg) if opts.list: for test in tests: print test sys.exit(0) CommandRunner.verbose = not opts.quiet CommandRunner.active = not opts.no_exec cr = CommandRunner() os.environ['PYTHONPATH'] = os.path.abspath('test/lib') if not opts.quiet: sys.stdout.write('PYTHONPATH=%s\n' % os.environ['PYTHONPATH']) passed = [] failed = [] no_result = [] if opts.format: format_list = opts.format.split(',') else: # TODO: not duplicate this mapping from pylib/gyp/__init__.py format_list = { 'aix5': ['make'], 'freebsd7': ['make'], 'freebsd8': ['make'], 'openbsd5': ['make'], 'cygwin': ['msvs'], 'win32': ['msvs', 'ninja'], 'linux2': ['make', 'ninja'], 'linux3': ['make', 'ninja'], 'darwin': ['make', 'ninja', 'xcode', 'xcode-ninja'], }[sys.platform] for format in format_list: os.environ['TESTGYP_FORMAT'] = format if not opts.quiet: sys.stdout.write('TESTGYP_FORMAT=%s\n' % format) gyp_options = [] for option in opts.gyp_option: gyp_options += ['-G', option] if gyp_options and not opts.quiet: sys.stdout.write('Extra Gyp options: %s\n' % gyp_options) for test in tests: status = cr.run([sys.executable, test] + gyp_options, stdout=sys.stdout, stderr=sys.stderr) if status == 2: no_result.append(test) elif status: failed.append(test) else: passed.append(test) if not opts.quiet: def report(description, tests): if tests: if len(tests) == 1: sys.stdout.write("\n%s the following test:\n" % description) else: fmt = "\n%s the following %d tests:\n" sys.stdout.write(fmt % (description, len(tests))) sys.stdout.write("\t" + "\n\t".join(tests) + "\n") if opts.passed: report("Passed", passed) report("Failed", failed) report("No result from", no_result) if failed: return 1 else: return 0 if __name__ == "__main__": sys.exit(main())<|fim▁end|>
<|file_name|>portal.ts<|end_file_name|><|fim▁begin|>import { TemplateRef, ViewContainerRef, ElementRef, ComponentRef, Injector } from '@angular/core';

// import {
//   NullPortalHostError,
//   PortalAlreadyAttachedError,
//   NoPortalAttachedError,
//   NullPortalError,
//   PortalHostAlreadyDisposedError,
//   UnknownPortalTypeError
// } from './portal-errors';
import { ComponentType } from '../overlay/generic-component-type';

/**
 * A `Portal` is something that you want to render somewhere else.
 * It can be attached to / detached from a `PortalHost`.
 */
export abstract class Portal<T> {
  private _attachedHost: PortalHost;

  /** Attach this portal to a host. */
  attach(host: PortalHost): T {
    if (host == null) {
      throw new Error(`no portal host`);
    }

    if (host.hasAttached()) {
      throw new Error(`Portal already attached`);
    }

    this._attachedHost = host;
    return <T> host.attach(this);
  }

  /** Detach this portal from its host */
  detach(): void {
    let host = this._attachedHost;
    if (host == null) {
      throw new Error(`No portal attached`);
    }

    this._attachedHost = null;
    return host.detach();
  }

  /** Whether this portal is attached to a host. */
  get isAttached(): boolean {
    return this._attachedHost != null;
  }

  /**
   * Sets the PortalHost reference without performing `attach()`. This is used directly by
   * the PortalHost when it is performing an `attach()` or `detach()`.
   */
  setAttachedHost(host: PortalHost) {
    this._attachedHost = host;
  }
}

/**
 * A `ComponentPortal` is a portal that instantiates some Component upon attachment.
 */
export class ComponentPortal<T> extends Portal<ComponentRef<T>> {
  /** The type of the component that will be instantiated for attachment. */
  component: ComponentType<T>;

  /**
   * [Optional] Where the attached component should live in Angular's *logical* component tree.
   * This is different from where the component *renders*, which is determined by the PortalHost.
   * The origin is necessary when the host is outside of the Angular application context.
   */
  viewContainerRef: ViewContainerRef;

  /** [Optional] Injector used for the instantiation of the component. */
  injector: Injector;

  constructor(
      component: ComponentType<T>,
      viewContainerRef: ViewContainerRef = null,
      injector: Injector = null) {
    super();
    this.component = component;
    this.viewContainerRef = viewContainerRef;
    this.injector = injector;
  }
}

/**
 * A `TemplatePortal` is a portal that represents some embedded template (TemplateRef).
 */
export class TemplatePortal extends Portal<Map<string, any>> {
  /** The embedded template that will be used to instantiate an embedded View in the host. */
  templateRef: TemplateRef<any>;

  /** Reference to the ViewContainer into which the template will be stamped out. */
  viewContainerRef: ViewContainerRef;

  /**
   * Additional locals for the instantiated embedded view.
   * These locals can be seen as "exports" for the template, such as how ngFor has
   * index / even / odd.
   * See https://angular.io/docs/ts/latest/api/core/EmbeddedViewRef-class.html
   */
  locals: Map<string, any> = new Map<string, any>();

  constructor(template: TemplateRef<any>, viewContainerRef: ViewContainerRef) {
    super();
    this.templateRef = template;
    this.viewContainerRef = viewContainerRef;
  }

  get origin(): ElementRef {
    return this.templateRef.elementRef;
  }

  attach(host: PortalHost, locals?: Map<string, any>): Map<string, any> {
    this.locals = locals == null ? new Map<string, any>() : locals;
    return super.attach(host);
  }

  detach(): void {
    this.locals = new Map<string, any>();
    return super.detach();
  }
}

/**
 * A `PortalHost` is a space that can contain a single `Portal`.
 */
export interface PortalHost {
  attach(portal: Portal<any>): any;

  detach(): any;
<|fim▁hole|>
  hasAttached(): boolean;
}

/**
 * Partial implementation of PortalHost that only deals with attaching either a
 * ComponentPortal or a TemplatePortal.
 */
export abstract class BasePortalHost implements PortalHost {
  /** The portal currently attached to the host. */
  private _attachedPortal: Portal<any>;

  /** A function that will permanently dispose this host. */
  private _disposeFn: () => void;

  /** Whether this host has already been permanently disposed. */
  private _isDisposed: boolean = false;

  /** Whether this host has an attached portal. */
  hasAttached(): boolean {
    return !!this._attachedPortal;
  }

  attach(portal: Portal<any>): any {
    if (!portal) {
      throw new Error(`Null portal`);
    }

    if (this.hasAttached()) {
      throw new Error(`Portal already attached`);
    }

    if (this._isDisposed) {
      throw new Error('Portal already disposed');
    }

    if (portal instanceof ComponentPortal) {
      this._attachedPortal = portal;
      return this.attachComponentPortal(portal);
    } else if (portal instanceof TemplatePortal) {
      this._attachedPortal = portal;
      return this.attachTemplatePortal(portal);
    }

    throw new Error('Unknown portal type');
  }

  abstract attachComponentPortal<T>(portal: ComponentPortal<T>): ComponentRef<T>;

  abstract attachTemplatePortal(portal: TemplatePortal): Map<string, any>;

  detach(): void {
    if (this._attachedPortal) {
      this._attachedPortal.setAttachedHost(null);
      this._attachedPortal = null;
    }

    this._invokeDisposeFn();
  }

  dispose() {
    if (this.hasAttached()) {
      this.detach();
    }

    this._invokeDisposeFn();
    this._isDisposed = true;
  }

  setDisposeFn(fn: () => void) {
    this._disposeFn = fn;
  }

  private _invokeDisposeFn() {
    if (this._disposeFn) {
      this._disposeFn();
      this._disposeFn = null;
    }
  }
}<|fim▁end|>
dispose(): void;
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""<|fim▁hole|>""" __version__ = '0.1.1'<|fim▁end|>
Collection of astronomy-related functions and utilities
<|file_name|>test_quantization_accuracy.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the<|fim▁hole|>#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
from collections import namedtuple
import tvm
from tvm import relay
from tvm.relay import quantize as qtz
import mxnet as mx
from mxnet import gluon
import logging
import os

logging.basicConfig(level=logging.INFO)

Config = namedtuple('Config', ['model', 'nbit_input', 'dtype_input', 'nbit_output', 'dtype_output', 'global_scale', 'expected_acc'])


def get_val_data(model_name, rec_val, batch_size, num_workers=4):
    rec_val = os.path.expanduser(rec_val)
    mean_rgb = [123.68, 116.779, 103.939]
    std_rgb = [58.393, 57.12, 57.375]
    def batch_fn(batch, ctx):
        data = gluon.utils.split_and_load(batch.data[0], ctx_list=ctx, batch_axis=0)
        label = gluon.utils.split_and_load(batch.label[0], ctx_list=ctx, batch_axis=0)
        return data, label

    img_size = 299 if model_name == 'inceptionv3' else 224
    val_data = mx.io.ImageRecordIter(
        path_imgrec         = rec_val,
        preprocess_threads  = num_workers,
        shuffle             = False,
        batch_size          = batch_size,
        resize              = 256,
        data_shape          = (3, img_size, img_size),
        mean_r              = mean_rgb[0],
        mean_g              = mean_rgb[1],
        mean_b              = mean_rgb[2],
        std_r               = std_rgb[0],
        std_g               = std_rgb[1],
        std_b               = std_rgb[2],
    )
    return val_data, batch_fn


def get_model(model_name, batch_size, qconfig, target=None, original=False, simulated=False):
    gluon_model = gluon.model_zoo.vision.get_model(model_name, pretrained=True)
    img_size = 299 if model_name == 'inceptionv3' else 224
    data_shape = (batch_size, 3, img_size, img_size)
    mod, params = relay.frontend.from_mxnet(gluon_model, {"data": data_shape})
    net = mod['main']

    with relay.build_config(opt_level=3):
        qfunc = relay.quantize.prerequisite_optimize(net, params=params)
    logging.debug('original')
    logging.debug(qfunc.astext(show_meta_data=False))

    if original:
        return qfunc

    with qconfig:
        logging.debug('current quantize config')
        logging.debug(qtz.current_qconfig())
        qfunc = qtz.quantize(qfunc)
        logging.debug('after quantize')
        logging.debug(qfunc.astext(show_meta_data=False))
    return qfunc


def eval_acc(model, dataset, batch_fn, target=tvm.target.cuda(), ctx=tvm.gpu(), log_interval=100):
    with relay.build_config(opt_level=3):
        graph, lib, params = relay.build(model, target)
    # create runtime module
    m = tvm.contrib.graph_runtime.create(graph, lib, ctx)
    m.set_input(**params)

    # setup evaluation metric
    dataset.reset()
    batch_size = dataset.batch_size
    acc_top1 = mx.metric.Accuracy()
    acc_top5 = mx.metric.TopKAccuracy(5)
    acc_top1.reset()
    acc_top5.reset()
    # Execute
    for i, batch in enumerate(dataset):
        data, label = batch_fn(batch, [mx.cpu(0)])
        m.run(data=data[0].asnumpy())
        out_arr = m.get_output(0)
        acc_top1.update(label, [mx.nd.array(out_arr.asnumpy())])
        acc_top5.update(label, [mx.nd.array(out_arr.asnumpy())])

        if not (i + 1) % log_interval:
            _, top1 = acc_top1.get()
            _, top5 = acc_top5.get()
            nsamples = (i + 1) * batch_size
            logging.info('[%d samples] validation: acc-top1=%f acc-top5=%f', nsamples,
top1, top5) logging.info('[final] validation: acc-top1=%f acc-top5=%f', top1, top5) return top1 def test_quantize_acc(cfg, rec_val): qconfig = qtz.qconfig(skip_conv_layers=[0], nbit_input=cfg.nbit_input, nbit_weight=cfg.nbit_input, global_scale=cfg.global_scale, dtype_input=cfg.dtype_input, dtype_weight=cfg.dtype_input, dtype_activation=cfg.dtype_output, debug_enabled_ops=None) model = get_model(cfg.model, 32, qconfig, tvm.target.cuda()) val_data, batch_fn = get_val_data(cfg.model, rec_val=rec_val, batch_size=32) acc = eval_acc(model, val_data, batch_fn) assert acc > cfg.expected_acc return acc if __name__ == "__main__": #TODO(for user): replace the line with the path to imagenet validation dataset rec_val = "/scratch/tqchen/imagenet/val.rec" results = [] configs = [ Config('mobilenetv2_1.0', nbit_input=8, dtype_input='int8', nbit_output=32, dtype_output='int32', global_scale=4.0, expected_acc=0.666), Config('resnet18_v1', nbit_input=8, dtype_input='int8', nbit_output=16, dtype_output='int16', global_scale=8.0, expected_acc=0.692), Config('resnet18_v1', nbit_input=8, dtype_input='int8', nbit_output=32, dtype_output='int32', global_scale=8.0, expected_acc=0.692), Config('resnet34_v1', nbit_input=8, dtype_input='int8', nbit_output=32, dtype_output='int32', global_scale=8.0, expected_acc=0.733), Config('resnet50_v1', nbit_input=8, dtype_input='int8', nbit_output=32, dtype_output='int32', global_scale=8.0, expected_acc=0.747), Config('resnet101_v1', nbit_input=8, dtype_input='int8', nbit_output=32, dtype_output='int32', global_scale=8.0, expected_acc=0.756), # TODO: need to fix accuracy # Config('mobilenetv2_1.0', nbit_input=8, dtype_input='int8', nbit_output=16, dtype_output='int16', global_scale=4.0), ] for config in configs: acc = test_quantize_acc(config, rec_val) results.append((config, acc)) for res in results: print(res)<|fim▁end|>
# "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at #
<|file_name|>XWActivity.java<|end_file_name|><|fim▁begin|>/* -*- compile-command: "cd ../../../../../; ant debug install"; -*- */ /* * Copyright 2010 by Eric House ([email protected]). All rights * reserved. * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License as * published by the Free Software Foundation; either version 2 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ package com.oliversride.wordryo; import junit.framework.Assert; import android.app.Activity; import android.app.Dialog; import android.os.Bundle; public class XWActivity extends Activity implements DlgDelegate.DlgClickNotify, MultiService.MultiEventListener { private final static String TAG = "XWActivity"; private DlgDelegate m_delegate; @Override protected void onCreate( Bundle savedInstanceState ) { DbgUtils.logf( "%s.onCreate(this=%H)", getClass().getName(), this ); super.onCreate( savedInstanceState ); m_delegate = new DlgDelegate( this, this, savedInstanceState ); } @Override protected void onStart() { DbgUtils.logf( "%s.onStart(this=%H)", getClass().getName(), this ); super.onStart(); } @Override protected void onResume() { DbgUtils.logf( "%s.onResume(this=%H)", getClass().getName(), this ); BTService.setListener( this ); SMSService.setListener( this ); super.onResume(); } @Override protected void onPause() { DbgUtils.logf( "%s.onPause(this=%H)", getClass().getName(), this ); BTService.setListener( null ); SMSService.setListener( null ); super.onPause(); } @Override protected void onStop() { DbgUtils.logf( "%s.onStop(this=%H)", getClass().getName(), this ); super.onStop(); } @Override protected void onDestroy() { DbgUtils.logf( "%s.onDestroy(this=%H); isFinishing=%b", getClass().getName(), this, isFinishing() ); super.onDestroy(); } @Override protected void onSaveInstanceState( Bundle outState ) { super.onSaveInstanceState( outState ); m_delegate.onSaveInstanceState( outState ); } @Override protected Dialog onCreateDialog( int id ) { Dialog dialog = super.onCreateDialog( id ); if ( null == dialog ) { DbgUtils.logf( "%s.onCreateDialog() called", getClass().getName() ); dialog = m_delegate.onCreateDialog( id ); } return dialog; } // these are duplicated in XWListActivity -- sometimes multiple // inheritance would be nice to have... 
protected void showAboutDialog() { m_delegate.showAboutDialog(); } protected void showNotAgainDlgThen( int msgID, int prefsKey, int action ) { m_delegate.showNotAgainDlgThen( msgID, prefsKey, action ); } protected void showNotAgainDlgThen( int msgID, int prefsKey ) { m_delegate.showNotAgainDlgThen( msgID, prefsKey ); } protected void showOKOnlyDialog( int msgID ) { m_delegate.showOKOnlyDialog( msgID ); } protected void showOKOnlyDialog( String msg ) { m_delegate.showOKOnlyDialog( msg ); } protected void showDictGoneFinish() { m_delegate.showDictGoneFinish(); } protected void showConfirmThen( int msgID, int action ) { m_delegate.showConfirmThen( getString(msgID), action ); } protected void showConfirmThen( String msg, int action ) { m_delegate.showConfirmThen( msg, action ); } protected void showConfirmThen( int msg, int posButton, int action ) { m_delegate.showConfirmThen( getString(msg), posButton, action ); } public void showEmailOrSMSThen( int action ) { m_delegate.showEmailOrSMSThen( action ); } protected void doSyncMenuitem() { m_delegate.doSyncMenuitem(); } protected void launchLookup( String[] words, int lang ) { m_delegate.launchLookup( words, lang, false ); } protected void startProgress( int id ) { m_delegate.startProgress( id ); } protected void stopProgress() { m_delegate.stopProgress(); } protected boolean post( Runnable runnable ) { return m_delegate.post( runnable ); } // DlgDelegate.DlgClickNotify interface public void dlgButtonClicked( int id, int which ) { Assert.fail(); } // BTService.MultiEventListener interface public void eventOccurred( MultiService.MultiEvent event, final Object ... args )<|fim▁hole|>}<|fim▁end|>
{ m_delegate.eventOccurred( event, args ); }
<|file_name|>oldest_unique.py<|end_file_name|><|fim▁begin|>class Node:
    def __init__(self, value):
        self.value = value
        self.next = None
        self.prev = None


class OldestUnique:
    def __init__(self):
        self.uniq = {}
        self.seen = set()
        self.head = None
        self.tail = None

    def feed(self, value):
        if value in self.uniq:
            # value is no longer unique: unlink its node and drop it from
            # the uniq dict. The seen set stops it from being re-added, and
            # keeping the stale node around would corrupt later unlinks.
            node = self.uniq.pop(value)
            if node.prev is not None:
                node.prev.next = node.next
            else:
                self.head = node.next
            if node.next is not None:
                node.next.prev = node.prev
            else:
                self.tail = node.prev
        elif value not in self.seen:
            node = Node(value)
            if self.head is None:
                self.tail = node
            else:
                node.next = self.head
                self.head.prev = node
            self.head = node
            self.uniq[value] = node
            self.seen.add(value)<|fim▁hole|>
            return self.tail.value<|fim▁end|>
def query(self): if self.tail is not None:
<|file_name|>setup.py<|end_file_name|><|fim▁begin|># coding=utf8 from setuptools import setup setup(name='fbones', version='0.0.5', description='A bootstrap toolkit to kickoff a flask project', url='https://github.com/ipconfiger/fbones', author='Alexander.Li', author_email='[email protected]', license='GNU GENERAL PUBLIC LICENSE',<|fim▁hole|> 'flask', 'click', 'alembic', 'flask_doc', 'gunicorn', 'meinheld' ], entry_points={ 'console_scripts': ['fbones=fbones.fbones:main'], }, zip_safe=False)<|fim▁end|>
packages=['fbones'], install_requires=[
<|file_name|>MBeanTyper.java<|end_file_name|><|fim▁begin|>package web.magic.jvm; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.lang.reflect.UndeclaredThrowableException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.management.MBeanException; import javax.management.MBeanServer; import javax.management.ObjectName; class MBeanTyper { static final boolean DEBUG = Boolean.getBoolean("jboss.jmx.debug"); /** * create a typed object from an mbean */ public static final Object typeMBean(MBeanServer server, ObjectName mbean, Class<?> mainInterface) throws Exception { List<Class<?>> interfaces = new ArrayList<Class<?>>(); if (mainInterface.isInterface()) { interfaces.add(mainInterface); } addInterfaces(mainInterface.getInterfaces(), interfaces); Class<?> cl[] = (Class[]) interfaces.toArray(new Class[interfaces.size()]); if (DEBUG) { System.err.println("typeMean->server=" + server + ",mbean=" + mbean + ",mainInterface=" + mainInterface); for (int c = 0; c < cl.length; c++) { System.err.println(" :" + cl[c]); } } return Proxy.newProxyInstance(Thread.currentThread().getContextClassLoader(), cl, new MBeanTyperInvoker(server, mbean)); } private static final void addInterfaces(Class<?> cl[], List<Class<?>> list) { if (cl == null) return; for (int c = 0; c < cl.length; c++) { list.add(cl[c]); addInterfaces(cl[c].getInterfaces(), list); } } } /** * MBeanTyperInvoker handles method invocations against the MBeanTyper target * object and forwards them to the MBeanServer and ObjectName for invocation. * * @author <a href="mailto:[email protected]">Jeff Haynie</a> */ final class MBeanTyperInvoker implements java.lang.reflect.InvocationHandler { private final MBeanServer server; private final ObjectName mbean; private final Map<Method, String[]> signatureCache = Collections.synchronizedMap(new HashMap<Method, String[]>()); MBeanTyperInvoker(MBeanServer server, ObjectName mbean) { this.server = server; this.mbean = mbean; } private boolean isJMXAttribute(Method m) {<|fim▁hole|> return (name.startsWith("get")); } public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { if (MBeanTyper.DEBUG) { System.err.println(" ++ method=" + method.getName() + ",args=" + args); } try { if (method.getDeclaringClass() == Object.class) { String name = method.getName(); if (name.equals("hashCode")) { return new Integer(this.hashCode()); } else if (name.equals("toString")) { return this.toString(); } else if (name.equals("equals")) { // FIXME: this needs to be reviewed - we should be // smarter about this ... return new Boolean(equals(args[0])); } } else if (isJMXAttribute(method) && (args == null || args.length <= 0)) { String name = method.getName().substring(3); return server.getAttribute(mbean, name); } String sig[] = (String[]) signatureCache.get(method); if (sig == null) { // get the method signature from the method argument directly // vs. 
the arguments passed, since there may be primitives that // are wrapped as objects in the arguments Class<?> _args[] = method.getParameterTypes(); if (_args != null && _args.length > 0) { sig = new String[_args.length]; for (int c = 0; c < sig.length; c++) { if (_args[c] != null) { sig[c] = _args[c].getName(); } } } else { sig = new String[0]; } signatureCache.put(method, sig); } return server.invoke(mbean, method.getName(), args, sig); } catch (Throwable t) { if (MBeanTyper.DEBUG) { t.printStackTrace(); } if (t instanceof UndeclaredThrowableException) { UndeclaredThrowableException ut = (UndeclaredThrowableException) t; throw ut.getUndeclaredThrowable(); } else if (t instanceof InvocationTargetException) { InvocationTargetException it = (InvocationTargetException) t; throw it.getTargetException(); } else if (t instanceof MBeanException) { MBeanException me = (MBeanException) t; throw me.getTargetException(); } else { throw t; } } } }<|fim▁end|>
String name = m.getName();
<|file_name|>JustNotSorrySpec.test.js<|end_file_name|><|fim▁begin|>import { h } from 'preact'; import JustNotSorry from '../src/components/JustNotSorry.js'; import { configure, mount } from 'enzyme'; import Adapter from 'enzyme-adapter-preact-pure'; configure({ adapter: new Adapter() }); describe('JustNotSorry', () => { const justNotSorry = mount(<JustNotSorry />); let editableDiv1; let editableDiv2; let editableDiv3; let wrapper; let instance; const mutationObserverMock = jest.fn(function MutationObserver(callback) { this.observe = jest.fn(); this.disconnect = jest.fn(); this.trigger = (mockedMutationList) => { callback(mockedMutationList, this); }; }); document.createRange = jest.fn(() => ({ setStart: jest.fn(), setEnd: jest.fn(), commonAncestorContainer: { nodeName: 'BODY', ownerDocument: document, }, startContainer: 'test', getClientRects: jest.fn(() => [{}]), })); global.MutationObserver = mutationObserverMock; function generateEditableDiv(id, innerHtml) { return mount( <div id={id} contentEditable={'true'}> {innerHtml ? innerHtml : ''} </div> ); } beforeAll(() => { editableDiv1 = generateEditableDiv('div-1'); editableDiv2 = generateEditableDiv('div-2', 'test just test'); editableDiv3 = generateEditableDiv('div-3', 'test justify test'); }); describe('#addObserver', () => { it('adds an observer that listens for structural changes to the content editable div', () => { // remount JNS to trigger constructor functions justNotSorry.unmount(); justNotSorry.mount(); const instance = justNotSorry.instance(); const spy = jest.spyOn(instance, 'addObserver'); const node = mount( <div id={'div-focus'} contentEditable={'true'} onFocus={instance.addObserver.bind(instance)} ></div> ); node.simulate('focus'); // There should be the document observer and the observer specifically for the target div const observerInstances = mutationObserverMock.mock.instances; const observerInstance = observerInstances[observerInstances.length - 1]; expect(observerInstances.length).toBe(2); expect(spy).toHaveBeenCalledTimes(1); expect(observerInstance.observe).toHaveBeenCalledWith(node.getDOMNode(), { attributes: false, characterData: false, childList: true, subtree: true, }); node.unmount(); }); it('starts checking for warnings', () => { const instance = justNotSorry.instance(); const spy = jest.spyOn(instance, 'checkForWarnings'); const node = mount( <div id={'div-focus'} contentEditable={'true'} onFocus={instance.addObserver.bind(instance)} ></div> ); node.simulate('focus'); expect(spy).toHaveBeenCalled(); node.unmount(); }); it('adds warnings to the content editable div', () => { const instance = justNotSorry.instance(); const spy = jest.spyOn(instance, 'addWarnings'); const node = mount( <div id={'div-focus'} contentEditable={'true'} onFocus={instance.addObserver.bind(instance)} ></div> ); node.simulate('focus'); expect(spy).toHaveBeenCalledWith(node.getDOMNode().parentNode); node.unmount(); }); }); describe('#removeObserver', () => { it('removes any existing warnings', () => { const instance = justNotSorry.instance(); const spy = jest.spyOn(instance, 'removeObserver'); const node = mount( <div id={'div-focus'} contentEditable={'true'} onFocus={instance.addObserver.bind(instance)} onBlur={instance.removeObserver.bind(instance)} > just not sorry </div> ); node.simulate('focus'); expect(justNotSorry.state('warnings').length).toEqual(2); // remount the node node.mount(); node.simulate('blur'); expect(spy).toHaveBeenCalledTimes(1); expect(justNotSorry.state('warnings').length).toEqual(0); node.unmount(); }); it('no 
longer checks for warnings on input events', () => { justNotSorry.unmount(); justNotSorry.mount(); const instance = justNotSorry.instance(); const node = mount( <div id={'div-remove'} contentEditable={'true'} onFocus={instance.addObserver.bind(instance)} onBlur={instance.removeObserver.bind(instance)} ></div> ); node.simulate('focus'); node.simulate('blur'); const spy = jest.spyOn(instance, 'checkForWarnings'); node.simulate('input'); expect(spy).not.toHaveBeenCalled(); node.unmount(); }); it('disconnects the observer', () => { const instance = justNotSorry.instance(); const spy = jest.spyOn(instance, 'removeObserver'); const node = mount( <div id={'div-disconnect'} contentEditable={'true'} onFocus={instance.addObserver.bind(instance)} onBlur={instance.removeObserver.bind(instance)} ></div> ); node.simulate('focus'); node.simulate('blur'); // There should be the document observer and the observer specifically for the target div const observerInstances = mutationObserverMock.mock.instances; const observerInstance = observerInstances[observerInstances.length - 1]; expect(spy).toHaveBeenCalled(); expect(observerInstance.disconnect).toHaveBeenCalled(); node.unmount(); }); }); describe('#addWarning', () => { beforeEach(() => { wrapper = mount(<JustNotSorry />); instance = wrapper.instance(); }); it('adds a warning for a single keyword', () => { const node = editableDiv2.getDOMNode(); instance.addWarning(node, 'just', 'warning message'); expect(wrapper.state('warnings').length).toEqual(1); expect(wrapper.state('warnings')[0]).toEqual( expect.objectContaining({ keyword: 'just', message: 'warning message', parentNode: node, }) ); }); it('does not add warnings for partial matches', () => { const node = editableDiv3.getDOMNode(); instance.addWarning(node, 'just', 'warning message'); expect(wrapper.state('warnings').length).toEqual(0); expect(wrapper.state('warnings')).toEqual([]); }); it('matches case insensitive', () => { const node = generateEditableDiv('div-case', 'jUsT kidding').getDOMNode(); instance.addWarning(node, 'just', 'warning message'); expect(wrapper.state('warnings').length).toEqual(1); expect(wrapper.state('warnings')[0]).toEqual( expect.objectContaining({ keyword: 'just', message: 'warning message', parentNode: node, }) ); }); it('catches keywords with punctuation', () => { const node = generateEditableDiv( 'div-punctuation', 'just. test' ).getDOMNode(); instance.addWarning(node, 'just', 'warning message'); expect(wrapper.state('warnings').length).toEqual(1); expect(wrapper.state('warnings')[0]).toEqual( expect.objectContaining({ keyword: 'just', message: 'warning message', parentNode: node, }) ); }); it('matches phrases', () => { const node = generateEditableDiv( 'div-phrase', 'my cat is so sorry because of you' ).getDOMNode(); instance.addWarning(node, 'so sorry', 'warning message'); expect(wrapper.state('warnings').length).toEqual(1); expect(wrapper.state('warnings')[0]).toEqual( expect.objectContaining({ keyword: 'so sorry', message: 'warning message', parentNode: node, }) );<|fim▁hole|> it('does not add warnings for tooltip matches', () => { document.createRange = jest.fn(() => ({ setStart: jest.fn(), setEnd: jest.fn(), commonAncestorContainer: { nodeName: 'BODY', ownerDocument: document, }, startContainer: "The word 'very' does not communicate enough information. Find a stronger, more meaningful adverb, or omit it completely. 
--Andrea Ayres", getClientRects: jest.fn(() => [{}]), })); const node = editableDiv3.getDOMNode(); instance.addWarning(node, 'very', 'warning message'); expect(wrapper.state('warnings').length).toEqual(0); expect(wrapper.state('warnings')).toEqual([]); }); }); describe('#addWarnings', () => { beforeEach(() => { wrapper = mount(<JustNotSorry />); instance = wrapper.instance(); }); it('does nothing when given an empty string', () => { const node = editableDiv1.getDOMNode(); instance.addWarnings(node); expect(wrapper.state('warnings').length).toEqual(0); expect(wrapper.state('warnings')).toEqual([]); }); it('adds warnings to all keywords', () => { const node = generateEditableDiv( 'div-keywords', 'I am just so sorry. Yes, just.' ).getDOMNode(); instance.addWarnings(node); expect(wrapper.state('warnings').length).toEqual(3); }); }); describe('#checkForWarnings', () => { const instance = justNotSorry.instance(); const spy = jest.spyOn(instance, 'checkForWarnings'); const node = mount( <div onInput={instance.checkForWarnings}>just not sorry</div> ); it('updates warnings each time input is triggered', () => { node.simulate('input'); node.simulate('input'); node.simulate('input'); expect(spy).toHaveBeenCalledTimes(3); node.unmount(); }); }); });<|fim▁end|>
});
<|file_name|>http.rs<|end_file_name|><|fim▁begin|>#[macro_use] extern crate criterion; #[macro_use] extern crate combine; use std::fmt; use { combine::{ many, many1, parser::range::{range, take_while1}, stream::easy, token, ParseError, Parser, RangeStream, }, criterion::{black_box, Bencher, Criterion}, }; #[derive(Debug)] struct Request<'a> { method: &'a [u8], uri: &'a [u8], version: &'a [u8], } #[derive(Debug)] struct Header<'a> { name: &'a [u8], value: Vec<&'a [u8]>, } fn is_token(c: u8) -> bool { match c { 128..=255 | 0..=31 | b'(' | b')' | b'<' | b'>' | b'@' | b',' | b';' | b':' | b'\\' | b'"' | b'/' | b'[' | b']' | b'?' | b'=' | b'{' | b'}' | b' ' => false, _ => true, } } fn is_horizontal_space(c: u8) -> bool { c == b' ' || c == b'\t' } fn is_space(c: u8) -> bool { c == b' ' } fn is_not_space(c: u8) -> bool { c != b' ' } fn is_http_version(c: u8) -> bool { c >= b'0' && c <= b'9' || c == b'.' } fn end_of_line<'a, Input>() -> impl Parser<Input, Output = u8> where Input: RangeStream<Token = u8, Range = &'a [u8]>, Input::Error: ParseError<Input::Token, Input::Range, Input::Position>, { (token(b'\r'), token(b'\n')).map(|_| b'\r').or(token(b'\n')) } fn message_header<'a, Input>() -> impl Parser<Input, Output = Header<'a>> where Input: RangeStream<Token = u8, Range = &'a [u8]>, Input::Error: ParseError<Input::Token, Input::Range, Input::Position>, { let message_header_line = ( take_while1(is_horizontal_space), take_while1(|c| c != b'\r' && c != b'\n'), end_of_line(), ) .map(|(_, line, _)| line); struct_parser!(Header { name: take_while1(is_token), _: token(b':'), value: many1(message_header_line), }) } fn parse_http_request<'a, Input>( input: Input, ) -> Result<((Request<'a>, Vec<Header<'a>>), Input), Input::Error> where Input: RangeStream<Token = u8, Range = &'a [u8]>, Input::Error: ParseError<Input::Token, Input::Range, Input::Position>, { let http_version = range(&b"HTTP/"[..]).with(take_while1(is_http_version)); let request_line = struct_parser!(Request { method: take_while1(is_token), _: take_while1(is_space),<|fim▁hole|> uri: take_while1(is_not_space), _: take_while1(is_space), version: http_version, }); let mut request = ( request_line, end_of_line(), many(message_header()), end_of_line(), ) .map(|(request, _, headers, _)| (request, headers)); request.parse(input) } static REQUESTS: &'static [u8] = include_bytes!("http-requests.txt"); fn http_requests_small(b: &mut Bencher<'_>) { http_requests_bench(b, easy::Stream(REQUESTS)) } fn http_requests_large(b: &mut Bencher<'_>) { use std::iter; let mut buffer = Vec::with_capacity(REQUESTS.len() * 5); for buf in iter::repeat(REQUESTS).take(5) { buffer.extend_from_slice(buf); } http_requests_bench(b, easy::Stream(&buffer[..])) } fn http_requests_large_cheap_error(b: &mut Bencher<'_>) { use std::iter; let mut buffer = Vec::with_capacity(REQUESTS.len() * 5); for buf in iter::repeat(REQUESTS).take(5) { buffer.extend_from_slice(buf); } http_requests_bench(b, &buffer[..]) } fn http_requests_bench<'a, Input>(b: &mut Bencher<'_>, buffer: Input) where Input: RangeStream<Token = u8, Range = &'a [u8]> + Clone, Input::Error: ParseError<Input::Token, Input::Range, Input::Position> + fmt::Debug, { b.iter(|| { let mut buf = black_box(buffer.clone()); while buf.clone().uncons().is_ok() { match parse_http_request(buf) { Ok(((_, _), b)) => { buf = b; } Err(err) => panic!("{:?}", err), } } }); } fn http_requests(c: &mut Criterion) { c.bench_function("http_requests_small", http_requests_small); c.bench_function("http_requests_large", http_requests_large); 
c.bench_function( "http_requests_large_cheap_error", http_requests_large_cheap_error, ); } criterion_group!(http, http_requests,); criterion_main!(http);<|fim▁end|>
<|file_name|>sqllite.go<|end_file_name|><|fim▁begin|>package database

import (
	"database/sql"

	// blank import registers the "sqlite3" driver with database/sql
	_ "github.com/mattn/go-sqlite3"
)

var db *sql.DB

// OpenDB opens the database connection
func OpenDB() {
	openSQLite()
}

// openSQLite opens SQLite connection<|fim▁hole|>
	db, err = sql.Open("sqlite3", "adnalerts.db?loc.auto")
	if err != nil {
		panic(err)
	}
	err = db.Ping()
	if err != nil {
		panic(err)
	}
}

// Query runs the given SQL query and returns the resulting rows
func Query(s string, args ...interface{}) (*sql.Rows, error) {
	rows, err := db.Query(s, args...)
	if err != nil {
		return nil, err
	}
	return rows, nil
}

// Exec executes the given SQL statement
func Exec(s string, args ...interface{}) (sql.Result, error) {
	res, err := db.Exec(s, args...)
	if err != nil {
		return nil, err
	}
	return res, nil
}<|fim▁end|>
func openSQLite() { var err error
<|file_name|>sparc64.rs<|end_file_name|><|fim▁begin|>// Copyright 2014-2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // FIXME: This needs an audit for correctness and completeness. use abi::call::{FnType, ArgType, Reg, RegKind, Uniform}; use abi::{HasDataLayout, LayoutOf, TyLayout, TyLayoutMethods}; fn is_homogeneous_aggregate<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>) -> Option<Uniform> where Ty: TyLayoutMethods<'a, C> + Copy, C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout { arg.layout.homogeneous_aggregate(cx).and_then(|unit| { // Ensure we have at most eight uniquely addressable members. if arg.layout.size > unit.size.checked_mul(8, cx).unwrap() { return None; } let valid_unit = match unit.kind { RegKind::Integer => false, RegKind::Float => true, RegKind::Vector => arg.layout.size.bits() == 128 }; if valid_unit { Some(Uniform { unit, total: arg.layout.size }) } else { None } }) } fn classify_ret_ty<'a, Ty, C>(cx: &C, ret: &mut ArgType<'a, Ty>) where Ty: TyLayoutMethods<'a, C> + Copy, C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout { if !ret.layout.is_aggregate() { ret.extend_integer_width_to(64); return; } if let Some(uniform) = is_homogeneous_aggregate(cx, ret) { ret.cast_to(uniform); return; } let size = ret.layout.size; let bits = size.bits(); if bits <= 256 { let unit = Reg::i64(); ret.cast_to(Uniform { unit, total: size }); return; } // don't return aggregates in registers ret.make_indirect(); } fn classify_arg_ty<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>) where Ty: TyLayoutMethods<'a, C> + Copy, C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout { if !arg.layout.is_aggregate() { arg.extend_integer_width_to(64); return; } if let Some(uniform) = is_homogeneous_aggregate(cx, arg) { arg.cast_to(uniform); return; } <|fim▁hole|> if total.bits() > 128 { arg.make_indirect(); return; } arg.cast_to(Uniform { unit: Reg::i64(), total }); } pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<'a, Ty>) where Ty: TyLayoutMethods<'a, C> + Copy, C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout { if !fty.ret.is_ignore() { classify_ret_ty(cx, &mut fty.ret); } for arg in &mut fty.args { if arg.is_ignore() { continue; } classify_arg_ty(cx, arg); } }<|fim▁end|>
let total = arg.layout.size;
<|file_name|>EventBus_solution.js<|end_file_name|><|fim▁begin|>/** * This file is the event bus demo **/ // Global namespace var EventBus = (function() { var // List of optional events events = {}, // Reference to the last fired event lastEvent, domNodes = [] ; function init() { domNodes['create-name'] = document.querySelector('[data-create-name]'); domNodes['create-handler'] = document.querySelector('[data-create-handler]'); domNodes['events-list'] = document.querySelector('.js-events-list'); domNodes['last-event'] = document.querySelector('[data-last-event]') } /** * Register new event * * @param {Object} eventData - The event data. * The eventData should contain the following info * * {eventData} : name: The name of the event * handler: The callback to be called when the event is triggered */ function registerEvent(eventData) { var name; // Validation if (!eventData) { console.log('Missing parameters'); return; } // Get the event name or set default name name = eventData.name || 'event' + Date.now(); // Store the event in the events list events[name] = eventData; // add event listener to print out when the vent was called document.addEventListener(name, function(myEvent) { console.log(myEvent); // Update the last event domNodes['last-event'].textContent = JSON.stringify(myEvent.detail); }); // Update the UI updateList(); } /** * Update the GUI events list */ function updateList() { // Get the list of the events var events = EventBus.getEventsList().sort(), // The new list markup listFragment = document.createDocumentFragment() ; // Clear the previous list domNodes['events-list'].innerHTML = ''; // Loop and output the events events.forEach(function(eventName) { var radio = document.createElement('input'), label = document.createElement('label'); radio.type = 'checkbox'; radio.name = eventName; radio.addEventListener('click', EventBus.fireEvent); listFragment.appendChild(radio); label.textContent = eventName; listFragment.appendChild(label); listFragment.appendChild(document.createElement('br')); }); domNodes['events-list'].appendChild(listFragment); } /** * Create new event. */ function createEvent(e) { // We don't do anything with the event registerEvent({ "name": domNodes['create-name'].value, "handler": domNodes['create-handler'].value }); domNodes['create-name'].value = ''; domNodes['create-handler'].value = ''; } <|fim▁hole|> function fireEvent() { var event; // un-check the previous event lastEvent ? lastEvent.checked = false : undefined; // Store the current event lastEvent = this; event = new CustomEvent( this.name, { detail: { message: "Fired event: " + this.name, time: Date.now() }, bubbles: true, cancelable: true } ); // Dispatch the event. document.dispatchEvent(event); } // Prepare the object init(); return { createEvent: createEvent, registerEvent: registerEvent, fireEvent: fireEvent, getEventsList: function() { return Object.keys(events); } } })(); // Bind the click button document.querySelector('.js-create-event').addEventListener('click', EventBus.createEvent);<|fim▁end|>
/**
     * Fire the event.
     */
    function fireEvent() {
        var event;

        // un-check the previous event
        if (lastEvent) {
            lastEvent.checked = false;
        }

        // Store the current event
        lastEvent = this;

        event = new CustomEvent(
            this.name,
            {
                detail: {
                    message: "Fired event: " + this.name,
                    time: Date.now()
                },
                bubbles: true,
                cancelable: true
            }
        );

        // Dispatch the event.
        document.dispatchEvent(event);
    }

    // Prepare the object
    init();

    return {
        createEvent: createEvent,
        registerEvent: registerEvent,
        fireEvent: fireEvent,
        getEventsList: function() {
            return Object.keys(events);
        }
    }
})();

// Bind the click button
document.querySelector('.js-create-event').addEventListener('click', EventBus.createEvent);<|fim▁end|>
<|file_name|>treeview.cpp<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2000 Matthias Elter <[email protected]> * Copyright (C) 2001-2002 Raffaele Sandrini <[email protected]) * Copyright (C) 2003 Waldo Bastian <[email protected]> * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. * */ #include <unistd.h> #include <tqcstring.h> #include <tqcursor.h> #include <tqdatastream.h> #include <tqdir.h> #include <tqdragobject.h> #include <tqfileinfo.h> #include <tqheader.h> #include <tqpainter.h> #include <tqpopupmenu.h> #include <tqregexp.h> #include <tqstringlist.h> #include <tdeglobal.h> #include <kstandarddirs.h> #include <kinputdialog.h> #include <tdelocale.h> #include <ksimpleconfig.h> #include <kdebug.h> #include <kiconloader.h> #include <kdesktopfile.h> #include <tdeaction.h> #include <tdemessagebox.h> #include <tdeapplication.h> #include <kservice.h> #include <kservicegroup.h> #include <tdemultipledrag.h> #include <kurldrag.h> #include "treeview.h" #include "treeview.moc" #include "khotkeys.h" #include "menufile.h" #include "menuinfo.h" #define MOVE_FOLDER 'M' #define COPY_FOLDER 'C' #define MOVE_FILE 'm' #define COPY_FILE 'c' #define COPY_SEPARATOR 'S' TreeItem::TreeItem(TQListViewItem *parent, TQListViewItem *after, const TQString& menuId, bool __init) :TQListViewItem(parent, after), _hidden(false), _init(__init), _layoutDirty(false), _menuId(menuId), m_folderInfo(0), m_entryInfo(0) {} TreeItem::TreeItem(TQListView *parent, TQListViewItem *after, const TQString& menuId, bool __init) : TQListViewItem(parent, after), _hidden(false), _init(__init), _layoutDirty(false), _menuId(menuId), m_folderInfo(0), m_entryInfo(0) {} void TreeItem::setName(const TQString &name) { _name = name; update(); } void TreeItem::setHidden(bool b) { if (_hidden == b) return; _hidden = b; update(); } void TreeItem::update() { TQString s = _name; if (_hidden) s += i18n(" [Hidden]"); setText(0, s); } void TreeItem::setOpen(bool o) { if (o) load(); TQListViewItem::setOpen(o); } void TreeItem::load() { if (m_folderInfo && !_init) { _init = true; TreeView *tv = static_cast<TreeView *>(listView()); tv->fillBranch(m_folderInfo, this); } } void TreeItem::paintCell ( TQPainter * p, const TQColorGroup & cg, int column, int width, int align ) { TQListViewItem::paintCell(p, cg, column, width, align); if (!m_folderInfo && !m_entryInfo) { // Draw Separator int h = (height() / 2) -1; if (isSelected()) p->setPen( cg.highlightedText() ); else p->setPen( cg.text() ); p->drawLine(0, h, width, h); } } void TreeItem::setup() { TQListViewItem::setup(); if (!m_folderInfo && !m_entryInfo) setHeight(8); } static TQPixmap appIcon(const TQString &iconName) { TQPixmap normal = TDEGlobal::iconLoader()->loadIcon(iconName, TDEIcon::Small, 0, TDEIcon::DefaultState, 0L, true); // make sure they are not larger than 20x20 if (normal.width() > 20 || normal.height() > 20) { TQImage tmp = 
normal.convertToImage(); tmp = tmp.smoothScale(20, 20); normal.convertFromImage(tmp); } return normal; } TreeView::TreeView( bool controlCenter, TDEActionCollection *ac, TQWidget *parent, const char *name ) : TDEListView(parent, name), m_ac(ac), m_rmb(0), m_clipboard(0), m_clipboardFolderInfo(0), m_clipboardEntryInfo(0), m_controlCenter(controlCenter), m_layoutDirty(false) { setFrameStyle(TQFrame::WinPanel | TQFrame::Sunken); setAllColumnsShowFocus(true); setRootIsDecorated(true); setSorting(-1); setAcceptDrops(true); setDropVisualizer(true); setDragEnabled(true); setMinimumWidth(240); addColumn(""); header()->hide(); connect(this, TQT_SIGNAL(dropped(TQDropEvent*, TQListViewItem*, TQListViewItem*)), TQT_SLOT(slotDropped(TQDropEvent*, TQListViewItem*, TQListViewItem*))); connect(this, TQT_SIGNAL(clicked( TQListViewItem* )), TQT_SLOT(itemSelected( TQListViewItem* ))); connect(this,TQT_SIGNAL(selectionChanged ( TQListViewItem * )), TQT_SLOT(itemSelected( TQListViewItem* ))); connect(this, TQT_SIGNAL(rightButtonPressed(TQListViewItem*, const TQPoint&, int)), TQT_SLOT(slotRMBPressed(TQListViewItem*, const TQPoint&))); // connect actions connect(m_ac->action("newitem"), TQT_SIGNAL(activated()), TQT_SLOT(newitem())); connect(m_ac->action("newsubmenu"), TQT_SIGNAL(activated()), TQT_SLOT(newsubmenu())); if (m_ac->action("newsep")) connect(m_ac->action("newsep"), TQT_SIGNAL(activated()), TQT_SLOT(newsep())); m_menuFile = new MenuFile( locateLocal("xdgconf-menu", "applications-tdemenuedit.menu")); m_rootFolder = new MenuFolderInfo; m_separator = new MenuSeparatorInfo; m_drag = 0; // Read menu format configuration information TDESharedConfig::Ptr pConfig = TDESharedConfig::openConfig("kickerrc"); pConfig->setGroup("menus"); m_detailedMenuEntries = pConfig->readBoolEntry("DetailedMenuEntries",true); if (m_detailedMenuEntries) { m_detailedEntriesNamesFirst = pConfig->readBoolEntry("DetailedEntriesNamesFirst",false);<|fim▁hole|> cleanupClipboard(); delete m_rootFolder; delete m_separator; } void TreeView::setViewMode(bool showHidden) { delete m_rmb; // setup rmb menu m_rmb = new TQPopupMenu(this); TDEAction *action; action = m_ac->action("edit_cut"); if(action) { action->plug(m_rmb); action->setEnabled(false); connect(action, TQT_SIGNAL(activated()), TQT_SLOT(cut())); } action = m_ac->action("edit_copy"); if(action) { action->plug(m_rmb); action->setEnabled(false); connect(action, TQT_SIGNAL(activated()), TQT_SLOT(copy())); } action = m_ac->action("edit_paste"); if(action) { action->plug(m_rmb); action->setEnabled(false); connect(action, TQT_SIGNAL(activated()), TQT_SLOT(paste())); } m_rmb->insertSeparator(); action = m_ac->action("delete"); if(action) { action->plug(m_rmb); action->setEnabled(false); connect(action, TQT_SIGNAL(activated()), TQT_SLOT(del())); } m_rmb->insertSeparator(); if(m_ac->action("newitem")) m_ac->action("newitem")->plug(m_rmb); if(m_ac->action("newsubmenu")) m_ac->action("newsubmenu")->plug(m_rmb); if(m_ac->action("newsep")) m_ac->action("newsep")->plug(m_rmb); m_showHidden = showHidden; readMenuFolderInfo(); fill(); } void TreeView::readMenuFolderInfo(MenuFolderInfo *folderInfo, KServiceGroup::Ptr folder, const TQString &prefix) { if (!folderInfo) { folderInfo = m_rootFolder; if (m_controlCenter) folder = KServiceGroup::baseGroup("settings"); else folder = KServiceGroup::root(); } if (!folder || !folder->isValid()) return; folderInfo->caption = folder->caption(); folderInfo->comment = folder->comment(); // Item names may contain ampersands. 
To avoid them being converted // to accelerators, replace them with two ampersands. folderInfo->hidden = folder->noDisplay(); folderInfo->directoryFile = folder->directoryEntryPath(); folderInfo->icon = folder->icon(); TQString id = folder->relPath(); int i = id.findRev('/', -2); id = id.mid(i+1); folderInfo->id = id; folderInfo->fullId = prefix + id; KServiceGroup::List list = folder->entries(true, !m_showHidden, true, m_detailedMenuEntries && !m_detailedEntriesNamesFirst); for(KServiceGroup::List::ConstIterator it = list.begin(); it != list.end(); ++it) { KSycocaEntry * e = *it; if (e->isType(KST_KServiceGroup)) { KServiceGroup::Ptr g(static_cast<KServiceGroup *>(e)); MenuFolderInfo *subFolderInfo = new MenuFolderInfo(); readMenuFolderInfo(subFolderInfo, g, folderInfo->fullId); folderInfo->add(subFolderInfo, true); } else if (e->isType(KST_KService)) { folderInfo->add(new MenuEntryInfo(static_cast<KService *>(e)), true); } else if (e->isType(KST_KServiceSeparator)) { folderInfo->add(m_separator, true); } } } void TreeView::fill() { TQApplication::setOverrideCursor(Qt::WaitCursor); clear(); fillBranch(m_rootFolder, 0); TQApplication::restoreOverrideCursor(); } TQString TreeView::findName(KDesktopFile *df, bool deleted) { TQString name = df->readName(); if (deleted) { if (name == "empty") name = TQString::null; if (name.isEmpty()) { TQString file = df->fileName(); TQString res = df->resource(); bool isLocal = true; TQStringList files = TDEGlobal::dirs()->findAllResources(res.latin1(), file); for(TQStringList::ConstIterator it = files.begin(); it != files.end(); ++it) { if (isLocal) { isLocal = false; continue; } KDesktopFile df2(*it); name = df2.readName(); if (!name.isEmpty() && (name != "empty")) return name; } } } return name; } TreeItem *TreeView::createTreeItem(TreeItem *parent, TQListViewItem *after, MenuFolderInfo *folderInfo, bool _init) { TreeItem *item; if (parent == 0) item = new TreeItem(this, after, TQString::null, _init); else item = new TreeItem(parent, after, TQString::null, _init); item->setMenuFolderInfo(folderInfo); item->setName(folderInfo->caption); item->setPixmap(0, appIcon(folderInfo->icon)); item->setDirectoryPath(folderInfo->fullId); item->setHidden(folderInfo->hidden); item->setExpandable(true); return item; } TreeItem *TreeView::createTreeItem(TreeItem *parent, TQListViewItem *after, MenuEntryInfo *entryInfo, bool _init) { bool hidden = entryInfo->hidden; TreeItem* item; if (parent == 0) item = new TreeItem(this, after, entryInfo->menuId(), _init); else item = new TreeItem(parent, after, entryInfo->menuId(),_init); QString name; if (m_detailedMenuEntries && entryInfo->description.length() != 0) { if (m_detailedEntriesNamesFirst) { name = entryInfo->caption + " (" + entryInfo->description + ")"; } else { name = entryInfo->description + " (" + entryInfo->caption + ")"; } } else { name = entryInfo->caption; } item->setMenuEntryInfo(entryInfo); item->setName(name); item->setPixmap(0, appIcon(entryInfo->icon)); item->setHidden(hidden); return item; } TreeItem *TreeView::createTreeItem(TreeItem *parent, TQListViewItem *after, MenuSeparatorInfo *, bool _init) { TreeItem* item; if (parent == 0) item = new TreeItem(this, after, TQString::null, _init); else item = new TreeItem(parent, after, TQString::null,_init); return item; } void TreeView::fillBranch(MenuFolderInfo *folderInfo, TreeItem *parent) { TQString relPath = parent ? 
parent->directory() : TQString::null; TQPtrListIterator<MenuInfo> it( folderInfo->initialLayout ); TreeItem *after = 0; for (MenuInfo *info; (info = it.current()); ++it) { MenuEntryInfo *entry = dynamic_cast<MenuEntryInfo*>(info); if (entry) { after = createTreeItem(parent, after, entry); continue; } MenuFolderInfo *subFolder = dynamic_cast<MenuFolderInfo*>(info); if (subFolder) { after = createTreeItem(parent, after, subFolder); continue; } MenuSeparatorInfo *separator = dynamic_cast<MenuSeparatorInfo*>(info); if (separator) { after = createTreeItem(parent, after, separator); continue; } } } void TreeView::closeAllItems(TQListViewItem *item) { if (!item) return; while(item) { item->setOpen(false); closeAllItems(item->firstChild()); item = item->nextSibling(); } } void TreeView::selectMenu(const TQString &menu) { closeAllItems(firstChild()); if (menu.length() <= 1) { setCurrentItem(firstChild()); clearSelection(); return; // Root menu } TQString restMenu = menu.mid(1); if (!restMenu.endsWith("/")) restMenu += "/"; TreeItem *item = 0; do { int i = restMenu.find("/"); TQString subMenu = restMenu.left(i+1); restMenu = restMenu.mid(i+1); item = (TreeItem*)(item ? item->firstChild() : firstChild()); while(item) { MenuFolderInfo *folderInfo = item->folderInfo(); if (folderInfo && (folderInfo->id == subMenu)) { item->setOpen(true); break; } item = (TreeItem*) item->nextSibling(); } } while( item && !restMenu.isEmpty()); if (item) { setCurrentItem(item); ensureItemVisible(item); } } void TreeView::selectMenuEntry(const TQString &menuEntry) { TreeItem *item = (TreeItem *) selectedItem(); if (!item) { item = (TreeItem *) currentItem(); while (item && item->isDirectory()) item = (TreeItem*) item->nextSibling(); } else item = (TreeItem *) item->firstChild(); while(item) { MenuEntryInfo *entry = item->entryInfo(); if (entry && (entry->menuId() == menuEntry)) { setCurrentItem(item); ensureItemVisible(item); return; } item = (TreeItem*) item->nextSibling(); } } void TreeView::itemSelected(TQListViewItem *item) { TreeItem *_item = (TreeItem*)item; bool selected = false; bool dselected = false; if (_item) { selected = true; dselected = _item->isHidden(); } m_ac->action("edit_cut")->setEnabled(selected); m_ac->action("edit_copy")->setEnabled(selected); if (m_ac->action("delete")) m_ac->action("delete")->setEnabled(selected && !dselected); if(!item) { emit disableAction(); return; } if (_item->isDirectory()) emit entrySelected(_item->folderInfo()); else emit entrySelected(_item->entryInfo()); } void TreeView::currentChanged(MenuFolderInfo *folderInfo) { TreeItem *item = (TreeItem*)selectedItem(); if (item == 0) return; if (folderInfo == 0) return; item->setName(folderInfo->caption); item->setPixmap(0, appIcon(folderInfo->icon)); } void TreeView::currentChanged(MenuEntryInfo *entryInfo) { TreeItem *item = (TreeItem*)selectedItem(); if (item == 0) return; if (entryInfo == 0) return; QString name; if (m_detailedMenuEntries && entryInfo->description.length() != 0) { if (m_detailedEntriesNamesFirst) { name = entryInfo->caption + " (" + entryInfo->description + ")"; } else { name = entryInfo->description + " (" + entryInfo->caption + ")"; } } else { name = entryInfo->caption; } item->setName(name); item->setPixmap(0, appIcon(entryInfo->icon)); } TQStringList TreeView::fileList(const TQString& rPath) { TQString relativePath = rPath; // truncate "/.directory" int pos = relativePath.findRev("/.directory"); if (pos > 0) relativePath.truncate(pos); TQStringList filelist; // loop through all resource dirs and build a 
file list TQStringList resdirlist = TDEGlobal::dirs()->resourceDirs("apps"); for (TQStringList::ConstIterator it = resdirlist.begin(); it != resdirlist.end(); ++it) { TQDir dir((*it) + "/" + relativePath); if(!dir.exists()) continue; dir.setFilter(TQDir::Files); dir.setNameFilter("*.desktop;*.kdelnk"); // build a list of files TQStringList files = dir.entryList(); for (TQStringList::ConstIterator it = files.begin(); it != files.end(); ++it) { // does not work?! //if (filelist.contains(*it)) continue; if (relativePath.isEmpty()) { filelist.remove(*it); // hack filelist.append(*it); } else { filelist.remove(relativePath + "/" + *it); //hack filelist.append(relativePath + "/" + *it); } } } return filelist; } TQStringList TreeView::dirList(const TQString& rPath) { TQString relativePath = rPath; // truncate "/.directory" int pos = relativePath.findRev("/.directory"); if (pos > 0) relativePath.truncate(pos); TQStringList dirlist; // loop through all resource dirs and build a subdir list TQStringList resdirlist = TDEGlobal::dirs()->resourceDirs("apps"); for (TQStringList::ConstIterator it = resdirlist.begin(); it != resdirlist.end(); ++it) { TQDir dir((*it) + "/" + relativePath); if(!dir.exists()) continue; dir.setFilter(TQDir::Dirs); // build a list of subdirs TQStringList subdirs = dir.entryList(); for (TQStringList::ConstIterator it = subdirs.begin(); it != subdirs.end(); ++it) { if ((*it) == "." || (*it) == "..") continue; // does not work?! // if (dirlist.contains(*it)) continue; if (relativePath.isEmpty()) { dirlist.remove(*it); //hack dirlist.append(*it); } else { dirlist.remove(relativePath + "/" + *it); //hack dirlist.append(relativePath + "/" + *it); } } } return dirlist; } bool TreeView::acceptDrag(TQDropEvent* e) const { if (e->provides("application/x-kmenuedit-internal") && (e->source() == const_cast<TreeView *>(this))) return true; KURL::List urls; if (KURLDrag::decode(e, urls) && (urls.count() == 1) && urls[0].isLocalFile() && urls[0].path().endsWith(".desktop")) return true; return false; } static TQString createDesktopFile(const TQString &file, TQString *menuId, TQStringList *excludeList) { TQString base = file.mid(file.findRev('/')+1); base = base.left(base.findRev('.')); TQRegExp r("(.*)(?=-\\d+)"); base = (r.search(base) > -1) ? r.cap(1) : base; TQString result = KService::newServicePath(true, base, menuId, excludeList); excludeList->append(*menuId); // Todo for Undo-support: Undo menuId allocation: return result; } static KDesktopFile *copyDesktopFile(MenuEntryInfo *entryInfo, TQString *menuId, TQStringList *excludeList) { TQString result = createDesktopFile(entryInfo->file(), menuId, excludeList); KDesktopFile *df = entryInfo->desktopFile()->copyTo(result); df->deleteEntry("Categories"); // Don't set any categories! return df; } static TQString createDirectoryFile(const TQString &file, TQStringList *excludeList) { TQString base = file.mid(file.findRev('/')+1); base = base.left(base.findRev('.')); TQString result; int i = 1; while(true) { if (i == 1) result = base + ".directory"; else result = base + TQString("-%1.directory").arg(i); if (!excludeList->contains(result)) { if (locate("xdgdata-dirs", result).isEmpty()) break; } i++; } excludeList->append(result); result = locateLocal("xdgdata-dirs", result); return result; } void TreeView::slotDropped (TQDropEvent * e, TQListViewItem *parent, TQListViewItem*after) { if(!e) return; // get destination folder TreeItem *parentItem = static_cast<TreeItem*>(parent); TQString folder = parentItem ? 
parentItem->directory() : TQString::null; MenuFolderInfo *parentFolderInfo = parentItem ? parentItem->folderInfo() : m_rootFolder; if (e->source() != this) { // External drop KURL::List urls; if (!KURLDrag::decode(e, urls) || (urls.count() != 1) || !urls[0].isLocalFile()) return; TQString path = urls[0].path(); if (!path.endsWith(".desktop")) return; TQString menuId; TQString result = createDesktopFile(path, &menuId, &m_newMenuIds); KDesktopFile orig_df(path); KDesktopFile *df = orig_df.copyTo(result); df->deleteEntry("Categories"); // Don't set any categories! KService *s = new KService(df); s->setMenuId(menuId); MenuEntryInfo *entryInfo = new MenuEntryInfo(s, df); TQString oldCaption = entryInfo->caption; TQString newCaption = parentFolderInfo->uniqueItemCaption(oldCaption, oldCaption); entryInfo->setCaption(newCaption); // Add file to menu // m_menuFile->addEntry(folder, menuId); m_menuFile->pushAction(MenuFile::ADD_ENTRY, folder, menuId); // create the TreeItem if(parentItem) parentItem->setOpen(true); // update fileInfo data parentFolderInfo->add(entryInfo); TreeItem *newItem = createTreeItem(parentItem, after, entryInfo, true); setSelected ( newItem, true); itemSelected( newItem); m_drag = 0; setLayoutDirty(parentItem); return; } // is there content in the clipboard? if (!m_drag) return; if (m_dragItem == after) return; // Nothing to do int command = m_drag; if (command == MOVE_FOLDER) { MenuFolderInfo *folderInfo = m_dragInfo; if (e->action() == TQDropEvent::Copy) { // Ugh.. this is hard :) // * Create new .directory file // Add } else { TreeItem *tmpItem = static_cast<TreeItem*>(parentItem); while ( tmpItem ) { if ( tmpItem == m_dragItem ) { m_drag = 0; return; } tmpItem = static_cast<TreeItem*>(tmpItem->parent() ); } // Remove MenuFolderInfo TreeItem *oldParentItem = static_cast<TreeItem*>(m_dragItem->parent()); MenuFolderInfo *oldParentFolderInfo = oldParentItem ? 
oldParentItem->folderInfo() : m_rootFolder; oldParentFolderInfo->take(folderInfo); // Move menu TQString oldFolder = folderInfo->fullId; TQString folderName = folderInfo->id; TQString newFolder = m_menuFile->uniqueMenuName(folder, folderName, parentFolderInfo->existingMenuIds()); folderInfo->id = newFolder; // Add file to menu //m_menuFile->moveMenu(oldFolder, folder + newFolder); m_menuFile->pushAction(MenuFile::MOVE_MENU, oldFolder, folder + newFolder); // Make sure caption is unique TQString newCaption = parentFolderInfo->uniqueMenuCaption(folderInfo->caption); if (newCaption != folderInfo->caption) { folderInfo->setCaption(newCaption); } // create the TreeItem if(parentItem) parentItem->setOpen(true); // update fileInfo data folderInfo->updateFullId(parentFolderInfo->fullId); folderInfo->setInUse(true); parentFolderInfo->add(folderInfo); if ((parentItem != oldParentItem) || !after) { if (oldParentItem) oldParentItem->takeItem(m_dragItem); else takeItem(m_dragItem); if (parentItem) parentItem->insertItem(m_dragItem); else insertItem(m_dragItem); } m_dragItem->moveItem(after); m_dragItem->setName(folderInfo->caption); m_dragItem->setDirectoryPath(folderInfo->fullId); setSelected(m_dragItem, true); itemSelected(m_dragItem); } } else if (command == MOVE_FILE) { MenuEntryInfo *entryInfo = m_dragItem->entryInfo(); TQString menuId = entryInfo->menuId(); if (e->action() == TQDropEvent::Copy) { // Need to copy file and then add it KDesktopFile *df = copyDesktopFile(entryInfo, &menuId, &m_newMenuIds); // Duplicate //UNDO-ACTION: NEW_MENU_ID (menuId) KService *s = new KService(df); s->setMenuId(menuId); entryInfo = new MenuEntryInfo(s, df); TQString oldCaption = entryInfo->caption; TQString newCaption = parentFolderInfo->uniqueItemCaption(oldCaption, oldCaption); entryInfo->setCaption(newCaption); } else { del(m_dragItem, false); TQString oldCaption = entryInfo->caption; TQString newCaption = parentFolderInfo->uniqueItemCaption(oldCaption); entryInfo->setCaption(newCaption); entryInfo->setInUse(true); } // Add file to menu // m_menuFile->addEntry(folder, menuId); m_menuFile->pushAction(MenuFile::ADD_ENTRY, folder, menuId); // create the TreeItem if(parentItem) parentItem->setOpen(true); // update fileInfo data parentFolderInfo->add(entryInfo); TreeItem *newItem = createTreeItem(parentItem, after, entryInfo, true); setSelected ( newItem, true); itemSelected( newItem); } else if (command == COPY_SEPARATOR) { if (e->action() != TQDropEvent::Copy) del(m_dragItem, false); TreeItem *newItem = createTreeItem(parentItem, after, m_separator, true); setSelected ( newItem, true); itemSelected( newItem); } else { // Error } m_drag = 0; setLayoutDirty(parentItem); } void TreeView::startDrag() { TQDragObject *drag = dragObject(); if (!drag) return; drag->dragMove(); } TQDragObject *TreeView::dragObject() { m_dragPath = TQString::null; TreeItem *item = (TreeItem*)selectedItem(); if(item == 0) return 0; KMultipleDrag *drag = new KMultipleDrag( this ); if (item->isDirectory()) { m_drag = MOVE_FOLDER; m_dragInfo = item->folderInfo(); m_dragItem = item; } else if (item->isEntry()) { m_drag = MOVE_FILE; m_dragInfo = 0; m_dragItem = item; TQString menuId = item->menuId(); m_dragPath = item->entryInfo()->service->desktopEntryPath(); if (!m_dragPath.isEmpty()) m_dragPath = locate("apps", m_dragPath); if (!m_dragPath.isEmpty()) { KURL url; url.setPath(m_dragPath); drag->addDragObject( new KURLDrag(url, 0)); } } else { m_drag = COPY_SEPARATOR; m_dragInfo = 0; m_dragItem = item; } drag->addDragObject( new 
TQStoredDrag("application/x-kmenuedit-internal", 0)); if ( item->pixmap(0) ) drag->setPixmap(*item->pixmap(0)); return drag; } void TreeView::slotRMBPressed(TQListViewItem*, const TQPoint& p) { TreeItem *item = (TreeItem*)selectedItem(); if(item == 0) return; if(m_rmb) m_rmb->exec(p); } void TreeView::newsubmenu() { TreeItem *parentItem = 0; TreeItem *item = (TreeItem*)selectedItem(); bool ok; TQString caption = KInputDialog::getText( i18n( "New Submenu" ), i18n( "Submenu name:" ), TQString::null, &ok, this ); if (!ok) return; TQString file = caption; file.replace('/', '-'); file = createDirectoryFile(file, &m_newDirectoryList); // Create // get destination folder TQString folder; if(!item) { parentItem = 0; folder = TQString::null; } else if(item->isDirectory()) { parentItem = item; item = 0; folder = parentItem->directory(); } else { parentItem = static_cast<TreeItem*>(item->parent()); folder = parentItem ? parentItem->directory() : TQString::null; } MenuFolderInfo *parentFolderInfo = parentItem ? parentItem->folderInfo() : m_rootFolder; MenuFolderInfo *folderInfo = new MenuFolderInfo(); folderInfo->caption = parentFolderInfo->uniqueMenuCaption(caption); folderInfo->id = m_menuFile->uniqueMenuName(folder, caption, parentFolderInfo->existingMenuIds()); folderInfo->directoryFile = file; folderInfo->icon = "package"; folderInfo->hidden = false; folderInfo->setDirty(); KDesktopFile *df = new KDesktopFile(file); df->writeEntry("Name", folderInfo->caption); df->writeEntry("Icon", folderInfo->icon); df->sync(); delete df; // Add file to menu // m_menuFile->addMenu(folder + folderInfo->id, file); m_menuFile->pushAction(MenuFile::ADD_MENU, folder + folderInfo->id, file); folderInfo->fullId = parentFolderInfo->fullId + folderInfo->id; // create the TreeItem if(parentItem) parentItem->setOpen(true); // update fileInfo data parentFolderInfo->add(folderInfo); TreeItem *newItem = createTreeItem(parentItem, item, folderInfo, true); setSelected ( newItem, true); itemSelected( newItem); setLayoutDirty(parentItem); } void TreeView::newitem() { TreeItem *parentItem = 0; TreeItem *item = (TreeItem*)selectedItem(); bool ok; TQString caption = KInputDialog::getText( i18n( "New Item" ), i18n( "Item name:" ), TQString::null, &ok, this ); if (!ok) return; TQString menuId; TQString file = caption; file.replace('/', '-'); file = createDesktopFile(file, &menuId, &m_newMenuIds); // Create KDesktopFile *df = new KDesktopFile(file); df->writeEntry("Name", caption); df->writeEntry("Type", "Application"); // get destination folder TQString folder; if(!item) { parentItem = 0; folder = TQString::null; } else if(item->isDirectory()) { parentItem = item; item = 0; folder = parentItem->directory(); } else { parentItem = static_cast<TreeItem*>(item->parent()); folder = parentItem ? parentItem->directory() : TQString::null; } MenuFolderInfo *parentFolderInfo = parentItem ? 
parentItem->folderInfo() : m_rootFolder; // Add file to menu // m_menuFile->addEntry(folder, menuId); m_menuFile->pushAction(MenuFile::ADD_ENTRY, folder, menuId); KService *s = new KService(df); s->setMenuId(menuId); MenuEntryInfo *entryInfo = new MenuEntryInfo(s, df); // create the TreeItem if(parentItem) parentItem->setOpen(true); // update fileInfo data parentFolderInfo->add(entryInfo); TreeItem *newItem = createTreeItem(parentItem, item, entryInfo, true); setSelected ( newItem, true); itemSelected( newItem); setLayoutDirty(parentItem); } void TreeView::newsep() { TreeItem *parentItem = 0; TreeItem *item = (TreeItem*)selectedItem(); if(!item) { parentItem = 0; } else if(item->isDirectory()) { parentItem = item; item = 0; } else { parentItem = static_cast<TreeItem*>(item->parent()); } // create the TreeItem if(parentItem) parentItem->setOpen(true); TreeItem *newItem = createTreeItem(parentItem, item, m_separator, true); setSelected ( newItem, true); itemSelected( newItem); setLayoutDirty(parentItem); } void TreeView::cut() { copy( true ); m_ac->action("edit_cut")->setEnabled(false); m_ac->action("edit_copy")->setEnabled(false); m_ac->action("delete")->setEnabled(false); // Select new current item setSelected( currentItem(), true ); // Switch the UI to show that item itemSelected( selectedItem() ); } void TreeView::copy() { copy( false ); } void TreeView::copy( bool cutting ) { TreeItem *item = (TreeItem*)selectedItem(); // nil selected? -> nil to copy if (item == 0) return; if (cutting) setLayoutDirty((TreeItem*)item->parent()); // clean up old stuff cleanupClipboard(); // is item a folder or a file? if(item->isDirectory()) { TQString folder = item->directory(); if (cutting) { // Place in clipboard m_clipboard = MOVE_FOLDER; m_clipboardFolderInfo = item->folderInfo(); del(item, false); } else { // Place in clipboard m_clipboard = COPY_FOLDER; m_clipboardFolderInfo = item->folderInfo(); } } else if (item->isEntry()) { if (cutting) { // Place in clipboard m_clipboard = MOVE_FILE; m_clipboardEntryInfo = item->entryInfo(); del(item, false); } else { // Place in clipboard m_clipboard = COPY_FILE; m_clipboardEntryInfo = item->entryInfo(); } } else { // Place in clipboard m_clipboard = COPY_SEPARATOR; if (cutting) del(item, false); } m_ac->action("edit_paste")->setEnabled(true); } void TreeView::paste() { TreeItem *parentItem = 0; TreeItem *item = (TreeItem*)selectedItem(); // nil selected? -> nil to paste to if (item == 0) return; // is there content in the clipboard? if (!m_clipboard) return; // get destination folder TQString folder; if(item->isDirectory()) { parentItem = item; item = 0; folder = parentItem->directory(); } else { parentItem = static_cast<TreeItem*>(item->parent()); folder = parentItem ? parentItem->directory() : TQString::null; } MenuFolderInfo *parentFolderInfo = parentItem ? parentItem->folderInfo() : m_rootFolder; int command = m_clipboard; if ((command == COPY_FOLDER) || (command == MOVE_FOLDER)) { MenuFolderInfo *folderInfo = m_clipboardFolderInfo; if (command == COPY_FOLDER) { // Ugh.. 
this is hard :) // * Create new .directory file // Add } else if (command == MOVE_FOLDER) { // Move menu TQString oldFolder = folderInfo->fullId; TQString folderName = folderInfo->id; TQString newFolder = m_menuFile->uniqueMenuName(folder, folderName, parentFolderInfo->existingMenuIds()); folderInfo->id = newFolder; // Add file to menu // m_menuFile->moveMenu(oldFolder, folder + newFolder); m_menuFile->pushAction(MenuFile::MOVE_MENU, oldFolder, folder + newFolder); // Make sure caption is unique TQString newCaption = parentFolderInfo->uniqueMenuCaption(folderInfo->caption); if (newCaption != folderInfo->caption) { folderInfo->setCaption(newCaption); } // create the TreeItem if(parentItem) parentItem->setOpen(true); // update fileInfo data folderInfo->fullId = parentFolderInfo->fullId + folderInfo->id; folderInfo->setInUse(true); parentFolderInfo->add(folderInfo); TreeItem *newItem = createTreeItem(parentItem, item, folderInfo); setSelected ( newItem, true); itemSelected( newItem); } m_clipboard = COPY_FOLDER; // Next one copies. } else if ((command == COPY_FILE) || (command == MOVE_FILE)) { MenuEntryInfo *entryInfo = m_clipboardEntryInfo; TQString menuId; if (command == COPY_FILE) { // Need to copy file and then add it KDesktopFile *df = copyDesktopFile(entryInfo, &menuId, &m_newMenuIds); // Duplicate KService *s = new KService(df); s->setMenuId(menuId); entryInfo = new MenuEntryInfo(s, df); TQString oldCaption = entryInfo->caption; TQString newCaption = parentFolderInfo->uniqueItemCaption(oldCaption, oldCaption); entryInfo->setCaption(newCaption); } else if (command == MOVE_FILE) { menuId = entryInfo->menuId(); m_clipboard = COPY_FILE; // Next one copies. TQString oldCaption = entryInfo->caption; TQString newCaption = parentFolderInfo->uniqueItemCaption(oldCaption); entryInfo->setCaption(newCaption); entryInfo->setInUse(true); } // Add file to menu // m_menuFile->addEntry(folder, menuId); m_menuFile->pushAction(MenuFile::ADD_ENTRY, folder, menuId); // create the TreeItem if(parentItem) parentItem->setOpen(true); // update fileInfo data parentFolderInfo->add(entryInfo); TreeItem *newItem = createTreeItem(parentItem, item, entryInfo, true); setSelected ( newItem, true); itemSelected( newItem); } else { // create separator if(parentItem) parentItem->setOpen(true); TreeItem *newItem = createTreeItem(parentItem, item, m_separator, true); setSelected ( newItem, true); itemSelected( newItem); } setLayoutDirty(parentItem); } void TreeView::del() { TreeItem *item = (TreeItem*)selectedItem(); // nil selected? -> nil to delete if (item == 0) return; del(item, true); m_ac->action("edit_cut")->setEnabled(false); m_ac->action("edit_copy")->setEnabled(false); m_ac->action("delete")->setEnabled(false); // Select new current item setSelected( currentItem(), true ); // Switch the UI to show that item itemSelected( selectedItem() ); } void TreeView::del(TreeItem *item, bool deleteInfo) { TreeItem *parentItem = static_cast<TreeItem*>(item->parent()); // is file a .directory or a .desktop file if(item->isDirectory()) { MenuFolderInfo *folderInfo = item->folderInfo(); // Remove MenuFolderInfo MenuFolderInfo *parentFolderInfo = parentItem ? 
parentItem->folderInfo() : m_rootFolder; parentFolderInfo->take(folderInfo); folderInfo->setInUse(false); if (m_clipboard == COPY_FOLDER && (m_clipboardFolderInfo == folderInfo)) { // Copy + Del == Cut m_clipboard = MOVE_FOLDER; // Clipboard now owns folderInfo } else { if (folderInfo->takeRecursive(m_clipboardFolderInfo)) m_clipboard = MOVE_FOLDER; // Clipboard now owns m_clipboardFolderInfo if (deleteInfo) delete folderInfo; // Delete folderInfo } // Remove from menu // m_menuFile->removeMenu(item->directory()); m_menuFile->pushAction(MenuFile::REMOVE_MENU, item->directory(), TQString::null); // Remove tree item delete item; } else if (item->isEntry()) { MenuEntryInfo *entryInfo = item->entryInfo(); TQString menuId = entryInfo->menuId(); // Remove MenuFolderInfo MenuFolderInfo *parentFolderInfo = parentItem ? parentItem->folderInfo() : m_rootFolder; parentFolderInfo->take(entryInfo); entryInfo->setInUse(false); if (m_clipboard == COPY_FILE && (m_clipboardEntryInfo == entryInfo)) { // Copy + Del == Cut m_clipboard = MOVE_FILE; // Clipboard now owns entryInfo } else { if (deleteInfo) delete entryInfo; // Delete entryInfo } // Remove from menu TQString folder = parentItem ? parentItem->directory() : TQString::null; // m_menuFile->removeEntry(folder, menuId); m_menuFile->pushAction(MenuFile::REMOVE_ENTRY, folder, menuId); // Remove tree item delete item; } else { // Remove separator delete item; } setLayoutDirty(parentItem); } void TreeView::cleanupClipboard() { if (m_clipboard == MOVE_FOLDER) delete m_clipboardFolderInfo; m_clipboardFolderInfo = 0; if (m_clipboard == MOVE_FILE) delete m_clipboardEntryInfo; m_clipboardEntryInfo = 0; m_clipboard = 0; } static TQStringList extractLayout(TreeItem *item) { bool firstFolder = true; bool firstEntry = true; TQStringList layout; for(;item; item = static_cast<TreeItem*>(item->nextSibling())) { if (item->isDirectory()) { if (firstFolder) { firstFolder = false; layout << ":M"; // Add new folders here... } layout << (item->folderInfo()->id); } else if (item->isEntry()) { if (firstEntry) { firstEntry = false; layout << ":F"; // Add new entries here... 
} layout << (item->entryInfo()->menuId()); } else { layout << ":S"; } } return layout; } TQStringList TreeItem::layout() { TQStringList layout = extractLayout(static_cast<TreeItem*>(firstChild())); _layoutDirty = false; return layout; } void TreeView::saveLayout() { if (m_layoutDirty) { TQStringList layout = extractLayout(static_cast<TreeItem*>(firstChild())); m_menuFile->setLayout(m_rootFolder->fullId, layout); m_layoutDirty = false; } TQPtrList<TQListViewItem> lst; TQListViewItemIterator it( this ); while ( it.current() ) { TreeItem *item = static_cast<TreeItem*>(it.current()); if ( item->isLayoutDirty() ) { m_menuFile->setLayout(item->folderInfo()->fullId, item->layout()); } ++it; } } bool TreeView::save() { saveLayout(); m_rootFolder->save(m_menuFile); bool success = m_menuFile->performAllActions(); m_newMenuIds.clear(); m_newDirectoryList.clear(); if (success) { KService::rebuildKSycoca(this); } else { KMessageBox::sorry(this, "<qt>"+i18n("Menu changes could not be saved because of the following problem:")+"<br><br>"+ m_menuFile->error()+"</qt>"); } return success; } void TreeView::setLayoutDirty(TreeItem *parentItem) { if (parentItem) parentItem->setLayoutDirty(); else m_layoutDirty = true; } bool TreeView::isLayoutDirty() { TQPtrList<TQListViewItem> lst; TQListViewItemIterator it( this ); while ( it.current() ) { if ( static_cast<TreeItem*>(it.current())->isLayoutDirty() ) return true; ++it; } return false; } bool TreeView::dirty() { return m_layoutDirty || m_rootFolder->hasDirt() || m_menuFile->dirty() || isLayoutDirty(); } void TreeView::findServiceShortcut(const TDEShortcut&cut, KService::Ptr &service) { service = m_rootFolder->findServiceShortcut(cut); }<|fim▁end|>
} } TreeView::~TreeView() {
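Aside: the layout-serialization scheme used by TreeView::extractLayout above is easier to see in isolation. A minimal Python sketch of the same idea (Item and extract_layout are hypothetical names, not part of the original C++): ":M" marks where newly installed folders should be slotted in, ":F" marks where new entries go, and ":S" stands for a separator.

from dataclasses import dataclass

@dataclass
class Item:
    kind: str    # "folder", "entry", or "separator"
    ident: str = ""

def extract_layout(siblings):
    # Walk siblings in display order, emitting the XDG-menu-style layout list.
    layout, seen_folder, seen_entry = [], False, False
    for item in siblings:
        if item.kind == "folder":
            if not seen_folder:
                seen_folder = True
                layout.append(":M")   # new folders are added here
            layout.append(item.ident)
        elif item.kind == "entry":
            if not seen_entry:
                seen_entry = True
                layout.append(":F")   # new entries are added here
            layout.append(item.ident)
        else:
            layout.append(":S")       # separator placeholder
    return layout

print(extract_layout([Item("folder", "Games/"), Item("separator"),
                      Item("entry", "kcalc.desktop")]))
# -> [':M', 'Games/', ':S', ':F', 'kcalc.desktop']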
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>__all__ = ["gauth", "gcalendar", "lectio", "lesson", "run"]<|fim▁end|>
<|file_name|>package-info.java<|end_file_name|><|fim▁begin|>/** General utility methods collection (not all self-developed). */<|fim▁hole|><|fim▁end|>
package de.konradhoeffner.commons;
<|file_name|>IdentitySieve.java<|end_file_name|><|fim▁begin|>package it.unimi.di.law.bubing.sieve; /* * Copyright (C) 2010-2017 Paolo Boldi, Massimo Santini, and Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import it.unimi.dsi.sux4j.mph.AbstractHashFunction; import java.io.IOException; //RELEASE-STATUS: DIST /** A sieve that simply (and immediately) copies {@linkplain #enqueue(Object, Object) enqueued keys} to the {@linkplain #setNewFlowRecevier(it.unimi.di.law.bubing.sieve.AbstractSieve.NewFlowReceiver) new flow receiver}. * * <p>Note that instances of this class call {@link AbstractSieve.NewFlowReceiver#prepareToAppend()} in the constructor only, and * {@link AbstractSieve.NewFlowReceiver#noMoreAppend()} in the method {@link #close()} only.<|fim▁hole|> super(keySerDeser, valueSerDeser, hashingStrategy, updateStrategy); setNewFlowRecevier(newFlowReceiver); newFlowReceiver.prepareToAppend(); } @Override public boolean enqueue(K key, V value) throws IOException { newFlowReceiver.append(0, key); return false; } @Override public void close() throws IOException { newFlowReceiver.noMoreAppend(); } @Override public void flush() throws IOException, InterruptedException {} }<|fim▁end|>
*/ public final class IdentitySieve<K, V> extends AbstractSieve<K, V> { public IdentitySieve(final NewFlowReceiver<K> newFlowReceiver, final ByteSerializerDeserializer<K> keySerDeser, final ByteSerializerDeserializer<V> valueSerDeser, final AbstractHashFunction<K> hashingStrategy, final UpdateStrategy<K, V> updateStrategy) throws IOException {
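The javadoc above describes the whole contract: an IdentitySieve performs no deduplication at all, it simply forwards every enqueued key straight to the new-flow receiver, calling prepareToAppend() once on construction and noMoreAppend() once on close(). A rough Python sketch of that behaviour (the class and method names below are illustrative stand-ins, not the BUbiNG API):

class PrintingReceiver:
    def prepare_to_append(self): print("prepare")
    def append(self, hash_value, key): print("append", key)
    def no_more_append(self): print("done")

class IdentitySievePy:
    def __init__(self, receiver):
        self.receiver = receiver
        self.receiver.prepare_to_append()   # constructor-only call
    def enqueue(self, key, value=None):
        self.receiver.append(0, key)        # copy the key through immediately
        return False
    def close(self):
        self.receiver.no_more_append()      # close-only call

sieve = IdentitySievePy(PrintingReceiver())
sieve.enqueue("http://example.com/")
sieve.close()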
<|file_name|>keyframes.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use cssparser::{AtRuleParser, Parser, QualifiedRuleParser, RuleListParser}; use cssparser::{DeclarationListParser, DeclarationParser, parse_one_rule}; use parking_lot::RwLock; use parser::{ParserContext, ParserContextExtraData, log_css_error}; use properties::{Importance, PropertyDeclaration, PropertyDeclarationBlock}; use properties::PropertyDeclarationParseResult; use properties::animated_properties::TransitionProperty; use servo_url::ServoUrl; use std::fmt; use std::sync::Arc; use style_traits::ToCss; use stylesheets::{MemoryHoleReporter, Origin}; /// A number from 1 to 100, indicating the percentage of the animation where /// this keyframe should run. #[derive(Debug, Copy, Clone, PartialEq, PartialOrd)] #[cfg_attr(feature = "servo", derive(HeapSizeOf))] pub struct KeyframePercentage(pub f32); impl ::std::cmp::Ord for KeyframePercentage { #[inline] fn cmp(&self, other: &Self) -> ::std::cmp::Ordering { // We know we have a number from 0 to 1, so unwrap() here is safe. self.0.partial_cmp(&other.0).unwrap() } } impl ::std::cmp::Eq for KeyframePercentage { } impl KeyframePercentage { #[inline] pub fn new(value: f32) -> KeyframePercentage {<|fim▁hole|> debug_assert!(value >= 0. && value <= 1.); KeyframePercentage(value) } fn parse(input: &mut Parser) -> Result<KeyframePercentage, ()> { let percentage = if input.try(|input| input.expect_ident_matching("from")).is_ok() { KeyframePercentage::new(0.) } else if input.try(|input| input.expect_ident_matching("to")).is_ok() { KeyframePercentage::new(1.) } else { let percentage = try!(input.expect_percentage()); if percentage > 1. || percentage < 0. { return Err(()); } KeyframePercentage::new(percentage) }; Ok(percentage) } } /// A keyframes selector is a list of percentages or from/to symbols, which are /// converted at parse time to percentages. #[derive(Debug, Clone, PartialEq)] #[cfg_attr(feature = "servo", derive(HeapSizeOf))] pub struct KeyframeSelector(Vec<KeyframePercentage>); impl KeyframeSelector { #[inline] pub fn percentages(&self) -> &[KeyframePercentage] { &self.0 } /// A dummy public function so we can write a unit test for this. pub fn new_for_unit_testing(percentages: Vec<KeyframePercentage>) -> KeyframeSelector { KeyframeSelector(percentages) } pub fn parse(input: &mut Parser) -> Result<Self, ()> { input.parse_comma_separated(KeyframePercentage::parse) .map(KeyframeSelector) } } /// A keyframe. #[derive(Debug, Clone)] #[cfg_attr(feature = "servo", derive(HeapSizeOf))] pub struct Keyframe { pub selector: KeyframeSelector, /// `!important` is not allowed in keyframe declarations, /// so the second value of these tuples is always `Importance::Normal`. /// But including them enables `compute_style_for_animation_step` to create a `ApplicableDeclarationBlock` /// by cloning an `Arc<_>` (incrementing a reference count) rather than re-creating a `Vec<_>`. 
#[cfg_attr(feature = "servo", ignore_heap_size_of = "Arc")] pub block: Arc<RwLock<PropertyDeclarationBlock>>, } impl ToCss for Keyframe { fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write { let mut iter = self.selector.percentages().iter(); try!(write!(dest, "{}%", iter.next().unwrap().0)); for percentage in iter { try!(write!(dest, ", ")); try!(write!(dest, "{}%", percentage.0)); } try!(dest.write_str(" { ")); try!(self.block.read().to_css(dest)); try!(dest.write_str(" }")); Ok(()) } } impl Keyframe { pub fn parse(css: &str, origin: Origin, base_url: ServoUrl, extra_data: ParserContextExtraData) -> Result<Arc<RwLock<Self>>, ()> { let error_reporter = Box::new(MemoryHoleReporter); let context = ParserContext::new_with_extra_data(origin, &base_url, error_reporter, extra_data); let mut input = Parser::new(css); let mut rule_parser = KeyframeListParser { context: &context, }; parse_one_rule(&mut input, &mut rule_parser) } } /// A keyframes step value. This can be a synthetised keyframes animation, that /// is, one autogenerated from the current computed values, or a list of /// declarations to apply. // TODO: Find a better name for this? #[derive(Debug, Clone)] #[cfg_attr(feature = "servo", derive(HeapSizeOf))] pub enum KeyframesStepValue { /// See `Keyframe::declarations`’s docs about the presence of `Importance`. Declarations { #[cfg_attr(feature = "servo", ignore_heap_size_of = "Arc")] block: Arc<RwLock<PropertyDeclarationBlock>> }, ComputedValues, } /// A single step from a keyframe animation. #[derive(Debug, Clone)] #[cfg_attr(feature = "servo", derive(HeapSizeOf))] pub struct KeyframesStep { /// The percentage of the animation duration when this step starts. pub start_percentage: KeyframePercentage, /// Declarations that will determine the final style during the step, or /// `ComputedValues` if this is an autogenerated step. pub value: KeyframesStepValue, /// Wether a animation-timing-function declaration exists in the list of /// declarations. /// /// This is used to know when to override the keyframe animation style. pub declared_timing_function: bool, } impl KeyframesStep { #[allow(unsafe_code)] #[inline] fn new(percentage: KeyframePercentage, value: KeyframesStepValue) -> Self { let declared_timing_function = match value { KeyframesStepValue::Declarations { ref block } => { block.read().declarations.iter().any(|&(ref prop_decl, _)| { match *prop_decl { PropertyDeclaration::AnimationTimingFunction(..) => true, _ => false, } }) } _ => false, }; KeyframesStep { start_percentage: percentage, value: value, declared_timing_function: declared_timing_function, } } } /// This structure represents a list of animation steps computed from the list /// of keyframes, in order. /// /// It only takes into account animable properties. #[derive(Debug, Clone)] #[cfg_attr(feature = "servo", derive(HeapSizeOf))] pub struct KeyframesAnimation { pub steps: Vec<KeyframesStep>, /// The properties that change in this animation. pub properties_changed: Vec<TransitionProperty>, } /// Get all the animated properties in a keyframes animation. Note that it's not /// defined what happens when a property is not on a keyframe, so we only peek /// the props of the first one. /// /// In practice, browsers seem to try to do their best job at it, so we might /// want to go through all the actual keyframes and deduplicate properties. 
#[allow(unsafe_code)] fn get_animated_properties(keyframe: &Keyframe) -> Vec<TransitionProperty> { let mut ret = vec![]; // NB: declarations are already deduplicated, so we don't have to check for // it here. for &(ref declaration, _) in keyframe.block.read().declarations.iter() { if let Some(property) = TransitionProperty::from_declaration(declaration) { ret.push(property); } } ret } impl KeyframesAnimation { pub fn from_keyframes(keyframes: &[Arc<RwLock<Keyframe>>]) -> Option<Self> { if keyframes.is_empty() { return None; } let animated_properties = get_animated_properties(&keyframes[0].read()); if animated_properties.is_empty() { return None; } let mut steps = vec![]; for keyframe in keyframes { let keyframe = keyframe.read(); for percentage in keyframe.selector.0.iter() { steps.push(KeyframesStep::new(*percentage, KeyframesStepValue::Declarations { block: keyframe.block.clone(), })); } } // Sort by the start percentage, so we can easily find a frame. steps.sort_by_key(|step| step.start_percentage); // Prepend autogenerated keyframes if appropriate. if steps[0].start_percentage.0 != 0. { steps.insert(0, KeyframesStep::new(KeyframePercentage::new(0.), KeyframesStepValue::ComputedValues)); } if steps.last().unwrap().start_percentage.0 != 1. { steps.push(KeyframesStep::new(KeyframePercentage::new(0.), KeyframesStepValue::ComputedValues)); } Some(KeyframesAnimation { steps: steps, properties_changed: animated_properties, }) } } /// Parses a keyframes list, like: /// 0%, 50% { /// width: 50%; /// } /// /// 40%, 60%, 100% { /// width: 100%; /// } struct KeyframeListParser<'a> { context: &'a ParserContext<'a>, } pub fn parse_keyframe_list(context: &ParserContext, input: &mut Parser) -> Vec<Arc<RwLock<Keyframe>>> { RuleListParser::new_for_nested_rule(input, KeyframeListParser { context: context }) .filter_map(Result::ok) .collect() } enum Void {} impl<'a> AtRuleParser for KeyframeListParser<'a> { type Prelude = Void; type AtRule = Arc<RwLock<Keyframe>>; } impl<'a> QualifiedRuleParser for KeyframeListParser<'a> { type Prelude = KeyframeSelector; type QualifiedRule = Arc<RwLock<Keyframe>>; fn parse_prelude(&mut self, input: &mut Parser) -> Result<Self::Prelude, ()> { let start = input.position(); match KeyframeSelector::parse(input) { Ok(sel) => Ok(sel), Err(()) => { let message = format!("Invalid keyframe rule: '{}'", input.slice_from(start)); log_css_error(input, start, &message, self.context); Err(()) } } } fn parse_block(&mut self, prelude: Self::Prelude, input: &mut Parser) -> Result<Self::QualifiedRule, ()> { let mut declarations = Vec::new(); let parser = KeyframeDeclarationParser { context: self.context, }; let mut iter = DeclarationListParser::new(input, parser); while let Some(declaration) = iter.next() { match declaration { Ok(d) => declarations.extend(d.into_iter().map(|d| (d, Importance::Normal))), Err(range) => { let pos = range.start; let message = format!("Unsupported keyframe property declaration: '{}'", iter.input.slice(range)); log_css_error(iter.input, pos, &*message, self.context); } } // `parse_important` is not called here, `!important` is not allowed in keyframe blocks. } Ok(Arc::new(RwLock::new(Keyframe { selector: prelude, block: Arc::new(RwLock::new(PropertyDeclarationBlock { declarations: declarations, important_count: 0, })), }))) } } struct KeyframeDeclarationParser<'a, 'b: 'a> { context: &'a ParserContext<'b>, } /// Default methods reject all at rules. 
impl<'a, 'b> AtRuleParser for KeyframeDeclarationParser<'a, 'b> { type Prelude = (); type AtRule = Vec<PropertyDeclaration>; } impl<'a, 'b> DeclarationParser for KeyframeDeclarationParser<'a, 'b> { type Declaration = Vec<PropertyDeclaration>; fn parse_value(&mut self, name: &str, input: &mut Parser) -> Result<Vec<PropertyDeclaration>, ()> { let mut results = Vec::new(); match PropertyDeclaration::parse(name, self.context, input, &mut results, true) { PropertyDeclarationParseResult::ValidOrIgnoredDeclaration => {} _ => return Err(()) } Ok(results) } }<|fim▁end|>
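Two small algorithms in keyframes.rs reward a closer look: selector parsing maps "from"/"to"/"N%" tokens to fractions in [0, 1], and from_keyframes sorts the steps and synthesizes missing 0%/100% frames from the computed values. A compact Python sketch of both (parse_selector and normalize_steps are hypothetical helpers, not Servo code):

def parse_selector(selector):
    out = []
    for token in (t.strip() for t in selector.split(",")):
        if token == "from":
            out.append(0.0)
        elif token == "to":
            out.append(1.0)
        elif token.endswith("%"):
            pct = float(token[:-1]) / 100.0
            if not 0.0 <= pct <= 1.0:
                raise ValueError(token)
            out.append(pct)
        else:
            raise ValueError(token)
    return out

def normalize_steps(starts):
    steps = sorted(starts)
    if steps[0] != 0.0:
        steps.insert(0, 0.0)   # autogenerated "computed values" frame
    if steps[-1] != 1.0:
        steps.append(1.0)      # likewise at the end
    return steps

print(normalize_steps(parse_selector("40%, 60%")))   # [0.0, 0.4, 0.6, 1.0]

Note that the sketch appends 1.0 for the trailing synthesized frame, which appears to be the intent of the final steps.push in from_keyframes above, even though that branch constructs KeyframePercentage::new(0.) again.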
<|file_name|>main.js<|end_file_name|><|fim▁begin|>// @flow /* eslint-env node */ /* global Restivus */ // Import Meteor :3 and Mongo :3 import { Meteor } from 'meteor/meteor' // Import fs and path to access the filesystem. And mime to get MIMETypes. import { readdirSync, lstatSync, readFileSync } from 'fs' import { join, sep, basename } from 'path' import { lookup } from 'mime' // Create the Meteor methods. Meteor.methods({ // This method enables the client to get the contents of any folder. getFolderContents (folder: string): Array<{ name: string, type: string }> { // Get folder contents and create initial variables the loop will write to. const folderContents: Array<string> = readdirSync(folder) const folderContentsWithTypes = [] let i // Define the function to get the type of a directory item. const getType = () => { if (lstatSync(`${folder}/${folderContents[i]}`).isDirectory()) { return 'folder' } return 'file' } // Start the loop to push folderContents. for (i = 0; i < folderContents.length; i += 1) { // Push objects to folderContentsWithTypes. folderContentsWithTypes.push({ name: folderContents[i], type: getType() }) } // Return folderContentsWithTypes. return folderContentsWithTypes }, // Pass it some paths and get a combination of those paths. joinPaths (...paths): string { return join(...paths) }, goUpOneDirectory (pathy: string): string { const pathyArray: Array<string> = pathy.split(sep) if (pathyArray[0] === '') { pathyArray[0] = '/' } const newArray = [] for (let x = 0; x < pathyArray.length - 1; x += 1) { newArray.push(pathyArray[x]) } return join(...newArray) } }) // Create a Restivus API. // flow-disable-next-line const Api = new Restivus({ prettyJson: true }) Api.addRoute('/file/:_filePath', { get () { // Get basename. const filename = basename(this.urlParams._filePath) <|fim▁hole|> // Set em' headers. this.response.writeHead({ // Filename. 'Content-disposition': `attachment; filename=${filename}`, // Type of file. 'Content-type': mimetype }) // Read the file and write data to response to client. const file = readFileSync(this.urlParams._filePath) this.response.write(file) // this.done() is quite self-explanatory. this.done() } })<|fim▁end|>
const mimetype = lookup(this.urlParams._filePath)
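The Restivus route above boils down to deriving two headers from the file path. A rough Python equivalent of that header construction (mimetypes and os are standard library; download_headers is a made-up name, and this ignores the Meteor/Restivus plumbing):

import mimetypes, os

def download_headers(file_path):
    mimetype, _ = mimetypes.guess_type(file_path)
    filename = os.path.basename(file_path)
    return {
        "Content-Disposition": "attachment; filename=%s" % filename,
        "Content-Type": mimetype or "application/octet-stream",
    }

print(download_headers("/tmp/report.pdf"))

One caveat about the original: Node's response.writeHead normally takes a status code as its first argument, so a hardened version of the route would likely pass one (e.g. 200) before the headers object.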
<|file_name|>pagination.rs<|end_file_name|><|fim▁begin|>pub trait PaginatedRequestor { type Item: 'static + Clone; type Error: 'static; fn next_page(&mut self) -> Result<Option<Vec<Self::Item>>, Self::Error>; } pub struct PaginatedIterator<'a, TR: PaginatedRequestor> { requestor: TR, current_page: Option<Vec<TR::Item>>, error: &'a mut Option<TR::Error> } impl<'a, TR: PaginatedRequestor> PaginatedIterator<'a, TR> {<|fim▁hole|> pub fn new(requestor: TR, error: &'a mut Option<TR::Error>) -> Self { PaginatedIterator { requestor: requestor, current_page: None, error: error } } fn advance_page(&mut self) { self.current_page = match self.requestor.next_page() { Ok(Some(p)) => Some(p.iter().cloned().rev().collect()), Ok(None) => None, Err(e) => { *self.error = Some(e); None } } } } impl<'a, TR: PaginatedRequestor> Iterator for PaginatedIterator<'a, TR> { type Item = TR::Item; fn next(&mut self) -> Option<Self::Item> { if self.current_page.is_none() { self.advance_page(); if self.current_page.is_none() { return None; } } match self.current_page.as_mut().unwrap().pop() { Some(i) => Some(i), None => { self.advance_page(); match self.current_page { Some(_) => self.next(), None => None } } } } }<|fim▁end|>
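pagination.rs implements a classic pattern: flatten a page-at-a-time API into a single item stream, refilling the current page lazily in next(). Because Rust's Iterator::next cannot return a Result, errors are routed out through the &'a mut Option<TR::Error> field instead. In Python the same idea collapses to a short generator, which can simply raise (sketch only; FakeRequestor is a stand-in for anything with a next_page() method returning a list, or None when exhausted):

def paginated(requestor):
    while True:
        page = requestor.next_page()   # None signals exhaustion
        if page is None:
            return
        yield from page

class FakeRequestor:
    def __init__(self, pages):
        self.pages = list(pages)
    def next_page(self):
        return self.pages.pop(0) if self.pages else None

print(list(paginated(FakeRequestor([[1, 2], [3]]))))   # [1, 2, 3]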
<|file_name|>service.py<|end_file_name|><|fim▁begin|># Copyright 2011 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from novaclient import exceptions as nova_exceptions from oslo_log import log as logging from trove.backup.models import Backup import trove.common.apischema as apischema from trove.common.auth import admin_context from trove.common import exception from trove.common.i18n import _ from trove.common import wsgi from trove.extensions.mgmt.instances import models from trove.extensions.mgmt.instances import views from trove.extensions.mgmt.instances.views import DiagnosticsView from trove.extensions.mgmt.instances.views import HwInfoView from trove.extensions.mysql import models as mysql_models from trove.instance import models as instance_models from trove.instance.service import InstanceController LOG = logging.getLogger(__name__) class MgmtInstanceController(InstanceController): """Controller for instance functionality.""" schemas = apischema.mgmt_instance @classmethod def get_action_schema(cls, body, action_schema): action_type = body.keys()[0] return action_schema.get(action_type, {}) @admin_context def index(self, req, tenant_id, detailed=False): """Return all instances.""" LOG.info(_("req : '%s'\n\n") % req) LOG.info(_("Indexing a database instance for tenant '%s'") % tenant_id) context = req.environ[wsgi.CONTEXT_KEY] deleted = None deleted_q = req.GET.get('deleted', '').lower() if deleted_q in ['true']: deleted = True<|fim▁hole|> try: instances = models.load_mgmt_instances( context, deleted=deleted, include_clustered=include_clustered) except nova_exceptions.ClientException as e: LOG.error(e) return wsgi.Result(str(e), 403) view_cls = views.MgmtInstancesView return wsgi.Result(view_cls(instances, req=req).data(), 200) @admin_context def show(self, req, tenant_id, id): """Return a single instance.""" LOG.info(_("req : '%s'\n\n") % req) LOG.info(_("Showing a database instance for tenant '%s'") % tenant_id) LOG.info(_("id : '%s'\n\n") % id) context = req.environ[wsgi.CONTEXT_KEY] deleted_q = req.GET.get('deleted', '').lower() include_deleted = deleted_q == 'true' server = models.DetailedMgmtInstance.load(context, id, include_deleted) root_history = mysql_models.RootHistory.load(context=context, instance_id=id) return wsgi.Result( views.MgmtInstanceDetailView( server, req=req, root_history=root_history).data(), 200) @admin_context def action(self, req, body, tenant_id, id): LOG.info("req : '%s'\n\n" % req) LOG.info("Committing an ACTION against instance %s for tenant '%s'" % (id, tenant_id)) if not body: raise exception.BadRequest(_("Invalid request body.")) context = req.environ[wsgi.CONTEXT_KEY] instance = models.MgmtInstance.load(context=context, id=id) _actions = { 'stop': self._action_stop, 'reboot': self._action_reboot, 'migrate': self._action_migrate, 'reset-task-status': self._action_reset_task_status } selected_action = None for key in body: if key in _actions: if selected_action is not None: msg = _("Only one action can be specified per 
request.") raise exception.BadRequest(msg) selected_action = _actions[key] else: msg = _("Invalid instance action: %s") % key raise exception.BadRequest(msg) if selected_action: return selected_action(context, instance, body) else: raise exception.BadRequest(_("Invalid request body.")) def _action_stop(self, context, instance, body): LOG.debug("Stopping MySQL on instance %s." % instance.id) instance.stop_db() return wsgi.Result(None, 202) def _action_reboot(self, context, instance, body): LOG.debug("Rebooting instance %s." % instance.id) instance.reboot() return wsgi.Result(None, 202) def _action_migrate(self, context, instance, body): LOG.debug("Migrating instance %s." % instance.id) LOG.debug("body['migrate']= %s" % body['migrate']) host = body['migrate'].get('host', None) instance.migrate(host) return wsgi.Result(None, 202) def _action_reset_task_status(self, context, instance, body): LOG.debug("Setting Task-Status to NONE on instance %s." % instance.id) instance.reset_task_status() LOG.debug("Failing backups for instance %s." % instance.id) Backup.fail_for_instance(instance.id) return wsgi.Result(None, 202) @admin_context def root(self, req, tenant_id, id): """Return the date and time root was enabled on an instance, if ever. """ LOG.info(_("req : '%s'\n\n") % req) LOG.info(_("Showing root history for tenant '%s'") % tenant_id) LOG.info(_("id : '%s'\n\n") % id) context = req.environ[wsgi.CONTEXT_KEY] try: instance_models.Instance.load(context=context, id=id) except exception.TroveError as e: LOG.error(e) return wsgi.Result(str(e), 404) rhv = views.RootHistoryView(id) reh = mysql_models.RootHistory.load(context=context, instance_id=id) if reh: rhv = views.RootHistoryView(reh.id, enabled=reh.created, user_id=reh.user) return wsgi.Result(rhv.data(), 200) @admin_context def hwinfo(self, req, tenant_id, id): """Return a single instance hardware info.""" LOG.info(_("req : '%s'\n\n") % req) LOG.info(_("Showing hardware info for instance '%s'") % id) context = req.environ[wsgi.CONTEXT_KEY] instance = models.MgmtInstance.load(context=context, id=id) hwinfo = instance.get_hwinfo() return wsgi.Result(HwInfoView(id, hwinfo).data(), 200) @admin_context def diagnostics(self, req, tenant_id, id): """Return a single instance diagnostics.""" LOG.info(_("req : '%s'\n\n") % req) LOG.info(_("Showing a instance diagnostics for instance '%s'") % id) LOG.info(_("id : '%s'\n\n") % id) context = req.environ[wsgi.CONTEXT_KEY] instance = models.MgmtInstance.load(context=context, id=id) diagnostics = instance.get_diagnostics() return wsgi.Result(DiagnosticsView(id, diagnostics).data(), 200) @admin_context def rpc_ping(self, req, tenant_id, id): """Checks if instance is reachable via rpc.""" LOG.info(_("req : '%s'\n\n") % req) LOG.info(_("id : '%s'\n\n") % id) context = req.environ[wsgi.CONTEXT_KEY] instance = models.MgmtInstance.load(context=context, id=id) instance.rpc_ping() return wsgi.Result(None, 204)<|fim▁end|>
elif deleted_q in ['false']: deleted = False clustered_q = req.GET.get('include_clustered', '').lower() include_clustered = clustered_q == 'true'
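The completed hole above is the tri-state filter from index(): the ?deleted= query parameter maps 'true' to True, 'false' to False, and anything else (including absence) to None so that no deletion filter is applied. As a standalone sketch (parse_deleted is a hypothetical helper, not part of Trove):

def parse_deleted(query_value):
    value = (query_value or "").lower()
    if value == "true":
        return True
    if value == "false":
        return False
    return None   # unrecognized or missing -> no filtering on deletion state

assert parse_deleted("TRUE") is True
assert parse_deleted("false") is False
assert parse_deleted(None) is None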
<|file_name|>amberrst.py<|end_file_name|><|fim▁begin|>############################################################################## # MDTraj: A Python Library for Loading, Saving, and Manipulating # Molecular Dynamics Trajectories. # Copyright 2012-2014 Stanford University and the Authors # # Authors: Jason Swails # Contributors: # # This code for reading Amber restart and inpcrd files was taken from ParmEd, # which is released under the GNU Lesser General Public License # # MDTraj is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 2.1 # of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with MDTraj. If not, see <http://www.gnu.org/licenses/>. ############################################################################## """ This module provides the ability to read Amber inpcrd/restart files as well as Amber NetCDF restart files. This code was taken from ParmEd and simplified by removing the functionality that is not needed. """ from __future__ import print_function, division from distutils.version import StrictVersion from math import ceil import os import warnings import numpy as np from mdtraj import version from mdtraj.formats.registry import FormatRegistry from mdtraj.utils import ensure_type, import_, in_units_of, cast_indices, six __all__ = ['AmberRestartFile', 'load_restrt', 'AmberNetCDFRestartFile', 'load_ncrestrt'] range = six.moves.range @FormatRegistry.register_loader('.rst7') @FormatRegistry.register_loader('.restrt') @FormatRegistry.register_loader('.inpcrd') def load_restrt(filename, top=None, atom_indices=None): """Load an AMBER ASCII restart/inpcrd file. Since this file doesn't contain information to specify the topology, you need to supply a topology Parameters ---------- filename : str name of the AMBER restart file top : {str, Trajectory, Topology} Pass in either the path to a file containing topology information (e.g., a PDB, an AMBER prmtop, or certain types of Trajectory objects) to supply the necessary topology information that is not present in these files atom_indices : array_like, optional If not None, then read only a subset of the atoms coordinates from the file. Returns ------- trajectory : md.Trajectory The resulting trajectory, as an md.Trajectory object See Also -------- mdtraj.AmberRestartFile : Low level interface to AMBER restart files """ from mdtraj.core.trajectory import _parse_topology topology = _parse_topology(top) atom_indices = cast_indices(atom_indices) with AmberRestartFile(filename) as f: return f.read_as_traj(topology, atom_indices=atom_indices) @FormatRegistry.register_fileobject('.rst7') @FormatRegistry.register_fileobject('.restrt') @FormatRegistry.register_fileobject('.inpcrd') class AmberRestartFile(object): """Interface for reading and writing AMBER ASCII restart files. This is a file-like object, that supports both reading and writing depending on the `mode` flag. It implements the context manager protocol, so you can also use it with the python 'with' statement. 
Parameters ---------- filename : str The name of the file to open mode : {'r', 'w'}, default='r' The mode in which to open the file. Valid options are 'r' or 'w' for 'read' or 'write' force_overwrite : bool, default=False In write mode, if a file named `filename` already exists, clobber it and overwrite it See Also -------- md.AmberNetCDFRestartFile : Low level interface to AMBER NetCDF-format restart files """ distance_unit = 'angstroms' def __init__(self, filename, mode='r', force_overwrite=True): self._closed = True self._mode = mode self._filename = filename if mode not in ('r', 'w'): raise ValueError("mode must be one of ['r', 'w']") if mode == 'w' and not force_overwrite and os.path.exists(filename): raise IOError('"%s" already exists' % filename) if mode == 'w': self._needs_initialization = True self._handle = open(filename, mode) self._closed = False elif mode == 'r': with open(filename, mode) as f: f.readline() words = f.readline().split() try: self._n_atoms = int(words[0]) except (IndexError, ValueError): raise TypeError('"%s" is not a recognized Amber restart' % filename) self._needs_initialization = False else: raise RuntimeError() @property def n_atoms(self): self._validate_open() if self._needs_initialization: raise IOError('The file is uninitialized') return self._n_atoms @property def n_frames(self): return 1 # always 1 frame def _validate_open(self): if self._closed: raise IOError('The file is closed.') def _parse(self, lines): """ Parses the file """ self._time = None try: words = lines[1].split() self._n_atoms = natom = int(words[0]) except (IndexError, ValueError): raise TypeError('not a recognized Amber restart') time = None if len(words) >= 2: time = float(words[1]) lines_per_frame = int(ceil(natom / 2)) if len(lines) == lines_per_frame + 2: hasbox = hasvels = False elif natom in (1, 2) and len(lines) == 4: # This is the _only_ case where line counting does not work -- there # is either 1 or 2 atoms and there are 4 lines. The 1st 3 lines are # the title, natom/time, and coordinates. The 4th are almost always # velocities since it's hard to have a periodic system this small. # However, velocities (which are scaled down by 20.445) have a ~0% # chance of being 60+, so we can pretty easily tell if the last line # has box dimensions and angles or velocities. I cannot envision a # plausible scenario where the detection here will ever fail line = lines[3] if natom == 1: tmp = [line[i:i+12] for i in range(0, 72, 12) if line[i:i+12].strip()] if len(tmp) == 3: hasvels = True hasbox = False elif len(tmp) == 6: hasbox = True hasvels = False else: raise TypeError('not a recognized Amber restart') else: # Ambiguous case tmp = [float(line[i:i+12]) >= 60.0 for i in range(0, 72, 12)] if any(tmp): hasbox = True hasvels = False else: hasvels = True hasbox = False elif len(lines) == lines_per_frame + 3: hasbox = True hasvels = False elif len(lines) == 2*lines_per_frame + 2: hasbox = False hasvels = True elif len(lines) == 2*lines_per_frame + 3: hasbox = hasvels = True else: raise TypeError('Badly formatted restart file. 
Has %d lines for '
                            '%d atoms' % (len(lines), natom))

        coordinates = np.zeros((1, natom, 3))
        if time is None:
            time = np.zeros(1)
        else:
            time = np.asarray((time,))
        # Fill the coordinates
        for i in range(lines_per_frame):
            line = lines[i+2] # Skip first two lines
            i2 = i * 2
            coordinates[0,i2,:] = [float(line[j:j+12]) for j in range(0,36,12)]
            i2 += 1
            if i2 < natom:
                coordinates[0,i2,:] = [float(line[j:j+12]) for j in range(36,72,12)]
        if hasbox:
            cell_lengths = np.zeros((1,3))
            cell_angles = np.zeros((1,3))
            line = lines[-1]
            cell_lengths[0,:] = [float(line[i:i+12]) for i in range(0,36,12)]
            cell_angles[0,:] = [float(line[i:i+12]) for i in range(36,72,12)]
        else:
            cell_lengths = cell_angles = None

        return coordinates, time, cell_lengths, cell_angles

    def read_as_traj(self, topology, atom_indices=None):
        """Read an AMBER ASCII restart file as a trajectory.

        Parameters
        ----------
        topology : Topology
            The system topology
        atom_indices : array_like, optional
            If not none, then read only a subset of the atoms coordinates from the
            file. This may be slightly slower than the standard read because it
            requires an extra copy, but will save memory.

        Returns
        -------
        trajectory : Trajectory
            A trajectory object with 1 frame created from the file.
        """
        from mdtraj.core.trajectory import Trajectory
        if atom_indices is not None:
            topology = topology.subset(atom_indices)

        xyz, time, cell_lengths, cell_angles = self.read(atom_indices=atom_indices)
        xyz = in_units_of(xyz, self.distance_unit, Trajectory._distance_unit,
                          inplace=True)
        cell_lengths = in_units_of(cell_lengths, self.distance_unit,
                                   Trajectory._distance_unit, inplace=True)
        return Trajectory(xyz=xyz, topology=topology, time=time,
                          unitcell_lengths=cell_lengths,
                          unitcell_angles=cell_angles)

    def read(self, atom_indices=None):
        """Read data from an AMBER ASCII restart file

        Parameters
        ----------
        atom_indices : np.ndarray, dtype=int, optional
            The specific indices of the atoms you'd like to retrieve. If not
            supplied, all of the atoms will be retrieved.

        Returns
        -------
        coordinates : np.ndarray, shape=(1, n_atoms, 3)
            The cartesian coordinates of the atoms, in units of angstroms. These
            files only ever contain 1 frame
        time : np.ndarray, None
            The time corresponding to the frame, in units of picoseconds, or None
            if no time information is present
        cell_lengths : np.ndarray, None
            The lengths (a, b, c) of the unit cell for the frame in angstroms, or
            None if the information is not present in the file
        cell_angles : np.ndarray, None
            The angles (\alpha, \beta, \gamma) defining the unit cell for each
            frame, or None if the information is not present in the file.
        """
        if self._mode != 'r':
            raise IOError('The file was opened in mode=%s. Reading is not '
                          'allowed.' % self._mode)

        with open(self._filename, 'r') as f:
            lines = f.readlines()

        coordinates, time, cell_lengths, cell_angles = self._parse(lines)

        if atom_indices is not None:
            atom_slice = ensure_type(atom_indices, dtype=int, ndim=1,
                                     name='atom_indices', warn_on_cast=False)
            if not np.all(atom_slice >= 0):
                raise ValueError('Entries in atom_slice must be >= 0')
            coordinates = coordinates[:, atom_slice, :]

        return coordinates, time, cell_lengths, cell_angles

    def write(self, coordinates, time=None, cell_lengths=None, cell_angles=None):
        """Write one frame of a MD trajectory to disk in the AMBER ASCII restart
        file format.

        Parameters
        ----------
        coordinates : np.ndarray, dtype=np.float32, shape=([1,] n_atoms, 3)
            The cartesian coordinates of each atom, in units of angstroms.
            Must be only a single frame (shape can be (1,N,3) or (N,3) where N is
            the number of atoms)
        time : array-like with 1 element or float, optional
            The time corresponding to this frame. If not specified, a
            placeholder of 0 will be written
        cell_lengths : np.ndarray, dtype=np.double, shape=([1,] 3)
            The lengths (a,b,c) of the unit cell for the frame in Angstroms
        cell_angles : np.ndarray, dtype=np.double, shape=([1,] 3)
            The angles between the unit cell vectors for the frame in Degrees
        """
        if self._mode != 'w':
            raise IOError('The file was opened in mode=%s. Writing not allowed.'
                          % self._mode)
        if not self._needs_initialization:
            # Must have already been written -- can only write once
            raise RuntimeError('restart file has already been written -- can '
                               'only write one frame to restart files.')
        # These are no-ops.
        # coordinates = in_units_of(coordinates, None, 'angstroms')
        # time = in_units_of(time, None, 'picoseconds')
        # cell_lengths = in_units_of(cell_lengths, None, 'angstroms')
        # cell_angles = in_units_of(cell_angles, None, 'degrees')

        # typecheck all of the input arguments rigorously
        coordinates = ensure_type(coordinates, np.float32, 3, 'coordinates',
                                  length=None, can_be_none=False,
                                  shape=(1,None,3), warn_on_cast=False,<|fim▁hole|>
                                  add_newaxis_on_deficient_ndim=True)
        n_frames, self._n_atoms = coordinates.shape[0], coordinates.shape[1]
        if n_frames != 1:
            raise ValueError('Can only write 1 frame to a restart file!')
        if time is not None:
            try:
                time = float(time)
            except TypeError:
                raise TypeError('Can only provide a single time')
        else:
            time = 0.0
        cell_lengths = ensure_type(cell_lengths, np.float64, 2, 'cell_lengths',
                                   length=1, can_be_none=True,
                                   warn_on_cast=False,
                                   add_newaxis_on_deficient_ndim=True)
        cell_angles = ensure_type(cell_angles, np.float64, 2, 'cell_angles',
                                  length=1, can_be_none=True,
                                  warn_on_cast=False,
                                  add_newaxis_on_deficient_ndim=True)
        if ((cell_lengths is None and cell_angles is not None) or
            (cell_lengths is not None and cell_angles is None)):
            prov, negl = 'cell_lengths', 'cell_angles'
            if cell_lengths is None:
                prov, negl = negl, prov
            raise ValueError('You provided the variable "%s" but did not '
                             'provide "%s". Either provide both or neither -- '
                             'one without the other is meaningless.' %
                             (prov, negl))

        # Mark the single allowed frame as written, so a second call to write()
        # trips the RuntimeError guard above.
        self._needs_initialization = False

        self._handle.write('Amber restart file (without velocities) written by '
                           'MDTraj\n')
        self._handle.write('%5d%15.7e\n' % (self._n_atoms, time))
        fmt = '%12.7f%12.7f%12.7f'
        for i in range(self._n_atoms):
            acor = coordinates[0, i, :]
            self._handle.write(fmt % (acor[0], acor[1], acor[2]))
            if i % 2 == 1:
                self._handle.write('\n')
        if self._n_atoms % 2 == 1:
            self._handle.write('\n')
        if cell_lengths is not None:
            self._handle.write(fmt % (cell_lengths[0,0], cell_lengths[0,1],
                                      cell_lengths[0,2]))
            self._handle.write(fmt % (cell_angles[0,0], cell_angles[0,1],
                                      cell_angles[0,2]) + '\n')
        self._handle.flush()

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        self.close()

    def close(self):
        if not self._closed and hasattr(self, '_handle'):
            self._handle.close()
        self._closed = True

    def __del__(self):
        self.close()

    def __len__(self):
        return 1 # All restarts have only 1 frame

@FormatRegistry.register_loader('.ncrst')
def load_ncrestrt(filename, top=None, atom_indices=None):
    """Load an AMBER NetCDF restart/inpcrd file.
Since this file doesn't contain information to specify the topology, you need to supply a topology Parameters ---------- filename : str name of the AMBER restart file top : {str, Trajectory, Topology} Pass in either the path to a file containing topology information (e.g., a PDB, an AMBER prmtop, or certain types of Trajectory objects) to supply the necessary topology information that is not present in these files atom_indices : array_like, optional If not None, then read only a subset of the atoms coordinates from the file. Returns ------- trajectory : md.Trajectory The resulting trajectory, as an md.Trajectory object See Also -------- mdtraj.AmberRestartFile : Low level interface to AMBER restart files """ from mdtraj.core.trajectory import _parse_topology topology = _parse_topology(top) atom_indices = cast_indices(atom_indices) with AmberNetCDFRestartFile(filename) as f: return f.read_as_traj(topology, atom_indices=atom_indices) @FormatRegistry.register_fileobject('.ncrst') class AmberNetCDFRestartFile(object): """Interface for reading and writing AMBER NetCDF files. This is a file-like object, that supports both reading and writing depending on the `mode` flag. It implements the context manager protocol, so you can also use it with the python 'with' statement. Parameters ---------- filename : str The name of the file to open mode : {'r', 'w'}, default='r' The mode in which to open the file. Valid options are 'r' or 'w' for 'read' or 'write' force_overwrite : bool, default=False In write mode, if a file named `filename` already exists, clobber it and overwrite it """ distance_unit = 'angstroms' def __init__(self, filename, mode='r', force_overwrite=False): self._closed = True self._mode = mode if StrictVersion(import_('scipy.version').short_version) < StrictVersion('0.12.0'): raise ImportError('MDTraj NetCDF support requires scipy>=0.12.0. ' 'You have %s' % import_('scipy.version').short_version) netcdf = import_('scipy.io').netcdf_file if mode not in ('r', 'w'): raise ValueError("mode must be one of ['r', 'w']") if mode == 'w' and not force_overwrite and os.path.exists(filename): raise IOError('"%s" already exists' % filename) # AMBER uses the NetCDF3 format, with 64 bit encodings, which for # scipy.io.netcdf_file is "version=2" self._handle = netcdf(filename, mode=mode, version=2) self._closed = False if mode == 'w': self._needs_initialization = True elif mode == 'r': self._needs_initialization = False else: raise RuntimeError() @property def n_atoms(self): self._validate_open() if self._needs_initialization: raise IOError('The file is uninitialized') return self._handle.dimensions['atom'] @property def n_frames(self): return 1 # always 1 frame def _validate_open(self): if self._closed: raise IOError('The file is closed.') def read_as_traj(self, topology, atom_indices=None): """Read an AMBER ASCII restart file as a trajectory. Parameters ---------- topology : Topology The system topology atom_indices : array_like, optional If not none, then read only a subset of the atoms coordinates from the file. This may be slightly slower than the standard read because it required an extra copy, but will save memory. Returns ------- trajectory : Trajectory A trajectory object with 1 frame created from the file. 
""" from mdtraj.core.trajectory import Trajectory if atom_indices is not None: topology = topology.subset(atom_indices) xyz, time, cell_lengths, cell_angles = self.read(atom_indices=atom_indices) xyz = in_units_of(xyz, self.distance_unit, Trajectory._distance_unit, inplace=True) cell_lengths = in_units_of(cell_lengths, self.distance_unit, Trajectory._distance_unit, inplace=True) return Trajectory(xyz=xyz, topology=topology, time=time, unitcell_lengths=cell_lengths, unitcell_angles=cell_angles) def read(self, atom_indices=None): """Read data from an AMBER NetCDF restart file Parameters ---------- atom_indices : np.ndarray, dtype=int, optional The specific indices of the atoms you'd like to retrieve. If not supplied, all of the atoms will be retrieved. Returns ------- coordinates : np.ndarray, shape=(1, n_atoms, 3) The cartesian coordinates of the atoms, in units of angstroms. These files only ever contain 1 frame time : np.ndarray, None The time corresponding to the frame, in units of picoseconds, or None if no time information is present cell_lengths : np.ndarray, None The lengths (a, b, c) of the unit cell for the frame in angstroms, or None if the information is not present in the file cell_angles : np.ndarray, None The angles (\alpha, \beta, \gamma) defining the unit cell for each frame, or None if the information is not present in the file. Notes ----- If the file is not a NetCDF file with the appropriate convention, a TypeError is raised. If variables that are needed do not exist or if illegal values are passed in for parameters, ValueError is raised. If I/O errors occur, IOError is raised. """ if self._mode != 'r': raise IOError('The file was opened in mode=%s. Reading is not ' 'allowed.' % self._mode) if 'coordinates' not in self._handle.variables: raise ValueError('No coordinates found in the NetCDF file.') # Check that conventions are correct try: conventions = self._handle.Conventions.decode('ascii') except UnicodeDecodeError: raise TypeError('NetCDF file does not have correct Conventions') try: convention_version = self._handle.ConventionVersion.decode('ascii') except UnicodeDecodeError: raise ValueError('NetCDF file does not have correct ConventionVersion') except AttributeError: raise TypeError('NetCDF file does not have ConventionVersion') if (not hasattr(self._handle, 'Conventions') or conventions != 'AMBERRESTART'): raise TypeError('NetCDF file does not have correct Conventions') if convention_version != '1.0': raise ValueError('NetCDF restart has ConventionVersion %s. Only ' 'Version 1.0 is supported.' 
                             % convention_version)

        if atom_indices is not None:
            atom_slice = ensure_type(atom_indices, dtype=int, ndim=1,
                                     name='atom_indices', warn_on_cast=False)
            if not np.all(atom_slice >= 0):
                raise ValueError('Entries in atom_slice must be >= 0')
            coordinates = self._handle.variables['coordinates'][atom_slice, :]
        else:
            coordinates = self._handle.variables['coordinates'][:, :]

        # Get unit cell parameters
        if 'cell_lengths' in self._handle.variables:
            cell_lengths = self._handle.variables['cell_lengths'][:]
        else:
            cell_lengths = None
        if 'cell_angles' in self._handle.variables:
            cell_angles = self._handle.variables['cell_angles'][:]
        else:
            cell_angles = None

        if cell_lengths is None and cell_angles is not None:
            warnings.warn('cell_angles were found, but no cell_lengths')
        if cell_lengths is not None and cell_angles is None:
            warnings.warn('cell_lengths were found, but no cell_angles')

        if 'time' in self._handle.variables:
            time = self._handle.variables['time'].getValue()
        else:
            time = None

        # scipy.io.netcdf variables are mem-mapped, and are only backed by valid
        # memory while the file handle is open. This is _bad_ because we need to
        # support the user opening the file, reading the coordinates, and then
        # closing it, and still having the coordinates be a valid memory
        # segment.
        # https://github.com/mdtraj/mdtraj/issues/440
        if coordinates is not None and not coordinates.flags['WRITEABLE']:
            coordinates = np.array(coordinates, copy=True)
        if cell_lengths is not None and not cell_lengths.flags['WRITEABLE']:
            cell_lengths = np.array(cell_lengths, copy=True)
        if cell_angles is not None and not cell_angles.flags['WRITEABLE']:
            cell_angles = np.array(cell_angles, copy=True)

        # The leading frame dimension is missing on all of these arrays since
        # restart files have only one frame. Reshape them to add this extra
        # dimension
        coordinates = coordinates[np.newaxis,:]
        if cell_lengths is not None:
            cell_lengths = cell_lengths[np.newaxis,:]
        if cell_angles is not None:
            cell_angles = cell_angles[np.newaxis,:]
        if time is not None:
            time = np.asarray([time,])

        return coordinates, time, cell_lengths, cell_angles

    def write(self, coordinates, time=None, cell_lengths=None, cell_angles=None):
        """Write one frame of a MD trajectory to disk in the AMBER NetCDF
        restart file format.

        Parameters
        ----------
        coordinates : np.ndarray, dtype=np.float32, shape=([1,] n_atoms, 3)
            The cartesian coordinates of each atom, in units of angstroms.
            Must be only a single frame (shape can be (1,N,3) or (N,3) where N is
            the number of atoms)
        time : array-like with 1 element or float, optional
            The time corresponding to this frame. If not specified, a
            placeholder of 0 will be written
        cell_lengths : np.ndarray, dtype=np.double, shape=([1,] 3)
            The lengths (a,b,c) of the unit cell for the frame in Angstroms
        cell_angles : np.ndarray, dtype=np.double, shape=([1,] 3)
            The angles between the unit cell vectors for the frame in Degrees

        Notes
        -----
        You must only have one frame to write to this file.
        """
        if self._mode != 'w':
            raise IOError('The file was opened in mode=%s. Writing not allowed.'
% self._mode) if not self._needs_initialization: # Must have already been written -- can only write once raise RuntimeError('NetCDF restart file has already been written ' '-- can only write one frame to restart files.') # these are no-ops # coordinates = in_units_of(coordinates, None, 'angstroms') # time = in_units_of(time, None, 'picoseconds') # cell_lengths = in_units_of(cell_lengths, None, 'angstroms') # cell_angles = in_units_of(cell_angles, None, 'degrees') # typecheck all of the input arguments rigorously coordinates = ensure_type(coordinates, np.float32, 3, 'coordinates', length=None, can_be_none=False, shape=(1,None,3), warn_on_cast=False, add_newaxis_on_deficient_ndim=True) n_frames, n_atoms = coordinates.shape[0], coordinates.shape[1] if n_frames != 1: raise ValueError('Can only write 1 frame to a restart file!') if time is not None: try: time = float(time) except TypeError: raise TypeError('Can only provide a single time') else: time = 0.0 cell_lengths = ensure_type(cell_lengths, np.float64, 2, 'cell_lengths', length=1, can_be_none=True, warn_on_cast=False, add_newaxis_on_deficient_ndim=True) cell_angles = ensure_type(cell_angles, np.float64, 2, 'cell_angles', length=1, can_be_none=True, warn_on_cast=False, add_newaxis_on_deficient_ndim=True) if ((cell_lengths is None and cell_angles is not None) or (cell_lengths is not None and cell_angles is None)): prov, negl = 'cell_lengths', 'cell_angles' if cell_lengths is None: prov, negl = negl, prov raise ValueError('You provided the variable "%s" but did not ' 'provide "%s". Either provide both or neither -- ' 'one without the other is meaningless.' % (prov, negl)) self._initialize_headers(n_atoms=n_atoms, set_coordinates=True, set_time=(time is not None), set_cell=(cell_lengths is not None)) self._needs_initialization = False # Write the time, coordinates, and box info if time is not None: self._handle.variables['time'][0] = float(time) self._handle.variables['coordinates'][:,:] = coordinates[0,:,:] if cell_lengths is not None: self._handle.variables['cell_angles'][:] = cell_angles[0,:] self._handle.variables['cell_lengths'][:] = cell_lengths[0,:] self.flush() def _initialize_headers(self, n_atoms, set_coordinates, set_time, set_cell): """Initialize the headers and convention properties of the NetCDF restart file """ ncfile = self._handle ncfile.Conventions = 'AMBERRESTART' ncfile.ConventionVersion = "1.0" ncfile.title = 'NetCDF Restart file written by MDTraj w/out velocities' ncfile.application = 'Omnia' ncfile.program = 'MDTraj' ncfile.programVersion = version.short_version # Dimensions ncfile.createDimension('spatial', 3) ncfile.createDimension('atom', n_atoms) if set_cell: ncfile.createDimension('cell_spatial', 3) ncfile.createDimension('label', 5) ncfile.createDimension('cell_angular', 3) if set_time: ncfile.createDimension('time', 1) # Variables v = ncfile.createVariable('spatial', 'c', ('spatial',)) v[:] = np.asarray(list('xyz')) v = ncfile.createVariable('coordinates', 'd', ('atom', 'spatial')) v.units = 'angstrom' if set_cell: v = ncfile.createVariable('cell_angular', 'c', ('cell_angular', 'label')) v[0] = np.asarray(list('alpha')) v[1] = np.asarray(list('beta ')) v[2] = np.asarray(list('gamma')) v = ncfile.createVariable('cell_spatial', 'c', ('cell_spatial',)) v[:] = np.asarray(list('abc')) v = ncfile.createVariable('cell_lengths', 'd', ('cell_spatial',)) v.units = 'angstrom' v = ncfile.createVariable('cell_angles', 'd', ('cell_angular',)) v.units = 'degree' if set_time: v = ncfile.createVariable('time', 'd', ('time',)) 
v.units = 'picoseconds' self.flush() def __enter__(self): return self def __exit__(self, *exc_info): self.close() def close(self): if not self._closed and hasattr(self, '_handle'): self._handle.close() self._closed = True def __del__(self): self.close() def __len__(self): return 1 # All restarts have only 1 frame def flush(self): self._validate_open() if self._mode != 'w': raise IOError('Cannot flush a file opened for reading') self._handle.flush()<|fim▁end|>
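# Editor's note: a minimal, hypothetical usage sketch for the AmberRestartFile class
# defined in the record above; it is not part of the original sample. The path
# 'system.rst7' and the 3-atom array are placeholder assumptions -- only the
# __init__/read/write API shown above is exercised.
import numpy as np

xyz = np.zeros((1, 3, 3), dtype=np.float32)  # one frame, 3 atoms, in angstroms
with AmberRestartFile('system.rst7', mode='w', force_overwrite=True) as f:
    f.write(xyz, time=0.0)  # restart files hold exactly one frame

with AmberRestartFile('system.rst7', mode='r') as f:
    coords, time, cell_lengths, cell_angles = f.read()
    assert coords.shape == (1, 3, 3)  # no box info was written, so lengths/angles are None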
<|file_name|>global-helpers.js<|end_file_name|><|fim▁begin|>import {Utils} from "../service/Utils"; Template.registerHelper( "displayHours", function (date) { return new moment(date).format("H[h]"); } ); Template.registerHelper( "displayHoursMinute", function (date) { return new moment(date).format("H[h]mm"); } ); Template.registerHelper( "displayHoursMinuteSeconde", function (date) { return new moment(date).format("H[h]mm ss[sec]"); } ); Template.registerHelper( "displayDateTime", function (date) { return new moment(date).format("ddd DD MMM HH[h]mm"); } ); Template.registerHelper( "displayDay", function (date) { return new moment(date).format("DD MMM"); } ); Template.registerHelper( "skillLabel", function () { return Skills.findOne({_id: this.toString()}).label; } ); Template.registerHelper( "onUpdateError", function (error) { return function (error) { Utils.onUpdateError(error.reason) } }); Template.registerHelper( "onUpdateSuccess", function (message) { return function (message) { Utils.onUpdateSuccess(message); } }); Template.registerHelper( "onDeleteError", function (error) { return function (error) { Utils.onUpdateError(error.reason) } }); Template.registerHelper( "onDeleteSuccess", function (message) { return function (message) { Utils.onUpdateSuccess(message); } }); Template.registerHelper( "allTeams", function () { return Teams.find(); } ); Template.registerHelper('equals', function (a, b) { return a === b; }); Template.registerHelper('adds', function (a, b) { return a + b; }); Template.registerHelper( "allOptionsTeams", function () { return Teams.find({ name: { $ne: ASSIGNMENTREADYTEAM } }); } ); Template.registerHelper( "allSkills", function (userId) { var userTeams = Meteor.users.findOne({_id: userId}).teams; return Skills.find({ teams: { $in: userTeams }<|fim▁hole|> }); } ); Template.registerHelper('ifNotEmpty', function (item, options) { if (item) { if (item instanceof Array) { if (item.length > 0) { return options.fn(this); } else { return options.inverse(this); } } else { if (item.fetch().length > 0) { return options.fn(this); } else { return options.inverse(this); } } } else { return options.inverse(this); } }); Template.registerHelper("equals", function (a, b) { return a === b; } ); Template.registerHelper("isMore", function (a, b) { return a > b; } ); Template.registerHelper("displayValidationState", function (state) { return DisplayedValidationState[state]; }); Template.registerHelper("RolesEnum", function () { return RolesEnum; }); Template.registerHelper( "currentUserId", function () { return Meteor.users.findOne({_id: Meteor.userId()})._id; } ); Template.registerHelper( "isCurrentUserTheOneLogged", function (currentUserId) { return currentUserId === Meteor.users.findOne({_id: Meteor.userId()})._id; } ) Template.registerHelper( "currentUserIdObject", function () { return { _id: Meteor.users.findOne({_id: Meteor.userId()})._id } } ); Template.registerHelper("cursorLength", function (array) { return array.fetch().length; } );<|fim▁end|>
<|file_name|>ui_role_list_item.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python """Test to verify presentation of selectable list items.""" from macaroon.playback import * import utils sequence = MacroSequence() sequence.append(KeyComboAction("<Control><Shift>n")) sequence.append(KeyComboAction("Tab")) sequence.append(utils.StartRecordingAction()) sequence.append(KeyComboAction("Tab")) sequence.append(utils.AssertPresentationAction( "1. Tab to list item", ["KNOWN ISSUE: We are presenting nothing here", ""]))<|fim▁hole|>sequence.append(utils.AssertPresentationAction( "2. Right to next list item", ["BRAILLE LINE: 'soffice application Template Manager frame Template Manager dialog Drawings page tab list Presentation Backgrounds list item'", " VISIBLE: 'Presentation Backgrounds list it', cursor=1", "SPEECH OUTPUT: 'Presentation Backgrounds'"])) sequence.append(utils.StartRecordingAction()) sequence.append(KeyComboAction("Left")) sequence.append(utils.AssertPresentationAction( "3. Left to previous list item", ["BRAILLE LINE: 'soffice application Template Manager frame Template Manager dialog Drawings page tab list My Templates list item'", " VISIBLE: 'My Templates list item', cursor=1", "SPEECH OUTPUT: 'My Templates'"])) sequence.append(utils.AssertionSummaryAction()) sequence.start()<|fim▁end|>
sequence.append(utils.StartRecordingAction()) sequence.append(KeyComboAction("Right"))
<|file_name|>Oracle12cGetGeneratedKeysDelegate.java<|end_file_name|><|fim▁begin|>/* * Hibernate, Relational Persistence for Idiomatic Java * * License: GNU Lesser General Public License (LGPL), version 2.1 or later. * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>. */ package org.hibernate.dialect.identity; import java.sql.PreparedStatement; import java.sql.SQLException; import org.hibernate.HibernateException; import org.hibernate.dialect.Dialect; import org.hibernate.dialect.identity.GetGeneratedKeysDelegate; import org.hibernate.engine.spi.SessionImplementor; import org.hibernate.id.PostInsertIdentityPersister; /** * @author Andrea Boriero */ public class Oracle12cGetGeneratedKeysDelegate extends GetGeneratedKeysDelegate { private String[] keyColumns; public Oracle12cGetGeneratedKeysDelegate(PostInsertIdentityPersister persister, Dialect dialect) { super( persister, dialect ); this.keyColumns = getPersister().getRootTableKeyColumnNames(); if ( keyColumns.length > 1 ) { throw new HibernateException( "Identity generator cannot be used with multi-column keys" ); } } @Override protected PreparedStatement prepare(String insertSQL, SessionImplementor session) throws SQLException { return session<|fim▁hole|> .getStatementPreparer() .prepareStatement( insertSQL, keyColumns ); } }<|fim▁end|>
.getJdbcCoordinator()
<|file_name|>host_path_windows.go<|end_file_name|><|fim▁begin|>/* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package host_path import ( "fmt" "os" "syscall" "k8s.io/api/core/v1" ) func (dftc *defaultFileTypeChecker) getFileType(info os.FileInfo) (v1.HostPathType, error) { mode := info.Sys().(*syscall.Win32FileAttributeData).FileAttributes<|fim▁hole|> return v1.HostPathSocket, nil case syscall.S_IFBLK: return v1.HostPathBlockDev, nil case syscall.S_IFCHR: return v1.HostPathCharDev, nil } return "", fmt.Errorf("only recognise socket, block device and character device") }<|fim▁end|>
switch mode & syscall.S_IFMT { case syscall.S_IFSOCK:
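# Editor's note: the records in this dump follow the fill-in-the-middle layout
# <|file_name|>NAME<|end_file_name|><|fim▁begin|>PREFIX<|fim▁hole|>SUFFIX<|fim▁end|>COMPLETION.
# Below is an illustrative Python sketch, using a hypothetical record, of how one
# record can be spliced back into the completed source file; the helper is not part
# of the dataset itself.
def splice_fim(record):
    header, _, rest = record.partition('<|end_file_name|>')
    name = header.replace('<|file_name|>', '')
    body = rest.replace('<|fim▁begin|>', '', 1)
    prefix, _, tail = body.partition('<|fim▁hole|>')
    suffix, _, completion = tail.partition('<|fim▁end|>')
    return name, prefix + completion + suffix

name, source = splice_fim('<|file_name|>hello.py<|end_file_name|>'
                          '<|fim▁begin|>def greet():\n<|fim▁hole|>\nprint(greet())<|fim▁end|>'
                          "    return 'hi'")
assert name == 'hello.py'
assert source == "def greet():\n    return 'hi'\nprint(greet())"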
<|file_name|>server.js<|end_file_name|><|fim▁begin|>process.env.NODE_ENV = process.env.NODE_ENV || 'development' let express = require('express'); let path = require('path'); let favicon = require('serve-favicon'); let logger = require('morgan'); let cookieParser = require('cookie-parser'); let bodyParser = require('body-parser'); let compression = require('compression'); let session = require('express-session'); let index = require('./routes/index'); let users = require('./routes/users'); let server = express(); server.set('views', path.join(__dirname, 'views')); server.set('view engine', 'hbs'); // uncomment after placing your favicon in /public //server.use(favicon(path.join(__dirname, 'public', 'favicon.ico'))); server.use(logger('dev')); server.use(bodyParser.json()); server.use(bodyParser.urlencoded({ extended: false })); server.use(cookieParser()); server.use(express.static(path.join(__dirname, 'public'))); server.use(compression())<|fim▁hole|> // express-sessions setup server.use(session({ secret: 'blueberry pie', resave: false, saveUninitialized: true, cookie: { maxAge: 600000 } // db: knex })) server.use('/', index); server.use('/users', users); // catch 404 and forward to error handler server.use((req, res, next) => { let err = new Error('Not Found'); err.status = 404; next(err); }); // error handlers // development error handler // will print stacktrace if (server.get('env') === 'development') { server.use((err, req, res, next) => { res.status(err.status || 500); res.render('error', { message: err.message, error: err }); }); } // production error handler // no stacktraces leaked to user server.use((err, req, res, next) => { res.status(err.status || 500); res.render('error', { message: err.message, error: {} }); }); module.exports = server;<|fim▁end|>
<|file_name|>deep.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. fn f(x: isize) -> isize { if x == 1 { return 1; } else { let y: isize = 1 + f(x - 1); return y; }<|fim▁hole|>} pub fn main() { assert_eq!(f(5000), 5000); }<|fim▁end|>
<|file_name|>main.go<|end_file_name|><|fim▁begin|>// A golang implementation of the acme factory examples from python and Java APIs // // The python implementation is in mmv.py in PCP core // (https://github.com/performancecopilot/pcp/blob/master/src/python/pcp/mmv.py#L21-L70) // // The Java implementation is in examples in parfait core // (https://github.com/performancecopilot/parfait/tree/master/examples/acme) // // To run the python version of the example that exits do // go run examples/acme/main.go<|fim▁hole|>// go run examples/acme/main.go --forever package main import ( "flag" "fmt" "log" "math/rand" "time" "github.com/performancecopilot/speed/v4" ) var runforever bool func init() { flag.BoolVar(&runforever, "forever", false, "if enabled, runs the forever running version of this example") } func main() { flag.Parse() if runforever { forever() } else { serial() } } func serial() { instances := []string{"Anvils", "Rockets", "Giant_Rubber_Bands"} indom, err := speed.NewPCPInstanceDomain( "Acme Products", instances, "Acme products", "Most popular products produced by the Acme Corporation", ) if err != nil { log.Fatal("Could not create indom, error: ", err) } countmetric, err := speed.NewPCPInstanceMetric( speed.Instances{ "Anvils": 0, "Rockets": 0, "Giant_Rubber_Bands": 0, }, "products.count", indom, speed.Uint64Type, speed.CounterSemantics, speed.OneUnit, "Acme factory product throughput", `Monotonic increasing counter of products produced in the Acme Corporation factory since starting the Acme production application. Quality guaranteed.`, ) if err != nil { log.Fatal("Could not create countmetric, error: ", err) } timemetric, err := speed.NewPCPInstanceMetric( speed.Instances{ "Anvils": 0, "Rockets": 0, "Giant_Rubber_Bands": 0, }, "products.time", indom, speed.Uint64Type, speed.CounterSemantics, speed.MicrosecondUnit, "Machine time spent producing Acme products", ) if err != nil { log.Fatal("Could not create timemetric, error: ", err) } client, err := speed.NewPCPClient("acme") if err != nil { log.Fatal("Could not create client, error: ", err) } client.MustRegisterIndom(indom) client.MustRegister(countmetric) client.MustRegister(timemetric) client.MustStart() defer client.MustStop() time.Sleep(time.Second * 5) err = countmetric.SetInstance(42, "Anvils") if err != nil { log.Fatal("Could not set countmetric[\"Anvils\"], error: ", err) } time.Sleep(time.Second * 5) } // ProductBuilder is based on ProductBuilder in the parfait example // https://github.com/performancecopilot/parfait/blob/master/examples/acme/src/main/java/ProductBuilder.java type ProductBuilder struct { completed speed.Counter totalTime speed.Gauge bound int name string } // NewProductBuilder creates a new instance of ProductBuilder func NewProductBuilder(name string, client speed.Client) *ProductBuilder { completed, err := speed.NewPCPCounter(0, "products."+name+".count") if err != nil { log.Fatal("Could not create completed, error: ", err) } totalTime, err := speed.NewPCPGauge(0, "products."+name+".time") if err != nil { log.Fatal("Could not create totalTime, error: ", err) } client.MustRegister(completed) client.MustRegister(totalTime) return &ProductBuilder{ name: name, bound: 500, completed: completed, totalTime: totalTime, } } // Difficulty sets the upper bound on the sleep time func (p *ProductBuilder) Difficulty(bound int) { p.bound = bound } // Build sleeps for a random time, then adds that value to totalTime func (p *ProductBuilder) Build() { elapsed := rand.Intn(p.bound) time.Sleep(time.Duration(elapsed) * 
time.Millisecond) p.totalTime.MustInc(float64(elapsed)) p.completed.Up() } // Start starts an infinite loop calling Build and logging the value of completed func (p *ProductBuilder) Start() { for { p.Build() fmt.Printf("Built %d %s\n", p.completed.Val(), p.name) } } func forever() { client, err := speed.NewPCPClient("acme") if err != nil { log.Fatal("Could not create client, error: ", err) } rockets := NewProductBuilder("Rockets", client) anvils := NewProductBuilder("Anvils", client) gbrs := NewProductBuilder("Giant_Rubber_Bands", client) rockets.Difficulty(4500) anvils.Difficulty(1500) gbrs.Difficulty(2500) go func() { rockets.Start() }() go func() { anvils.Start() }() go func() { gbrs.Start() }() client.MustStart() defer client.MustStop() // block forever // TODO: maybe use signal.Notify and shut down gracefully select {} }<|fim▁end|>
// // To run the java version of the example that runs forever, simply add a --forever // flag
<|file_name|>RawSQLChange.java<|end_file_name|><|fim▁begin|>package liquibase.change.core; import liquibase.change.AbstractSQLChange; import liquibase.change.DatabaseChange; import liquibase.change.ChangeMetaData; /** * Allows execution of arbitrary SQL. This change can be used when existing changes are either don't exist, * are not flexible enough, or buggy. */ @DatabaseChange(name="sql", description = "The 'sql' tag allows you to specify whatever sql you want. It is useful for complex changes that aren't supported through Liquibase's automated refactoring tags and to work around bugs and limitations of Liquibase. The SQL contained in the sql tag can be multi-line.\n" + "\n" + "The createProcedure refactoring is the best way to create stored procedures.\n" + "\n" + "The 'sql' tag can also support multiline statements in the same file. Statements can either be split using a ; at the end of the last line of the SQL or a go on its own on the line between the statements can be used.Multiline SQL statements are also supported and only a ; or go statement will finish a statement, a new line is not enough. Files containing a single statement do not need to use a ; or go.\n" + "\n" + "The sql change can also contain comments of either of the following formats:\n" + "\n" + "A multiline comment that starts with /* and ends with */.\n" + "A single line comment starting with <space>--<space> and finishing at the end of the line\n" + "Note: By default it will attempt to split statements on a ';' or 'go' at the end of lines. Because of this, if you have a comment or some other non-statement ending ';' or 'go', don't have it at the end of a line or you will get invalid SQL.", priority = ChangeMetaData.PRIORITY_DEFAULT) public class RawSQLChange extends AbstractSQLChange { private String comment; public RawSQLChange() { } <|fim▁hole|> } public String getComment() { return comment; } public void setComment(String comment) { this.comment = comment; } public String getConfirmationMessage() { return "Custom SQL executed"; } }<|fim▁end|>
public RawSQLChange(String sql) { setSql(sql);
<|file_name|>parser.py<|end_file_name|><|fim▁begin|># coding=utf8 """ Parser for todo format string.<|fim▁hole|> from todo.parser import parser parser.parse(string) # return an Todo instance """ from models import Task from models import Todo from ply import lex from ply import yacc class TodoLexer(object): """ Lexer for Todo format string. Tokens ID e.g. '1.' DONE e.g. '(x)' TASK e.g. 'This is a task' """ tokens = ( "ID", "DONE", "TASK", ) t_ignore = "\x20\x09" # ignore spaces and tabs def t_ID(self, t): r'\d+\.([uU]|[lL]|[uU][lL]|[lL][uU])?' t.value = int(t.value[:-1]) return t def t_DONE(self, t): r'(\(x\))' return t def t_TASK(self, t): r'((?!\(x\))).+' return t def t_newline(self, t): r'\n+' t.lexer.lineno += len(t.value) def t_error(self, t): raise SyntaxError( "Illegal character: '%s' at Line %d" % (t.value[0], t.lineno) ) def __init__(self): self.lexer = lex.lex(module=self) class TodoParser(object): """ Parser for Todo format string, works with a todo lexer. Parse string to Python list todo_str = "1. (x) Write email to tom" TodoParser().parse(todo_str) """ tokens = TodoLexer.tokens def p_error(self, p): if p: raise SyntaxError( "Character '%s' at line %d" % (p.value[0], p.lineno) ) else: raise SyntaxError("SyntaxError at EOF") def p_start(self, p): "start : translation_unit" p[0] = self.todo def p_translation_unit(self, p): """ translation_unit : translate_task | translation_unit translate_task | """ pass def p_translation_task(self, p): """ translate_task : ID DONE TASK | ID TASK """ if len(p) == 4: done = True content = p[3] elif len(p) == 3: done = False content = p[2] task = Task(p[1], content, done) self.todo.append(task) def __init__(self): self.parser = yacc.yacc(module=self, debug=0, write_tables=0) def parse(self, data): # reset list self.todo = Todo() return self.parser.parse(data) lexer = TodoLexer() # build lexer parser = TodoParser() # build parser<|fim▁end|>
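# Editor's note: a short, hypothetical usage sketch for the module-level `parser`
# defined in the record above. It assumes the Todo object from `models` (not shown
# here) is iterable and that Task exposes id/content/done attributes, consistent
# with how TodoParser.p_translation_task constructs Task(p[1], content, done).
todo_str = """1. (x) Write email to tom
2. Review the patch queue"""

todo = parser.parse(todo_str)
for task in todo:
    status = 'done' if task.done else 'open'
    print(task.id, status, task.content)
# Expected output (under the attribute-name assumption above):
# 1 done Write email to tom
# 2 open Review the patch queue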
<|file_name|>nevers.rs<|end_file_name|><|fim▁begin|>use itertools::Itertools; use malachite_base::nevers::nevers; #[test] fn test_nevers() {<|fim▁hole|><|fim▁end|>
assert_eq!(nevers().collect_vec(), &[]); }
<|file_name|>GetDetectorModelAnalysisResultsRequest.java<|end_file_name|><|fim▁begin|>/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.iotevents.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.AmazonWebServiceRequest; /** * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/iotevents-2018-07-27/GetDetectorModelAnalysisResults" * target="_top">AWS API Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class GetDetectorModelAnalysisResultsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable { /** * <p> * The ID of the analysis result that you want to retrieve. * </p> */ private String analysisId; /** * <p> * The token that you can use to return the next set of results. * </p> */ private String nextToken; /** * <p> * The maximum number of results to be returned per request. * </p> */ private Integer maxResults; /** * <p> * The ID of the analysis result that you want to retrieve. * </p> * * @param analysisId * The ID of the analysis result that you want to retrieve. */ public void setAnalysisId(String analysisId) { this.analysisId = analysisId; } /** * <p> * The ID of the analysis result that you want to retrieve. * </p> * * @return The ID of the analysis result that you want to retrieve. */ public String getAnalysisId() { return this.analysisId; } /** * <p> * The ID of the analysis result that you want to retrieve. * </p> * * @param analysisId * The ID of the analysis result that you want to retrieve. * @return Returns a reference to this object so that method calls can be chained together. */ public GetDetectorModelAnalysisResultsRequest withAnalysisId(String analysisId) { setAnalysisId(analysisId); return this; } /** * <p> * The token that you can use to return the next set of results. * </p> * * @param nextToken * The token that you can use to return the next set of results. */ public void setNextToken(String nextToken) { this.nextToken = nextToken; } /** * <p> * The token that you can use to return the next set of results. * </p> * * @return The token that you can use to return the next set of results. */ public String getNextToken() { return this.nextToken; } /** * <p> * The token that you can use to return the next set of results. * </p> * * @param nextToken * The token that you can use to return the next set of results. * @return Returns a reference to this object so that method calls can be chained together. */ public GetDetectorModelAnalysisResultsRequest withNextToken(String nextToken) { setNextToken(nextToken); return this; } /** * <p> * The maximum number of results to be returned per request. * </p> * * @param maxResults * The maximum number of results to be returned per request. */ public void setMaxResults(Integer maxResults) { this.maxResults = maxResults; } /** * <p> * The maximum number of results to be returned per request. * </p> * * @return The maximum number of results to be returned per request. 
*/ public Integer getMaxResults() { return this.maxResults; } /** * <p> * The maximum number of results to be returned per request. * </p> * * @param maxResults * The maximum number of results to be returned per request. * @return Returns a reference to this object so that method calls can be chained together. */ public GetDetectorModelAnalysisResultsRequest withMaxResults(Integer maxResults) { setMaxResults(maxResults); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getAnalysisId() != null) sb.append("AnalysisId: ").append(getAnalysisId()).append(","); if (getNextToken() != null) sb.append("NextToken: ").append(getNextToken()).append(","); if (getMaxResults() != null) sb.append("MaxResults: ").append(getMaxResults()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof GetDetectorModelAnalysisResultsRequest == false) return false; GetDetectorModelAnalysisResultsRequest other = (GetDetectorModelAnalysisResultsRequest) obj; if (other.getAnalysisId() == null ^ this.getAnalysisId() == null) return false; if (other.getAnalysisId() != null && other.getAnalysisId().equals(this.getAnalysisId()) == false) return false; if (other.getNextToken() == null ^ this.getNextToken() == null) return false; if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false) return false; if (other.getMaxResults() == null ^ this.getMaxResults() == null) return false; if (other.getMaxResults() != null && other.getMaxResults().equals(this.getMaxResults()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getAnalysisId() == null) ? 0 : getAnalysisId().hashCode()); hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode()); hashCode = prime * hashCode + ((getMaxResults() == null) ? 0 : getMaxResults().hashCode()); return hashCode;<|fim▁hole|> return (GetDetectorModelAnalysisResultsRequest) super.clone(); } }<|fim▁end|>
} @Override public GetDetectorModelAnalysisResultsRequest clone() {
<|file_name|>protobufs.rs<|end_file_name|><|fim▁begin|>// This file is generated. Do not edit // @generated // https://github.com/Manishearth/rust-clippy/issues/702 #![allow(unknown_lints)] #![allow(clippy)] #![cfg_attr(rustfmt, rustfmt_skip)] #![allow(box_pointers)] #![allow(dead_code)] #![allow(non_camel_case_types)] #![allow(non_snake_case)] #![allow(non_upper_case_globals)] #![allow(trivial_casts)] #![allow(unsafe_code)] #![allow(unused_imports)] #![allow(unused_results)] use protobuf::Message as Message_imported_for_functions; use protobuf::ProtobufEnum as ProtobufEnum_imported_for_functions; #[derive(Clone,Default)] pub struct TextMessage { // message fields id: ::protobuf::SingularField<::std::string::String>, sender: ::protobuf::SingularField<::std::string::String>, text: ::protobuf::SingularField<::std::string::String>, // special fields unknown_fields: ::protobuf::UnknownFields, cached_size: ::std::cell::Cell<u32>, } // see codegen.rs for the explanation why impl Sync explicitly unsafe impl ::std::marker::Sync for TextMessage {} impl TextMessage { pub fn new() -> TextMessage { ::std::default::Default::default() } pub fn default_instance() -> &'static TextMessage { static mut instance: ::protobuf::lazy::Lazy<TextMessage> = ::protobuf::lazy::Lazy { lock: ::protobuf::lazy::ONCE_INIT, ptr: 0 as *const TextMessage, }; unsafe { instance.get(|| { TextMessage { id: ::protobuf::SingularField::none(), sender: ::protobuf::SingularField::none(), text: ::protobuf::SingularField::none(), unknown_fields: ::protobuf::UnknownFields::new(), cached_size: ::std::cell::Cell::new(0), } }) } } // required string id = 1; pub fn clear_id(&mut self) { self.id.clear(); } pub fn has_id(&self) -> bool { self.id.is_some() } // Param is passed by value, moved pub fn set_id(&mut self, v: ::std::string::String) { self.id = ::protobuf::SingularField::some(v); } // Mutable pointer to the field. // If field is not initialized, it is initialized with default value first. pub fn mut_id(&mut self) -> &mut ::std::string::String { if self.id.is_none() { self.id.set_default(); }; self.id.as_mut().unwrap() } // Take field pub fn take_id(&mut self) -> ::std::string::String { self.id.take().unwrap_or_else(|| ::std::string::String::new()) } pub fn get_id(&self) -> &str { match self.id.as_ref() { Some(v) => &v, None => "", } } // required string sender = 2; pub fn clear_sender(&mut self) { self.sender.clear(); } pub fn has_sender(&self) -> bool { self.sender.is_some() } // Param is passed by value, moved pub fn set_sender(&mut self, v: ::std::string::String) { self.sender = ::protobuf::SingularField::some(v); } // Mutable pointer to the field. // If field is not initialized, it is initialized with default value first. pub fn mut_sender(&mut self) -> &mut ::std::string::String { if self.sender.is_none() { self.sender.set_default(); }; self.sender.as_mut().unwrap() } // Take field pub fn take_sender(&mut self) -> ::std::string::String {<|fim▁hole|> } pub fn get_sender(&self) -> &str { match self.sender.as_ref() { Some(v) => &v, None => "", } } // required string text = 3; pub fn clear_text(&mut self) { self.text.clear(); } pub fn has_text(&self) -> bool { self.text.is_some() } // Param is passed by value, moved pub fn set_text(&mut self, v: ::std::string::String) { self.text = ::protobuf::SingularField::some(v); } // Mutable pointer to the field. // If field is not initialized, it is initialized with default value first. 
pub fn mut_text(&mut self) -> &mut ::std::string::String { if self.text.is_none() { self.text.set_default(); }; self.text.as_mut().unwrap() } // Take field pub fn take_text(&mut self) -> ::std::string::String { self.text.take().unwrap_or_else(|| ::std::string::String::new()) } pub fn get_text(&self) -> &str { match self.text.as_ref() { Some(v) => &v, None => "", } } } impl ::protobuf::Message for TextMessage { fn is_initialized(&self) -> bool { if self.id.is_none() { return false; }; if self.sender.is_none() { return false; }; if self.text.is_none() { return false; }; true } fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream) -> ::protobuf::ProtobufResult<()> { while !try!(is.eof()) { let (field_number, wire_type) = try!(is.read_tag_unpack()); match field_number { 1 => { try!(::protobuf::rt::read_singular_string_into(wire_type, is, &mut self.id)); }, 2 => { try!(::protobuf::rt::read_singular_string_into(wire_type, is, &mut self.sender)); }, 3 => { try!(::protobuf::rt::read_singular_string_into(wire_type, is, &mut self.text)); }, _ => { try!(::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())); }, }; } ::std::result::Result::Ok(()) } // Compute sizes of nested messages #[allow(unused_variables)] fn compute_size(&self) -> u32 { let mut my_size = 0; for value in self.id.iter() { my_size += ::protobuf::rt::string_size(1, &value); }; for value in self.sender.iter() { my_size += ::protobuf::rt::string_size(2, &value); }; for value in self.text.iter() { my_size += ::protobuf::rt::string_size(3, &value); }; my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields()); self.cached_size.set(my_size); my_size } fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream) -> ::protobuf::ProtobufResult<()> { if let Some(v) = self.id.as_ref() { try!(os.write_string(1, &v)); }; if let Some(v) = self.sender.as_ref() { try!(os.write_string(2, &v)); }; if let Some(v) = self.text.as_ref() { try!(os.write_string(3, &v)); }; try!(os.write_unknown_fields(self.get_unknown_fields())); ::std::result::Result::Ok(()) } fn get_cached_size(&self) -> u32 { self.cached_size.get() } fn get_unknown_fields(&self) -> &::protobuf::UnknownFields { &self.unknown_fields } fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields { &mut self.unknown_fields } fn type_id(&self) -> ::std::any::TypeId { ::std::any::TypeId::of::<TextMessage>() } fn as_any(&self) -> &::std::any::Any { self as &::std::any::Any } fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor { ::protobuf::MessageStatic::descriptor_static(None::<Self>) } } impl ::protobuf::MessageStatic for TextMessage { fn new() -> TextMessage { TextMessage::new() } fn descriptor_static(_: ::std::option::Option<TextMessage>) -> &'static ::protobuf::reflect::MessageDescriptor { static mut descriptor: ::protobuf::lazy::Lazy<::protobuf::reflect::MessageDescriptor> = ::protobuf::lazy::Lazy { lock: ::protobuf::lazy::ONCE_INIT, ptr: 0 as *const ::protobuf::reflect::MessageDescriptor, }; unsafe { descriptor.get(|| { let mut fields = ::std::vec::Vec::new(); fields.push(::protobuf::reflect::accessor::make_singular_string_accessor( "id", TextMessage::has_id, TextMessage::get_id, )); fields.push(::protobuf::reflect::accessor::make_singular_string_accessor( "sender", TextMessage::has_sender, TextMessage::get_sender, )); fields.push(::protobuf::reflect::accessor::make_singular_string_accessor( "text", TextMessage::has_text, TextMessage::get_text, )); 
::protobuf::reflect::MessageDescriptor::new::<TextMessage>( "TextMessage", fields, file_descriptor_proto() ) }) } } } impl ::protobuf::Clear for TextMessage { fn clear(&mut self) { self.clear_id(); self.clear_sender(); self.clear_text(); self.unknown_fields.clear(); } } impl ::std::cmp::PartialEq for TextMessage { fn eq(&self, other: &TextMessage) -> bool { self.id == other.id && self.sender == other.sender && self.text == other.text && self.unknown_fields == other.unknown_fields } } impl ::std::fmt::Debug for TextMessage { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { ::protobuf::text_format::fmt(self, f) } } #[derive(Clone,Default)] pub struct MessageAcknowledgement { // message fields message_id: ::protobuf::SingularField<::std::string::String>, // special fields unknown_fields: ::protobuf::UnknownFields, cached_size: ::std::cell::Cell<u32>, } // see codegen.rs for the explanation why impl Sync explicitly unsafe impl ::std::marker::Sync for MessageAcknowledgement {} impl MessageAcknowledgement { pub fn new() -> MessageAcknowledgement { ::std::default::Default::default() } pub fn default_instance() -> &'static MessageAcknowledgement { static mut instance: ::protobuf::lazy::Lazy<MessageAcknowledgement> = ::protobuf::lazy::Lazy { lock: ::protobuf::lazy::ONCE_INIT, ptr: 0 as *const MessageAcknowledgement, }; unsafe { instance.get(|| { MessageAcknowledgement { message_id: ::protobuf::SingularField::none(), unknown_fields: ::protobuf::UnknownFields::new(), cached_size: ::std::cell::Cell::new(0), } }) } } // required string message_id = 1; pub fn clear_message_id(&mut self) { self.message_id.clear(); } pub fn has_message_id(&self) -> bool { self.message_id.is_some() } // Param is passed by value, moved pub fn set_message_id(&mut self, v: ::std::string::String) { self.message_id = ::protobuf::SingularField::some(v); } // Mutable pointer to the field. // If field is not initialized, it is initialized with default value first. 
pub fn mut_message_id(&mut self) -> &mut ::std::string::String { if self.message_id.is_none() { self.message_id.set_default(); }; self.message_id.as_mut().unwrap() } // Take field pub fn take_message_id(&mut self) -> ::std::string::String { self.message_id.take().unwrap_or_else(|| ::std::string::String::new()) } pub fn get_message_id(&self) -> &str { match self.message_id.as_ref() { Some(v) => &v, None => "", } } } impl ::protobuf::Message for MessageAcknowledgement { fn is_initialized(&self) -> bool { if self.message_id.is_none() { return false; }; true } fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream) -> ::protobuf::ProtobufResult<()> { while !try!(is.eof()) { let (field_number, wire_type) = try!(is.read_tag_unpack()); match field_number { 1 => { try!(::protobuf::rt::read_singular_string_into(wire_type, is, &mut self.message_id)); }, _ => { try!(::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())); }, }; } ::std::result::Result::Ok(()) } // Compute sizes of nested messages #[allow(unused_variables)] fn compute_size(&self) -> u32 { let mut my_size = 0; for value in self.message_id.iter() { my_size += ::protobuf::rt::string_size(1, &value); }; my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields()); self.cached_size.set(my_size); my_size } fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream) -> ::protobuf::ProtobufResult<()> { if let Some(v) = self.message_id.as_ref() { try!(os.write_string(1, &v)); }; try!(os.write_unknown_fields(self.get_unknown_fields())); ::std::result::Result::Ok(()) } fn get_cached_size(&self) -> u32 { self.cached_size.get() } fn get_unknown_fields(&self) -> &::protobuf::UnknownFields { &self.unknown_fields } fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields { &mut self.unknown_fields } fn type_id(&self) -> ::std::any::TypeId { ::std::any::TypeId::of::<MessageAcknowledgement>() } fn as_any(&self) -> &::std::any::Any { self as &::std::any::Any } fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor { ::protobuf::MessageStatic::descriptor_static(None::<Self>) } } impl ::protobuf::MessageStatic for MessageAcknowledgement { fn new() -> MessageAcknowledgement { MessageAcknowledgement::new() } fn descriptor_static(_: ::std::option::Option<MessageAcknowledgement>) -> &'static ::protobuf::reflect::MessageDescriptor { static mut descriptor: ::protobuf::lazy::Lazy<::protobuf::reflect::MessageDescriptor> = ::protobuf::lazy::Lazy { lock: ::protobuf::lazy::ONCE_INIT, ptr: 0 as *const ::protobuf::reflect::MessageDescriptor, }; unsafe { descriptor.get(|| { let mut fields = ::std::vec::Vec::new(); fields.push(::protobuf::reflect::accessor::make_singular_string_accessor( "message_id", MessageAcknowledgement::has_message_id, MessageAcknowledgement::get_message_id, )); ::protobuf::reflect::MessageDescriptor::new::<MessageAcknowledgement>( "MessageAcknowledgement", fields, file_descriptor_proto() ) }) } } } impl ::protobuf::Clear for MessageAcknowledgement { fn clear(&mut self) { self.clear_message_id(); self.unknown_fields.clear(); } } impl ::std::cmp::PartialEq for MessageAcknowledgement { fn eq(&self, other: &MessageAcknowledgement) -> bool { self.message_id == other.message_id && self.unknown_fields == other.unknown_fields } } impl ::std::fmt::Debug for MessageAcknowledgement { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { ::protobuf::text_format::fmt(self, f) } } #[derive(Clone,Default)] pub struct Envelope { // message 
fields message_type: ::std::option::Option<Envelope_Type>, recipient: ::protobuf::SingularField<::std::string::String>, text_message: ::protobuf::SingularPtrField<TextMessage>, message_acknowledgement: ::protobuf::SingularPtrField<MessageAcknowledgement>, // special fields unknown_fields: ::protobuf::UnknownFields, cached_size: ::std::cell::Cell<u32>, } // see codegen.rs for the explanation why impl Sync explicitly unsafe impl ::std::marker::Sync for Envelope {} impl Envelope { pub fn new() -> Envelope { ::std::default::Default::default() } pub fn default_instance() -> &'static Envelope { static mut instance: ::protobuf::lazy::Lazy<Envelope> = ::protobuf::lazy::Lazy { lock: ::protobuf::lazy::ONCE_INIT, ptr: 0 as *const Envelope, }; unsafe { instance.get(|| { Envelope { message_type: ::std::option::Option::None, recipient: ::protobuf::SingularField::none(), text_message: ::protobuf::SingularPtrField::none(), message_acknowledgement: ::protobuf::SingularPtrField::none(), unknown_fields: ::protobuf::UnknownFields::new(), cached_size: ::std::cell::Cell::new(0), } }) } } // required .Envelope.Type message_type = 1; pub fn clear_message_type(&mut self) { self.message_type = ::std::option::Option::None; } pub fn has_message_type(&self) -> bool { self.message_type.is_some() } // Param is passed by value, moved pub fn set_message_type(&mut self, v: Envelope_Type) { self.message_type = ::std::option::Option::Some(v); } pub fn get_message_type(&self) -> Envelope_Type { self.message_type.unwrap_or(Envelope_Type::TEXT_MESSAGE) } // required string recipient = 2; pub fn clear_recipient(&mut self) { self.recipient.clear(); } pub fn has_recipient(&self) -> bool { self.recipient.is_some() } // Param is passed by value, moved pub fn set_recipient(&mut self, v: ::std::string::String) { self.recipient = ::protobuf::SingularField::some(v); } // Mutable pointer to the field. // If field is not initialized, it is initialized with default value first. pub fn mut_recipient(&mut self) -> &mut ::std::string::String { if self.recipient.is_none() { self.recipient.set_default(); }; self.recipient.as_mut().unwrap() } // Take field pub fn take_recipient(&mut self) -> ::std::string::String { self.recipient.take().unwrap_or_else(|| ::std::string::String::new()) } pub fn get_recipient(&self) -> &str { match self.recipient.as_ref() { Some(v) => &v, None => "", } } // optional .TextMessage text_message = 3; pub fn clear_text_message(&mut self) { self.text_message.clear(); } pub fn has_text_message(&self) -> bool { self.text_message.is_some() } // Param is passed by value, moved pub fn set_text_message(&mut self, v: TextMessage) { self.text_message = ::protobuf::SingularPtrField::some(v); } // Mutable pointer to the field. // If field is not initialized, it is initialized with default value first. 
pub fn mut_text_message(&mut self) -> &mut TextMessage { if self.text_message.is_none() { self.text_message.set_default(); }; self.text_message.as_mut().unwrap() } // Take field pub fn take_text_message(&mut self) -> TextMessage { self.text_message.take().unwrap_or_else(|| TextMessage::new()) } pub fn get_text_message(&self) -> &TextMessage { self.text_message.as_ref().unwrap_or_else(|| TextMessage::default_instance()) } // optional .MessageAcknowledgement message_acknowledgement = 4; pub fn clear_message_acknowledgement(&mut self) { self.message_acknowledgement.clear(); } pub fn has_message_acknowledgement(&self) -> bool { self.message_acknowledgement.is_some() } // Param is passed by value, moved pub fn set_message_acknowledgement(&mut self, v: MessageAcknowledgement) { self.message_acknowledgement = ::protobuf::SingularPtrField::some(v); } // Mutable pointer to the field. // If field is not initialized, it is initialized with default value first. pub fn mut_message_acknowledgement(&mut self) -> &mut MessageAcknowledgement { if self.message_acknowledgement.is_none() { self.message_acknowledgement.set_default(); }; self.message_acknowledgement.as_mut().unwrap() } // Take field pub fn take_message_acknowledgement(&mut self) -> MessageAcknowledgement { self.message_acknowledgement.take().unwrap_or_else(|| MessageAcknowledgement::new()) } pub fn get_message_acknowledgement(&self) -> &MessageAcknowledgement { self.message_acknowledgement.as_ref().unwrap_or_else(|| MessageAcknowledgement::default_instance()) } } impl ::protobuf::Message for Envelope { fn is_initialized(&self) -> bool { if self.message_type.is_none() { return false; }; if self.recipient.is_none() { return false; }; true } fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream) -> ::protobuf::ProtobufResult<()> { while !try!(is.eof()) { let (field_number, wire_type) = try!(is.read_tag_unpack()); match field_number { 1 => { if wire_type != ::protobuf::wire_format::WireTypeVarint { return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type)); }; let tmp = try!(is.read_enum()); self.message_type = ::std::option::Option::Some(tmp); }, 2 => { try!(::protobuf::rt::read_singular_string_into(wire_type, is, &mut self.recipient)); }, 3 => { try!(::protobuf::rt::read_singular_message_into(wire_type, is, &mut self.text_message)); }, 4 => { try!(::protobuf::rt::read_singular_message_into(wire_type, is, &mut self.message_acknowledgement)); }, _ => { try!(::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())); }, }; } ::std::result::Result::Ok(()) } // Compute sizes of nested messages #[allow(unused_variables)] fn compute_size(&self) -> u32 { let mut my_size = 0; for value in self.message_type.iter() { my_size += ::protobuf::rt::enum_size(1, *value); }; for value in self.recipient.iter() { my_size += ::protobuf::rt::string_size(2, &value); }; for value in self.text_message.iter() { let len = value.compute_size(); my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len; }; for value in self.message_acknowledgement.iter() { let len = value.compute_size(); my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len; }; my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields()); self.cached_size.set(my_size); my_size } fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream) -> ::protobuf::ProtobufResult<()> { if let Some(v) = self.message_type { try!(os.write_enum(1, v.value())); }; if let Some(v) = 
self.recipient.as_ref() { try!(os.write_string(2, &v)); }; if let Some(v) = self.text_message.as_ref() { try!(os.write_tag(3, ::protobuf::wire_format::WireTypeLengthDelimited)); try!(os.write_raw_varint32(v.get_cached_size())); try!(v.write_to_with_cached_sizes(os)); }; if let Some(v) = self.message_acknowledgement.as_ref() { try!(os.write_tag(4, ::protobuf::wire_format::WireTypeLengthDelimited)); try!(os.write_raw_varint32(v.get_cached_size())); try!(v.write_to_with_cached_sizes(os)); }; try!(os.write_unknown_fields(self.get_unknown_fields())); ::std::result::Result::Ok(()) } fn get_cached_size(&self) -> u32 { self.cached_size.get() } fn get_unknown_fields(&self) -> &::protobuf::UnknownFields { &self.unknown_fields } fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields { &mut self.unknown_fields } fn type_id(&self) -> ::std::any::TypeId { ::std::any::TypeId::of::<Envelope>() } fn as_any(&self) -> &::std::any::Any { self as &::std::any::Any } fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor { ::protobuf::MessageStatic::descriptor_static(None::<Self>) } } impl ::protobuf::MessageStatic for Envelope { fn new() -> Envelope { Envelope::new() } fn descriptor_static(_: ::std::option::Option<Envelope>) -> &'static ::protobuf::reflect::MessageDescriptor { static mut descriptor: ::protobuf::lazy::Lazy<::protobuf::reflect::MessageDescriptor> = ::protobuf::lazy::Lazy { lock: ::protobuf::lazy::ONCE_INIT, ptr: 0 as *const ::protobuf::reflect::MessageDescriptor, }; unsafe { descriptor.get(|| { let mut fields = ::std::vec::Vec::new(); fields.push(::protobuf::reflect::accessor::make_singular_enum_accessor( "message_type", Envelope::has_message_type, Envelope::get_message_type, )); fields.push(::protobuf::reflect::accessor::make_singular_string_accessor( "recipient", Envelope::has_recipient, Envelope::get_recipient, )); fields.push(::protobuf::reflect::accessor::make_singular_message_accessor( "text_message", Envelope::has_text_message, Envelope::get_text_message, )); fields.push(::protobuf::reflect::accessor::make_singular_message_accessor( "message_acknowledgement", Envelope::has_message_acknowledgement, Envelope::get_message_acknowledgement, )); ::protobuf::reflect::MessageDescriptor::new::<Envelope>( "Envelope", fields, file_descriptor_proto() ) }) } } } impl ::protobuf::Clear for Envelope { fn clear(&mut self) { self.clear_message_type(); self.clear_recipient(); self.clear_text_message(); self.clear_message_acknowledgement(); self.unknown_fields.clear(); } } impl ::std::cmp::PartialEq for Envelope { fn eq(&self, other: &Envelope) -> bool { self.message_type == other.message_type && self.recipient == other.recipient && self.text_message == other.text_message && self.message_acknowledgement == other.message_acknowledgement && self.unknown_fields == other.unknown_fields } } impl ::std::fmt::Debug for Envelope { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { ::protobuf::text_format::fmt(self, f) } } #[derive(Clone,PartialEq,Eq,Debug,Hash)] pub enum Envelope_Type { TEXT_MESSAGE = 1, MESSAGE_ACKNOWLEDGEMENT = 2, } impl ::protobuf::ProtobufEnum for Envelope_Type { fn value(&self) -> i32 { *self as i32 } fn from_i32(value: i32) -> ::std::option::Option<Envelope_Type> { match value { 1 => ::std::option::Option::Some(Envelope_Type::TEXT_MESSAGE), 2 => ::std::option::Option::Some(Envelope_Type::MESSAGE_ACKNOWLEDGEMENT), _ => ::std::option::Option::None } } fn values() -> &'static [Self] { static values: &'static [Envelope_Type] = &[ 
Envelope_Type::TEXT_MESSAGE, Envelope_Type::MESSAGE_ACKNOWLEDGEMENT, ]; values } fn enum_descriptor_static(_: Option<Envelope_Type>) -> &'static ::protobuf::reflect::EnumDescriptor { static mut descriptor: ::protobuf::lazy::Lazy<::protobuf::reflect::EnumDescriptor> = ::protobuf::lazy::Lazy { lock: ::protobuf::lazy::ONCE_INIT, ptr: 0 as *const ::protobuf::reflect::EnumDescriptor, }; unsafe { descriptor.get(|| { ::protobuf::reflect::EnumDescriptor::new("Envelope_Type", file_descriptor_proto()) }) } } } impl ::std::marker::Copy for Envelope_Type { } static file_descriptor_proto_data: &'static [u8] = &[ 0x0a, 0x23, 0x73, 0x72, 0x63, 0x2f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2f, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x49, 0x0a, 0x0b, 0x54, 0x65, 0x78, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x02, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x18, 0x02, 0x20, 0x02, 0x28, 0x09, 0x52, 0x06, 0x73, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x65, 0x78, 0x74, 0x18, 0x03, 0x20, 0x02, 0x28, 0x09, 0x52, 0x04, 0x74, 0x65, 0x78, 0x74, 0x22, 0x37, 0x0a, 0x16, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x41, 0x63, 0x6b, 0x6e, 0x6f, 0x77, 0x6c, 0x65, 0x64, 0x67, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x02, 0x28, 0x09, 0x52, 0x09, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x49, 0x64, 0x22, 0x95, 0x02, 0x0a, 0x08, 0x45, 0x6e, 0x76, 0x65, 0x6c, 0x6f, 0x70, 0x65, 0x12, 0x31, 0x0a, 0x0c, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x02, 0x28, 0x0e, 0x32, 0x0e, 0x2e, 0x45, 0x6e, 0x76, 0x65, 0x6c, 0x6f, 0x70, 0x65, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x0b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x72, 0x65, 0x63, 0x69, 0x70, 0x69, 0x65, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x02, 0x28, 0x09, 0x52, 0x09, 0x72, 0x65, 0x63, 0x69, 0x70, 0x69, 0x65, 0x6e, 0x74, 0x12, 0x2f, 0x0a, 0x0c, 0x74, 0x65, 0x78, 0x74, 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0c, 0x2e, 0x54, 0x65, 0x78, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x52, 0x0b, 0x74, 0x65, 0x78, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x50, 0x0a, 0x17, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x5f, 0x61, 0x63, 0x6b, 0x6e, 0x6f, 0x77, 0x6c, 0x65, 0x64, 0x67, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x41, 0x63, 0x6b, 0x6e, 0x6f, 0x77, 0x6c, 0x65, 0x64, 0x67, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x16, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x41, 0x63, 0x6b, 0x6e, 0x6f, 0x77, 0x6c, 0x65, 0x64, 0x67, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x22, 0x35, 0x0a, 0x04, 0x54, 0x79, 0x70, 0x65, 0x12, 0x10, 0x0a, 0x0c, 0x54, 0x45, 0x58, 0x54, 0x5f, 0x4d, 0x45, 0x53, 0x53, 0x41, 0x47, 0x45, 0x10, 0x01, 0x12, 0x1b, 0x0a, 0x17, 0x4d, 0x45, 0x53, 0x53, 0x41, 0x47, 0x45, 0x5f, 0x41, 0x43, 0x4b, 0x4e, 0x4f, 0x57, 0x4c, 0x45, 0x44, 0x47, 0x45, 0x4d, 0x45, 0x4e, 0x54, 0x10, 0x02, 0x4a, 0xf2, 0x05, 0x0a, 0x06, 0x12, 0x04, 0x00, 0x00, 0x14, 0x01, 0x0a, 0x0a, 0x0a, 0x02, 0x04, 0x00, 0x12, 0x04, 0x00, 0x00, 0x04, 0x01, 0x0a, 0x0a, 0x0a, 0x03, 0x04, 0x00, 0x01, 0x12, 0x03, 0x00, 0x08, 0x13, 0x0a, 0x0b, 0x0a, 0x04, 0x04, 0x00, 0x02, 
0x00, 0x12, 0x03, 0x01, 0x04, 0x1b, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x00, 0x04, 0x12, 0x03, 0x01, 0x04, 0x0c, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x00, 0x05, 0x12, 0x03, 0x01, 0x0d, 0x13, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x00, 0x01, 0x12, 0x03, 0x01, 0x14, 0x16, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x00, 0x03, 0x12, 0x03, 0x01, 0x19, 0x1a, 0x0a, 0x0b, 0x0a, 0x04, 0x04, 0x00, 0x02, 0x01, 0x12, 0x03, 0x02, 0x04, 0x1f, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x01, 0x04, 0x12, 0x03, 0x02, 0x04, 0x0c, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x01, 0x05, 0x12, 0x03, 0x02, 0x0d, 0x13, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x01, 0x01, 0x12, 0x03, 0x02, 0x14, 0x1a, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x01, 0x03, 0x12, 0x03, 0x02, 0x1d, 0x1e, 0x0a, 0x0b, 0x0a, 0x04, 0x04, 0x00, 0x02, 0x02, 0x12, 0x03, 0x03, 0x04, 0x1d, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x02, 0x04, 0x12, 0x03, 0x03, 0x04, 0x0c, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x02, 0x05, 0x12, 0x03, 0x03, 0x0d, 0x13, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x02, 0x01, 0x12, 0x03, 0x03, 0x14, 0x18, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x02, 0x03, 0x12, 0x03, 0x03, 0x1b, 0x1c, 0x0a, 0x0a, 0x0a, 0x02, 0x04, 0x01, 0x12, 0x04, 0x06, 0x00, 0x08, 0x01, 0x0a, 0x0a, 0x0a, 0x03, 0x04, 0x01, 0x01, 0x12, 0x03, 0x06, 0x08, 0x1e, 0x0a, 0x0b, 0x0a, 0x04, 0x04, 0x01, 0x02, 0x00, 0x12, 0x03, 0x07, 0x04, 0x23, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x01, 0x02, 0x00, 0x04, 0x12, 0x03, 0x07, 0x04, 0x0c, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x01, 0x02, 0x00, 0x05, 0x12, 0x03, 0x07, 0x0d, 0x13, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x01, 0x02, 0x00, 0x01, 0x12, 0x03, 0x07, 0x14, 0x1e, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x01, 0x02, 0x00, 0x03, 0x12, 0x03, 0x07, 0x21, 0x22, 0x0a, 0x0a, 0x0a, 0x02, 0x04, 0x02, 0x12, 0x04, 0x0a, 0x00, 0x14, 0x01, 0x0a, 0x0a, 0x0a, 0x03, 0x04, 0x02, 0x01, 0x12, 0x03, 0x0a, 0x08, 0x10, 0x0a, 0x0c, 0x0a, 0x04, 0x04, 0x02, 0x04, 0x00, 0x12, 0x04, 0x0b, 0x04, 0x0e, 0x05, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x02, 0x04, 0x00, 0x01, 0x12, 0x03, 0x0b, 0x09, 0x0d, 0x0a, 0x0d, 0x0a, 0x06, 0x04, 0x02, 0x04, 0x00, 0x02, 0x00, 0x12, 0x03, 0x0c, 0x08, 0x19, 0x0a, 0x0e, 0x0a, 0x07, 0x04, 0x02, 0x04, 0x00, 0x02, 0x00, 0x01, 0x12, 0x03, 0x0c, 0x08, 0x14, 0x0a, 0x0e, 0x0a, 0x07, 0x04, 0x02, 0x04, 0x00, 0x02, 0x00, 0x02, 0x12, 0x03, 0x0c, 0x17, 0x18, 0x0a, 0x0d, 0x0a, 0x06, 0x04, 0x02, 0x04, 0x00, 0x02, 0x01, 0x12, 0x03, 0x0d, 0x08, 0x24, 0x0a, 0x0e, 0x0a, 0x07, 0x04, 0x02, 0x04, 0x00, 0x02, 0x01, 0x01, 0x12, 0x03, 0x0d, 0x08, 0x1f, 0x0a, 0x0e, 0x0a, 0x07, 0x04, 0x02, 0x04, 0x00, 0x02, 0x01, 0x02, 0x12, 0x03, 0x0d, 0x22, 0x23, 0x0a, 0x0b, 0x0a, 0x04, 0x04, 0x02, 0x02, 0x00, 0x12, 0x03, 0x10, 0x04, 0x23, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x02, 0x02, 0x00, 0x04, 0x12, 0x03, 0x10, 0x04, 0x0c, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x02, 0x02, 0x00, 0x06, 0x12, 0x03, 0x10, 0x0d, 0x11, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x02, 0x02, 0x00, 0x01, 0x12, 0x03, 0x10, 0x12, 0x1e, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x02, 0x02, 0x00, 0x03, 0x12, 0x03, 0x10, 0x21, 0x22, 0x0a, 0x0b, 0x0a, 0x04, 0x04, 0x02, 0x02, 0x01, 0x12, 0x03, 0x11, 0x04, 0x22, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x02, 0x02, 0x01, 0x04, 0x12, 0x03, 0x11, 0x04, 0x0c, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x02, 0x02, 0x01, 0x05, 0x12, 0x03, 0x11, 0x0d, 0x13, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x02, 0x02, 0x01, 0x01, 0x12, 0x03, 0x11, 0x14, 0x1d, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x02, 0x02, 0x01, 0x03, 0x12, 0x03, 0x11, 0x20, 0x21, 0x0a, 0x0b, 0x0a, 0x04, 0x04, 0x02, 0x02, 0x02, 0x12, 0x03, 0x12, 0x04, 0x2a, 0x0a, 0x0c, 
0x0a, 0x05, 0x04, 0x02, 0x02, 0x02, 0x04, 0x12, 0x03, 0x12, 0x04, 0x0c, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x02, 0x02, 0x02, 0x06, 0x12, 0x03, 0x12, 0x0d, 0x18, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x02, 0x02, 0x02, 0x01, 0x12, 0x03, 0x12, 0x19, 0x25, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x02, 0x02, 0x02, 0x03, 0x12, 0x03, 0x12, 0x28, 0x29, 0x0a, 0x0b, 0x0a, 0x04, 0x04, 0x02, 0x02, 0x03, 0x12, 0x03, 0x13, 0x04, 0x40, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x02, 0x02, 0x03, 0x04, 0x12, 0x03, 0x13, 0x04, 0x0c, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x02, 0x02, 0x03, 0x06, 0x12, 0x03, 0x13, 0x0d, 0x23, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x02, 0x02, 0x03, 0x01, 0x12, 0x03, 0x13, 0x24, 0x3b, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x02, 0x02, 0x03, 0x03, 0x12, 0x03, 0x13, 0x3e, 0x3f, ]; static mut file_descriptor_proto_lazy: ::protobuf::lazy::Lazy<::protobuf::descriptor::FileDescriptorProto> = ::protobuf::lazy::Lazy { lock: ::protobuf::lazy::ONCE_INIT, ptr: 0 as *const ::protobuf::descriptor::FileDescriptorProto, }; fn parse_descriptor_proto() -> ::protobuf::descriptor::FileDescriptorProto { ::protobuf::parse_from_bytes(file_descriptor_proto_data).unwrap() } pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto { unsafe { file_descriptor_proto_lazy.get(|| { parse_descriptor_proto() }) } }<|fim▁end|>
self.sender.take().unwrap_or_else(|| ::std::string::String::new())
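The row above pairs a fill-in-the-middle prompt (prefix + `<|fim▁hole|>` + suffix) with the completion that was cut out of the hole — here, the body of the protobuf message's `take_sender` accessor. A minimal Python sketch of how such a pair reassembles into the original source file; the marker strings are copied from the rows themselves, and the function is illustrative rather than part of the dataset:

```python
# Reassemble one prompt/completion row back into its source file.
# Assumes exactly one hole per row, as in every record in this dump.
FIM_BEGIN = "<|fim\u2581begin|>"  # the separator is U+2581 (lower one eighth block)
FIM_HOLE = "<|fim\u2581hole|>"
FIM_END = "<|fim\u2581end|>"

def reassemble(prompt: str, completion: str) -> str:
    body = prompt.split(FIM_BEGIN, 1)[1]      # drop the <|file_name|> header
    body = body.rsplit(FIM_END, 1)[0]         # drop the trailing end marker
    prefix, suffix = body.split(FIM_HOLE, 1)
    return prefix + completion + suffix
```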
<|file_name|>Conga.ts<|end_file_name|><|fim▁begin|>///<reference path="../definitions/waa.d.ts" /> ///<reference path="Instrument.ts" /> module audiobus.instruments { export class Conga extends Instrument { private osc2:OscillatorNode; // create constructor( audioContext:AudioContext, outputTo:GainNode ) { super( audioContext, outputTo ); // Synthesize!<|fim▁hole|> this.osc2.connect( this.gain ); } public start( f:number=1200, offsetA:number=0.160 ):void { var t:number = this.context.currentTime; this.osc2.frequency.setValueAtTime(f, t); this.osc2.frequency.linearRampToValueAtTime(800, t + 0.005); this.gain.gain.cancelScheduledValues( t ); this.gain.gain.setValueAtTime(0.5, t); this.gain.gain.exponentialRampToValueAtTime(0.5, t + 0.010); this.gain.gain.linearRampToValueAtTime(0.0, t + offsetA); if ( !this.hasInitialised ) this.osc2.start(0); super.start(); } } }<|fim▁end|>
this.osc2 = audioContext.createOscillator(); this.osc2.type = OscillatorType.sine;
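For the Conga.ts row above, the completion creates the sine oscillator that the suffix then connects and schedules: a 1200 Hz tone ramped down to 800 Hz over 5 ms, with a 0.5 gain held for roughly 10 ms and ramped to zero at `offsetA` seconds. A rough stdlib-Python sketch of that gain envelope — the breakpoints are read off `start()`, and this is not a Web Audio implementation:

```python
def conga_gain(t: float, offset_a: float = 0.160) -> float:
    """Gain at time t seconds: hold 0.5 for 10 ms, then a linear ramp
    to 0.0 at offset_a, mirroring the ramps scheduled in Conga.start()."""
    if t < 0.010:
        return 0.5
    if t < offset_a:
        return 0.5 * (offset_a - t) / (offset_a - 0.010)
    return 0.0
```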
<|file_name|>progressevent.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use dom::bindings::codegen::Bindings::EventBinding::EventMethods; use dom::bindings::codegen::Bindings::ProgressEventBinding; use dom::bindings::codegen::Bindings::ProgressEventBinding::ProgressEventMethods; use dom::bindings::codegen::InheritTypes::{EventCast, ProgressEventDerived}; use dom::bindings::error::Fallible; use dom::bindings::global::GlobalRef; use dom::bindings::js::{JSRef, Rootable, Temporary}; use dom::bindings::utils::reflect_dom_object; use dom::event::{Event, EventTypeId}; use util::str::DOMString; #[dom_struct] pub struct ProgressEvent { event: Event, length_computable: bool, loaded: u64, total: u64 } impl ProgressEventDerived for Event { fn is_progressevent(&self) -> bool { *self.type_id() == EventTypeId::ProgressEvent } } impl ProgressEvent { fn new_inherited(length_computable: bool, loaded: u64, total: u64) -> ProgressEvent { ProgressEvent { event: Event::new_inherited(EventTypeId::ProgressEvent), length_computable: length_computable, loaded: loaded, total: total } } pub fn new(global: GlobalRef, type_: DOMString, can_bubble: bool, cancelable: bool, length_computable: bool, loaded: u64, total: u64) -> Temporary<ProgressEvent> { let ev = reflect_dom_object(box ProgressEvent::new_inherited(length_computable, loaded, total), global, ProgressEventBinding::Wrap).root(); let event: JSRef<Event> = EventCast::from_ref(ev.r()); event.InitEvent(type_, can_bubble, cancelable); Temporary::from_rooted(ev.r()) } pub fn Constructor(global: GlobalRef, type_: DOMString, init: &ProgressEventBinding::ProgressEventInit) -> Fallible<Temporary<ProgressEvent>> { let ev = ProgressEvent::new(global, type_, init.parent.bubbles, init.parent.cancelable, init.lengthComputable, init.loaded, init.total); Ok(ev) } } impl<'a> ProgressEventMethods for JSRef<'a, ProgressEvent> { fn LengthComputable(self) -> bool { self.length_computable } fn Loaded(self) -> u64{ self.loaded<|fim▁hole|> fn Total(self) -> u64 { self.total } }<|fim▁end|>
}
<|file_name|>base.js<|end_file_name|><|fim▁begin|>/** * Base js functions */ $(document).ready(function(){ //Init jQuery Masonry layout init_masonry(); //Select menu onchange $("#collapsed-navbar").change(function () { window.location = $(this).val(); }); }); function init_masonry(){ var $container = $('#content'); <|fim▁hole|> $container.masonry({ itemSelector: '.box', isAnimated: true }); }); } */ $(document).ready(function(){ //Start carousel $('.carousel').carousel({interval:false}); });<|fim▁end|>
$container.imagesLoaded( function(){
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models from django.contrib.auth.models import User from police.models import Stationdata class general_diary(models.Model): ref_id = models.CharField(max_length=40,unique=True,default="00000") firstname = models.CharField(max_length=20) lastname = models.CharField(max_length=20) mobile = models.CharField(max_length=10) email = models.CharField(max_length=80) address = models.TextField() DOB = models.DateField('date of birth') idType_1 = models.CharField(max_length=10) idType_1_value = models.CharField(max_length=15) idType_2 = models.CharField(max_length=20) idType_2_value = models.CharField(max_length=15) StationCode = models.ForeignKey(Stationdata) Subject = models.CharField(max_length=200) pub_date = models.DateTimeField('date published') detail = models.TextField() Time = models.DateTimeField('Occurence') Place = models.CharField(max_length=200) Loss = models.CharField(max_length=200) OTP = models.BooleanField(default=False) def __str__(self): # __unicode__ on Python 2 return self.Subject class Fir(models.Model): ref_id = models.CharField(max_length=40,unique=True,default="00000") firstname = models.CharField(max_length=20) lastname = models.CharField(max_length=20) mobile = models.CharField(max_length=10) email = models.CharField(max_length=80) address = models.TextField() DOB = models.DateField('date of birth') idType_1 = models.CharField(max_length=10) idType_1_value = models.CharField(max_length=15) idType_2 = models.CharField(max_length=20) idType_2_value = models.CharField(max_length=15) StationCode = models.ForeignKey(Stationdata) Subject = models.CharField(max_length=200) pub_date = models.DateTimeField('date published') detail = models.TextField() Suspect = models.CharField(max_length=500) Time = models.DateTimeField('Occurence') Place = models.CharField(max_length=200) Witness = models.CharField(max_length=500) Loss = models.CharField(max_length=200) OTP = models.BooleanField(default=False) def __str__(self): # __unicode__ on Python 2 return self.Subject class lookup_table(models.Model): ref_id = models.CharField(max_length=40,unique=True,default="00000")<|fim▁hole|> hashmap = models.CharField(max_length=70,unique=True,default="00000") type = models.CharField(max_length=5,default="GD") def __str__(self): # __unicode__ on Python 2 return self.hashmap<|fim▁end|>
<|file_name|>states.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # __author__ = chenchiyuan from __future__ import division, unicode_literals, print_function from hawaii.apps.weixin.models import App from hawaii.apps.weixin.response import MessageResponse, EventResponse from hawaii.apps.weixin.weixin.interface import StateInterface <|fim▁hole|> super(NoCacheState, self).__init__(*args, **kwargs) def get_context(self): return { "from": "weixin" } def next(self, input): state, kwargs = super(NoCacheState, self).next(input) return "NO_CACHE", kwargs def to_xml(self, input): response = MessageResponse.response(input) return self.response_articles(response) def response_articles(self, response): if type(response) is list: context = self.get_context() return self._to_full_text(response, context=context) elif type(response) in (unicode, str): return self._to_wx_text(response) else: return self._to_wx_text("") class MenuEventState(NoCacheState): def to_xml(self, input): response = EventResponse.response(input) return self.response_articles(response) class SubscribeEventState(NoCacheState): def to_xml(self, input): app = App.only_one() rule = app.subscribe_rule if not rule: return self._to_wx_text("") response = EventResponse.response(rule.id) return self.response_articles(response)<|fim▁end|>
class NoCacheState(StateInterface): def __init__(self, *args, **kwargs):
<|file_name|>hilbert_algo.js<|end_file_name|><|fim▁begin|>var hilbert = (function() { // From Mike Bostock: http://bl.ocks.org/597287 // Adapted from Nick Johnson: http://bit.ly/biWkkq var pairs = [ [[0, 3], [1, 0], [3, 1], [2, 0]], [[2, 1], [1, 1], [3, 0], [0, 2]], [[2, 2], [3, 3], [1, 2], [0, 1]], [[0, 0], [3, 2], [1, 3], [2, 3]] ]; // d2xy and rot are from: // http://en.wikipedia.org/wiki/Hilbert_curve#Applications_and_mapping_algorithms var rot = function(n, x, y, rx, ry) { if (ry === 0) { if (rx === 1) { x = n - 1 - x; y = n - 1 - y; } return [y, x]; } return [x, y]; }; return { xy2d: function(x, y, z) { var quad = 0, pair, i = 0; while (--z >= 0) { pair = pairs[quad][(x & (1 << z) ? 2 : 0) | (y & (1 << z) ? 1 : 0)]; i = (i << 2) | pair[0]; quad = pair[1]; } return i; }, d2xy: function(z, t) { var n = 1 << z, x = 0, y = 0; for (var s = 1; s < n; s *= 2) { var rx = 1 & (t / 2), ry = 1 & (t ^ rx); var xy = rot(s, x, y, rx, ry); x = xy[0] + s * rx; y = xy[1] + s * ry; t /= 4; } return [x, y];<|fim▁hole|> }; })();<|fim▁end|>
}
<|file_name|>display_utils.js<|end_file_name|><|fim▁begin|>/* Copyright 2015 Mozilla Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import { assert, BaseException, isString, removeNullCharacters, stringToBytes, Util, warn, } from "../shared/util.js"; import { BaseCanvasFactory, BaseCMapReaderFactory, BaseStandardFontDataFactory, BaseSVGFactory, } from "./base_factory.js"; const DEFAULT_LINK_REL = "noopener noreferrer nofollow"; const SVG_NS = "http://www.w3.org/2000/svg"; class DOMCanvasFactory extends BaseCanvasFactory { constructor({ ownerDocument = globalThis.document } = {}) { super(); this._document = ownerDocument; } _createCanvas(width, height) { const canvas = this._document.createElement("canvas"); canvas.width = width; canvas.height = height; return canvas; } } async function fetchData(url, asTypedArray = false) { if ( (typeof PDFJSDev !== "undefined" && PDFJSDev.test("MOZCENTRAL")) || isValidFetchUrl(url, document.baseURI) ) { const response = await fetch(url); if (!response.ok) { throw new Error(response.statusText); } return asTypedArray ? new Uint8Array(await response.arrayBuffer()) : stringToBytes(await response.text()); } // The Fetch API is not supported. return new Promise((resolve, reject) => { const request = new XMLHttpRequest(); request.open("GET", url, /* asTypedArray = */ true); if (asTypedArray) { request.responseType = "arraybuffer"; } request.onreadystatechange = () => { if (request.readyState !== XMLHttpRequest.DONE) { return; } if (request.status === 200 || request.status === 0) { let data; if (asTypedArray && request.response) { data = new Uint8Array(request.response); } else if (!asTypedArray && request.responseText) { data = stringToBytes(request.responseText); } if (data) { resolve(data); return; } } reject(new Error(request.statusText)); }; request.send(null); }); } class DOMCMapReaderFactory extends BaseCMapReaderFactory { _fetchData(url, compressionType) { return fetchData(url, /* asTypedArray = */ this.isCompressed).then(data => { return { cMapData: data, compressionType }; }); } } class DOMStandardFontDataFactory extends BaseStandardFontDataFactory { _fetchData(url) { return fetchData(url, /* asTypedArray = */ true); } } class DOMSVGFactory extends BaseSVGFactory { _createSVG(type) { return document.createElementNS(SVG_NS, type); } } /** * @typedef {Object} PageViewportParameters * @property {Array<number>} viewBox - The xMin, yMin, xMax and * yMax coordinates. * @property {number} scale - The scale of the viewport. * @property {number} rotation - The rotation, in degrees, of the viewport. * @property {number} [offsetX] - The horizontal, i.e. x-axis, offset. The * default value is `0`. * @property {number} [offsetY] - The vertical, i.e. y-axis, offset. The * default value is `0`. * @property {boolean} [dontFlip] - If true, the y-axis will not be flipped. * The default value is `false`. */ /** * @typedef {Object} PageViewportCloneParameters * @property {number} [scale] - The scale, overriding the one in the cloned * viewport. 
The default value is `this.scale`. * @property {number} [rotation] - The rotation, in degrees, overriding the one * in the cloned viewport. The default value is `this.rotation`. * @property {number} [offsetX] - The horizontal, i.e. x-axis, offset. * The default value is `this.offsetX`. * @property {number} [offsetY] - The vertical, i.e. y-axis, offset. * The default value is `this.offsetY`. * @property {boolean} [dontFlip] - If true, the x-axis will not be flipped. * The default value is `false`. */ /** * PDF page viewport created based on scale, rotation and offset. */ class PageViewport { /** * @param {PageViewportParameters} */ constructor({ viewBox, scale, rotation, offsetX = 0, offsetY = 0, dontFlip = false, }) { this.viewBox = viewBox; this.scale = scale; this.rotation = rotation; this.offsetX = offsetX; this.offsetY = offsetY; // creating transform to convert pdf coordinate system to the normal // canvas like coordinates taking in account scale and rotation const centerX = (viewBox[2] + viewBox[0]) / 2; const centerY = (viewBox[3] + viewBox[1]) / 2; let rotateA, rotateB, rotateC, rotateD; // Normalize the rotation, by clamping it to the [0, 360) range. rotation %= 360; if (rotation < 0) { rotation += 360; } switch (rotation) { case 180: rotateA = -1; rotateB = 0; rotateC = 0; rotateD = 1; break; case 90: rotateA = 0; rotateB = 1; rotateC = 1; rotateD = 0; break; case 270: rotateA = 0; rotateB = -1; rotateC = -1; rotateD = 0; break; case 0: rotateA = 1; rotateB = 0; rotateC = 0; rotateD = -1; break; default: throw new Error( "PageViewport: Invalid rotation, must be a multiple of 90 degrees." ); } if (dontFlip) { rotateC = -rotateC; rotateD = -rotateD; } let offsetCanvasX, offsetCanvasY; let width, height; if (rotateA === 0) { offsetCanvasX = Math.abs(centerY - viewBox[1]) * scale + offsetX; offsetCanvasY = Math.abs(centerX - viewBox[0]) * scale + offsetY; width = Math.abs(viewBox[3] - viewBox[1]) * scale; height = Math.abs(viewBox[2] - viewBox[0]) * scale; } else { offsetCanvasX = Math.abs(centerX - viewBox[0]) * scale + offsetX; offsetCanvasY = Math.abs(centerY - viewBox[1]) * scale + offsetY; width = Math.abs(viewBox[2] - viewBox[0]) * scale; height = Math.abs(viewBox[3] - viewBox[1]) * scale; } // creating transform for the following operations: // translate(-centerX, -centerY), rotate and flip vertically, // scale, and translate(offsetCanvasX, offsetCanvasY) this.transform = [ rotateA * scale, rotateB * scale, rotateC * scale, rotateD * scale, offsetCanvasX - rotateA * scale * centerX - rotateC * scale * centerY, offsetCanvasY - rotateB * scale * centerX - rotateD * scale * centerY, ]; this.width = width; this.height = height; } /** * Clones viewport, with optional additional properties. * @param {PageViewportCloneParameters} [params] * @returns {PageViewport} Cloned viewport. */ clone({ scale = this.scale, rotation = this.rotation, offsetX = this.offsetX, offsetY = this.offsetY, dontFlip = false, } = {}) { return new PageViewport({ viewBox: this.viewBox.slice(), scale, rotation, offsetX, offsetY, dontFlip, }); } /** * Converts PDF point to the viewport coordinates. For examples, useful for * converting PDF location into canvas pixel coordinates. * @param {number} x - The x-coordinate. * @param {number} y - The y-coordinate. * @returns {Object} Object containing `x` and `y` properties of the * point in the viewport coordinate space. 
* @see {@link convertToPdfPoint} * @see {@link convertToViewportRectangle} */ convertToViewportPoint(x, y) { return Util.applyTransform([x, y], this.transform); } /** * Converts PDF rectangle to the viewport coordinates. * @param {Array} rect - The xMin, yMin, xMax and yMax coordinates. * @returns {Array} Array containing corresponding coordinates of the * rectangle in the viewport coordinate space. * @see {@link convertToViewportPoint} */ convertToViewportRectangle(rect) { const topLeft = Util.applyTransform([rect[0], rect[1]], this.transform); const bottomRight = Util.applyTransform([rect[2], rect[3]], this.transform); return [topLeft[0], topLeft[1], bottomRight[0], bottomRight[1]]; } /** * Converts viewport coordinates to the PDF location. For examples, useful * for converting canvas pixel location into PDF one. * @param {number} x - The x-coordinate. * @param {number} y - The y-coordinate. * @returns {Object} Object containing `x` and `y` properties of the * point in the PDF coordinate space. * @see {@link convertToViewportPoint} */ convertToPdfPoint(x, y) { return Util.applyInverseTransform([x, y], this.transform); } } class RenderingCancelledException extends BaseException { constructor(msg, type) { super(msg, "RenderingCancelledException"); this.type = type; } } const LinkTarget = { NONE: 0, // Default value. SELF: 1, BLANK: 2, PARENT: 3, TOP: 4, }; /** * @typedef ExternalLinkParameters * @typedef {Object} ExternalLinkParameters * @property {string} url - An absolute URL. * @property {LinkTarget} [target] - The link target. The default value is * `LinkTarget.NONE`. * @property {string} [rel] - The link relationship. The default value is * `DEFAULT_LINK_REL`. * @property {boolean} [enabled] - Whether the link should be enabled. The * default value is true. */ /** * Adds various attributes (href, title, target, rel) to hyperlinks. * @param {HTMLLinkElement} link - The link element. * @param {ExternalLinkParameters} params */ function addLinkAttributes(link, { url, target, rel, enabled = true } = {}) { assert( url && typeof url === "string", 'addLinkAttributes: A valid "url" parameter must provided.' ); const urlNullRemoved = removeNullCharacters(url); if (enabled) { link.href = link.title = urlNullRemoved; } else { link.href = ""; link.title = `Disabled: ${urlNullRemoved}`; link.onclick = () => { return false; }; } let targetStr = ""; // LinkTarget.NONE switch (target) { case LinkTarget.NONE: break; case LinkTarget.SELF: targetStr = "_self"; break; case LinkTarget.BLANK: targetStr = "_blank"; break; case LinkTarget.PARENT: targetStr = "_parent"; break; case LinkTarget.TOP: targetStr = "_top"; break; } link.target = targetStr; link.rel = typeof rel === "string" ? rel : DEFAULT_LINK_REL; } function isDataScheme(url) { const ii = url.length; let i = 0; while (i < ii && url[i].trim() === "") { i++; } return url.substring(i, i + 5).toLowerCase() === "data:"; } function isPdfFile(filename) { return typeof filename === "string" && /\.pdf$/i.test(filename); } /** * Gets the filename from a given URL. * @param {string} url * @returns {string} */ function getFilenameFromUrl(url) { const anchor = url.indexOf("#"); const query = url.indexOf("?"); const end = Math.min( anchor > 0 ? anchor : url.length, query > 0 ? query : url.length ); return url.substring(url.lastIndexOf("/", end) + 1, end); } /** * Returns the filename or guessed filename from the url (see issue 3455). * @param {string} url - The original PDF location. 
* @param {string} defaultFilename - The value returned if the filename is * unknown, or the protocol is unsupported. * @returns {string} Guessed PDF filename. */ function getPdfFilenameFromUrl(url, defaultFilename = "document.pdf") { if (typeof url !== "string") { return defaultFilename; } if (isDataScheme(url)) { warn('getPdfFilenameFromUrl: ignore "data:"-URL for performance reasons.'); return defaultFilename; } const reURI = /^(?:(?:[^:]+:)?\/\/[^/]+)?([^?#]*)(\?[^#]*)?(#.*)?$/; // SCHEME HOST 1.PATH 2.QUERY 3.REF // Pattern to get last matching NAME.pdf const reFilename = /[^/?#=]+\.pdf\b(?!.*\.pdf\b)/i; const splitURI = reURI.exec(url); let suggestedFilename = reFilename.exec(splitURI[1]) || reFilename.exec(splitURI[2]) || reFilename.exec(splitURI[3]); if (suggestedFilename) { suggestedFilename = suggestedFilename[0]; if (suggestedFilename.includes("%")) { // URL-encoded %2Fpath%2Fto%2Ffile.pdf should be file.pdf try { suggestedFilename = reFilename.exec( decodeURIComponent(suggestedFilename) )[0]; } catch (ex) { // Possible (extremely rare) errors: // URIError "Malformed URI", e.g. for "%AA.pdf" // TypeError "null has no properties", e.g. for "%2F.pdf" } } } return suggestedFilename || defaultFilename; } class StatTimer { constructor() { this.started = Object.create(null); this.times = []; } time(name) { if (name in this.started) { warn(`Timer is already running for ${name}`); } this.started[name] = Date.now(); } timeEnd(name) { if (!(name in this.started)) { warn(`Timer has not been started for ${name}`); } this.times.push({ name, start: this.started[name], end: Date.now(), }); // Remove timer from started so it can be called again. delete this.started[name]; } toString() { // Find the longest name for padding purposes. const outBuf = []; let longest = 0; for (const time of this.times) { const name = time.name; if (name.length > longest) { longest = name.length; } } for (const time of this.times) { const duration = time.end - time.start; outBuf.push(`${time.name.padEnd(longest)} ${duration}ms\n`); } return outBuf.join(""); } } function isValidFetchUrl(url, baseUrl) { try { const { protocol } = baseUrl ? new URL(url, baseUrl) : new URL(url); // The Fetch API only supports the http/https protocols, and not file/ftp. return protocol === "http:" || protocol === "https:"; } catch (ex) { return false; // `new URL()` will throw on incorrect data. } } /** * @param {string} src * @param {boolean} [removeScriptElement] * @returns {Promise<void>} */ function loadScript(src, removeScriptElement = false) { return new Promise((resolve, reject) => { const script = document.createElement("script"); script.src = src; script.onload = function (evt) { if (removeScriptElement) { script.remove(); } resolve(evt); }; script.onerror = function () { reject(new Error(`Cannot load script at: ${script.src}`)); }; (document.head || document.documentElement).appendChild(script); }); } // Deprecated API function -- display regardless of the `verbosity` setting. function deprecated(details) { console.log("Deprecated API usage: " + details); } let pdfDateStringRegex; class PDFDateString { /** * Convert a PDF date string to a JavaScript `Date` object. * * The PDF date string format is described in section 7.9.4 of the official * PDF 32000-1:2008 specification. However, in the PDF 1.7 reference (sixth * edition) Adobe describes the same format including a trailing apostrophe. * This syntax in incorrect, but Adobe Acrobat creates PDF files that contain * them. 
We ignore all apostrophes as they are not necessary for date parsing. * * Moreover, Adobe Acrobat doesn't handle changing the date to universal time * and doesn't use the user's time zone (effectively ignoring the HH' and mm'<|fim▁hole|> * * @param {string} input * @returns {Date|null} */ static toDateObject(input) { if (!input || !isString(input)) { return null; } // Lazily initialize the regular expression. if (!pdfDateStringRegex) { pdfDateStringRegex = new RegExp( "^D:" + // Prefix (required) "(\\d{4})" + // Year (required) "(\\d{2})?" + // Month (optional) "(\\d{2})?" + // Day (optional) "(\\d{2})?" + // Hour (optional) "(\\d{2})?" + // Minute (optional) "(\\d{2})?" + // Second (optional) "([Z|+|-])?" + // Universal time relation (optional) "(\\d{2})?" + // Offset hour (optional) "'?" + // Splitting apostrophe (optional) "(\\d{2})?" + // Offset minute (optional) "'?" // Trailing apostrophe (optional) ); } // Optional fields that don't satisfy the requirements from the regular // expression (such as incorrect digit counts or numbers that are out of // range) will fall back the defaults from the specification. const matches = pdfDateStringRegex.exec(input); if (!matches) { return null; } // JavaScript's `Date` object expects the month to be between 0 and 11 // instead of 1 and 12, so we have to correct for that. const year = parseInt(matches[1], 10); let month = parseInt(matches[2], 10); month = month >= 1 && month <= 12 ? month - 1 : 0; let day = parseInt(matches[3], 10); day = day >= 1 && day <= 31 ? day : 1; let hour = parseInt(matches[4], 10); hour = hour >= 0 && hour <= 23 ? hour : 0; let minute = parseInt(matches[5], 10); minute = minute >= 0 && minute <= 59 ? minute : 0; let second = parseInt(matches[6], 10); second = second >= 0 && second <= 59 ? second : 0; const universalTimeRelation = matches[7] || "Z"; let offsetHour = parseInt(matches[8], 10); offsetHour = offsetHour >= 0 && offsetHour <= 23 ? offsetHour : 0; let offsetMinute = parseInt(matches[9], 10) || 0; offsetMinute = offsetMinute >= 0 && offsetMinute <= 59 ? offsetMinute : 0; // Universal time relation 'Z' means that the local time is equal to the // universal time, whereas the relations '+'/'-' indicate that the local // time is later respectively earlier than the universal time. Every date // is normalized to universal time. if (universalTimeRelation === "-") { hour += offsetHour; minute += offsetMinute; } else if (universalTimeRelation === "+") { hour -= offsetHour; minute -= offsetMinute; } return new Date(Date.UTC(year, month, day, hour, minute, second)); } } /** * NOTE: This is (mostly) intended to support printing of XFA forms. */ function getXfaPageViewport(xfaPage, { scale = 1, rotation = 0 }) { const { width, height } = xfaPage.attributes.style; const viewBox = [0, 0, parseInt(width), parseInt(height)]; return new PageViewport({ viewBox, scale, rotation, }); } export { addLinkAttributes, DEFAULT_LINK_REL, deprecated, DOMCanvasFactory, DOMCMapReaderFactory, DOMStandardFontDataFactory, DOMSVGFactory, getFilenameFromUrl, getPdfFilenameFromUrl, getXfaPageViewport, isDataScheme, isPdfFile, isValidFetchUrl, LinkTarget, loadScript, PageViewport, PDFDateString, RenderingCancelledException, StatTimer, };<|fim▁end|>
* parts of the date string).
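The display_utils.js record above normalizes PDF date strings (`D:YYYYMMDDHHmmSS` plus an optional UTC offset, apostrophes ignored) to universal time. A compact Python analogue of the same parsing rules — the range clamping done by the JS version is omitted for brevity, so treat this as a sketch:

```python
import re
from datetime import datetime, timedelta, timezone

PDF_DATE = re.compile(
    r"^D:(\d{4})(\d{2})?(\d{2})?(\d{2})?(\d{2})?(\d{2})?([Z+-])?(\d{2})?'?(\d{2})?'?"
)

def parse_pdf_date(s):
    m = PDF_DATE.match(s or "")
    if not m:
        return None
    y, mo, d, h, mi, sec, rel, oh, om = m.groups()
    dt = datetime(int(y), int(mo or 1), int(d or 1),
                  int(h or 0), int(mi or 0), int(sec or 0), tzinfo=timezone.utc)
    offset = timedelta(hours=int(oh or 0), minutes=int(om or 0))
    if rel == "-":      # local time earlier than UTC: add the offset back
        dt += offset
    elif rel == "+":    # local time later than UTC: subtract it
        dt -= offset
    return dt
```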
<|file_name|>plan.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- ### required - do no delete @auth.requires_login() def plan(): return dict() def new(): form = SQLFORM.factory(db.contacts,db.groups) if form.accepts(request.vars): _id_user = db.contacts.insert(**db.contacts._filter_fields(form.vars)) <|fim▁hole|> response.flash = 'User registered successfully' return locals() def update(): id = request.args(0) group = db(db.groups.id == id).select()[0] form = SQLFORM(db.contacts, group.contact.id) group = SQLFORM(db.group, group.id) # Adding the group form form.append(group) if form.accepts(request.vars): # Updating the contacts db.contacts.update(**db.contacts._filter_fields(form.vars)) # Atualizando o grupo old_group = db(db.groups.id == group.id).select().first() old_group.update_record(group=group.vars.group) response.session = 'Updated with success!' return locals()<|fim▁end|>
form.vars.contact = _id_user id = db.groups.insert(**db.groups._filter_fields(form.vars))
<|file_name|>health.py<|end_file_name|><|fim▁begin|># Copyright 2015, Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Reference implementation for health checking in gRPC Python.""" import threading import grpc from grpc_health.v1 import health_pb2 class HealthServicer(health_pb2.HealthServicer): """Servicer handling RPCs for service statuses.""" def __init__(self): self._server_status_lock = threading.Lock() self._server_status = {} def Check(self, request, context): with self._server_status_lock: status = self._server_status.get(request.service) if status is None: context.set_code(grpc.StatusCode.NOT_FOUND) return health_pb2.HealthCheckResponse() else: return health_pb2.HealthCheckResponse(status=status) def set(self, service, status): """Sets the status of a service. Args: service: string, the name of the service. NOTE, '' must be set. status: HealthCheckResponse.status enum value indicating<|fim▁hole|> self._server_status[service] = status<|fim▁end|>
the status of the service """ with self._server_status_lock:
<|file_name|>ScaleImage.java<|end_file_name|><|fim▁begin|>package com.newppt.android.ui; import com.newppt.android.data.AnimUtils2; import android.content.Context; import android.util.AttributeSet; import android.view.MotionEvent; import android.widget.ImageView; public class ScaleImage extends ImageView { final private int FLIP_DISTANCE = 30; public ScaleImage(Context context) { super(context); // TODO Auto-generated constructor stub } public ScaleImage(Context context, AttributeSet attrs) { super(context, attrs); // TODO Auto-generated constructor stub } public ScaleImage(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); // TODO Auto-generated constructor stub } private int count = 0; private long firClick; private long secClick; private boolean scaleTip = true; private float x; private float y; @Override public boolean onTouchEvent(MotionEvent event) { // TODO Auto-generated method stub if (MotionEvent.ACTION_DOWN == event.getAction()) { count++; if (count == 1) {<|fim▁hole|> } else if (count == 2) { secClick = System.currentTimeMillis(); float mx = event.getX(); float my = event.getY(); if (secClick - firClick < 700 && Math.abs(mx - x) < FLIP_DISTANCE && Math.abs(my - y) < FLIP_DISTANCE) { // double-click event if (scaleTip) { x = event.getX(); y = event.getY(); AnimUtils2 animUtils2 = new AnimUtils2(); animUtils2.imageZoomOut(this, 200, x, y); scaleTip = false; } else { AnimUtils2 animUtils2 = new AnimUtils2(); animUtils2.imageZoomIn(this, 200, x, y); scaleTip = true; } } count = 0; firClick = 0; secClick = 0; } } return true; // return super.onTouchEvent(event); } }<|fim▁end|>
firClick = System.currentTimeMillis(); x = event.getX(); y = event.getY();
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>import re from exchange.constants import ( CURRENCIES, CURRENCY_NAMES, DEFAULT_CURRENCY, CURRENCY_EUR, CURRENCY_UAH, CURRENCY_USD, CURRENCY_SESSION_KEY) def round_number(value, decimal_places=2, down=False): assert decimal_places > 0 factor = 1.0 ** decimal_places sign = -1 if value < 0 else 1 return int(value * factor + sign * (0 if down else 0.5)) / factor def format_number(value): <|fim▁hole|> return value def format_price(price, round_price=False): price = float(price) return format_number(round_number(price) if round_price else price) def format_printable_price(price, currency=DEFAULT_CURRENCY): return '%s %s' % (format_price(price), dict(CURRENCIES)[currency]) def get_currency_from_session(session): currency = session.get(CURRENCY_SESSION_KEY) or DEFAULT_CURRENCY return int(currency) def get_price_factory(rates, src, dst): if src == dst: return lambda p: p name = lambda c: CURRENCY_NAMES[c] if src == CURRENCY_UAH: return lambda p: p / getattr(rates, name(dst)) if dst == CURRENCY_UAH: return lambda p: p * getattr(rates, name(src)) if src == CURRENCY_USD and dst == CURRENCY_EUR: return lambda p: p * rates.usd_eur if src == CURRENCY_EUR and dst == CURRENCY_USD: return lambda p: p / rates.usd_eur raise ValueError('Unknown currencies')<|fim▁end|>
append_comma = lambda match_object: "%s," % match_object.group(0) value = "%.2f" % float(value) value = re.sub("(\d)(?=(\d{3})+\.)", append_comma, value)
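The utils.py completion above comma-groups digits with a lookahead regex: any digit followed by one or more groups of three digits before the decimal point gets a comma appended after it. A quick self-contained demo of the same expression:

```python
import re

def format_number(value) -> str:
    s = "%.2f" % float(value)
    # Same pattern as the completion: digit + (three digits)+ + '.' lookahead.
    return re.sub(r"(\d)(?=(\d{3})+\.)", lambda m: m.group(0) + ",", s)

assert format_number(1234567.891) == "1,234,567.89"
```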
<|file_name|>dictionary-add.js<|end_file_name|><|fim▁begin|>function dictionaryAddCtrl($scope, DictService, $state) { $scope.$emit("update-title", { pageTitle: "字典列表", links: [ { url: "common.dictionary-list", name: "字典列表" }, { url: "common.dictionary-add", name: "新增字典" } ] });<|fim▁hole|> DictService.save($scope.dict, function () { //MessageService.saveSuccess(); //LocationTo.path("/sys/dict/list"); $state.transitionTo('common.dictionary-list') }, function () { }); }; } dictionaryAddCtrl.$inject = [ '$scope', 'DictService', '$state'];<|fim▁end|>
$scope.save = function () {
<|file_name|>test_cassandra_backend.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import unittest from datetime import datetime from unittest.mock import call from uuid import uuid4, UUID from mock import patch, MagicMock from alamo_worker.alerter.backend.cassandra import ( CassandraDriver, SELECT_QUERY, INSERT_QUERY, INSERT_SERVICE_QUERY, AsyncCassandraDriver ) from alamo_worker.alerter.utils import CachedResult from tests.base import run_async class CassandraDriverTest(unittest.TestCase): @classmethod def setUpClass(cls): cls.check_uuid = str(uuid4()) cls.trigger_uuid = str(uuid4()) @patch('alamo_worker.alerter.backend.cassandra.Cluster') @patch('alamo_worker.alerter.backend.cassandra.PlainTextAuthProvider') def setUp(self, auth_mock, cluster_mock): session_mock = MagicMock() cluster_mock.connect = MagicMock(return_value=session_mock) self.trigger = dict( id=self.trigger_uuid, uuid=str(uuid4()), severity='WARNING', enabled=True, result=dict(status=0, message='') ) self.driver = CassandraDriver( contact_points=[''], username='username', password='password', keyspace='test' ) def test_prepared_statements(self): expected_calls = [call(SELECT_QUERY), call(INSERT_QUERY), call(INSERT_SERVICE_QUERY)] self.driver._connect() self.driver._session.prepare.assert_has_calls( expected_calls, any_order=True ) def test_get_result_method(self): self.driver.get_result(self.check_uuid, self.trigger_uuid, 10) self.driver._connect() self.driver._session.execute.assert_called_once_with( self.driver._save_query_stmt, (UUID(self.check_uuid), UUID(self.trigger_uuid), 10), execution_profile='normal' ) def test_save_result_method(self): _now = datetime.now() self.driver._connect() self.driver.save_result( self.check_uuid, self.trigger, _now, _now, _now, False, '999' ) self.assertTrue(self.driver._session.execute.called) class DummyRow(object): def __init__(self, *, status, alert_sent, message): self.status = status self.alert_sent = alert_sent self.message = message class AsyncCassandraDriverTestCase(unittest.TestCase): @classmethod def setUpClass(cls): cls.check_uuid = uuid4() cls.trigger_uuid = uuid4() @patch.object(AsyncCassandraDriver, '_connect') def setUp(self, mock_connect): async def _execute(*args, **kwargs): return [ DummyRow(status=0, alert_sent=False, message='') ] driver = AsyncCassandraDriver( contact_points=[''], username='username', password='password', keyspace='test') driver._session = MagicMock() driver._session.execute_future.side_effect = _execute self.driver = driver self.trigger = dict( id=self.trigger_uuid, uuid=str(uuid4()), severity='WARNING', enabled=True, result=dict(status=0, message='') ) def test_get_result(self): result = run_async( self.driver.get_result( str(self.check_uuid), str(self.trigger_uuid), 10) ) self.driver._session.execute_future.assert_called_once_with( self.driver._retrieve_query_stmt, ( self.check_uuid, self.trigger_uuid, 10 ), execution_profile='normal' ) expected = CachedResult(0, 0, False, '') self.assertEqual(result, [expected]) def test_save_result_method(self): _now = datetime.now() self.driver._save_query_stmt = MagicMock() self.driver._save_service_query_stmt = MagicMock() <|fim▁hole|> run_async(self.driver.save_result( str(self.check_uuid), self.trigger, _now, _now, _now, False, '999' )) self.assertTrue(self.driver._session.execute_future.called)<|fim▁end|>
<|file_name|>index.js<|end_file_name|><|fim▁begin|>'use strict' if (!process.addAsyncListener) require('async-listener') var noop = function () {} module.exports = function () { return new AsyncState() } function AsyncState () { var state = this process.addAsyncListener({ create: asyncFunctionInitialized,<|fim▁hole|> before: asyncCallbackBefore, error: noop, after: asyncCallbackAfter }) // Record the state currently set on on the async-state object and return a // snapshot of it. The returned object will later be passed as the `data` // arg in the functions below. function asyncFunctionInitialized () { var data = {} for (var key in state) { data[key] = state[key] } return data } // We just returned from the event-loop: We'll now restore the state // previously saved by `asyncFunctionInitialized`. function asyncCallbackBefore (context, data) { for (var key in data) { state[key] = data[key] } } // Clear the state so that it doesn't leak between isolated async stacks. function asyncCallbackAfter (context, data) { for (var key in state) { delete state[key] } } }<|fim▁end|>
<|file_name|>frameobject.rs<|end_file_name|><|fim▁begin|>use libc::c_int; use crate::code::{PyCodeObject, CO_MAXBLOCKS}; use crate::object::*; use crate::pyport::Py_ssize_t; use crate::pystate::PyThreadState; #[repr(C)] #[derive(Copy, Clone)] pub struct PyTryBlock { pub b_type: c_int, pub b_handler: c_int, pub b_level: c_int, } #[repr(C)] #[derive(Copy, Clone)] pub struct PyFrameObject { #[cfg(py_sys_config = "Py_TRACE_REFS")] pub _ob_next: *mut PyObject, #[cfg(py_sys_config = "Py_TRACE_REFS")] pub _ob_prev: *mut PyObject, pub ob_refcnt: Py_ssize_t, pub ob_type: *mut PyTypeObject, pub ob_size: Py_ssize_t, pub f_back: *mut PyFrameObject, /* previous frame, or NULL */ pub f_code: *mut PyCodeObject, /* code segment */ pub f_builtins: *mut PyObject, /* builtin symbol table (PyDictObject) */ pub f_globals: *mut PyObject, /* global symbol table (PyDictObject) */ pub f_locals: *mut PyObject, /* local symbol table (any mapping) */ pub f_valuestack: *mut *mut PyObject, /* points after the last local */ /* Next free slot in f_valuestack. Frame creation sets to f_valuestack. Frame evaluation usually NULLs it, but a frame that yields sets it to the current stack top. */ pub f_stacktop: *mut *mut PyObject, pub f_trace: *mut PyObject, /* Trace function */ pub f_exc_type: *mut PyObject, pub f_exc_value: *mut PyObject, pub f_exc_traceback: *mut PyObject, pub f_tstate: *mut PyThreadState, pub f_lasti: c_int, /* Last instruction if called */ /* Call PyFrame_GetLineNumber() instead of reading this field directly. As of 2.3 f_lineno is only valid when tracing is active (i.e. when f_trace is set). At other times we use PyCode_Addr2Line to calculate the line from the current bytecode index. */ pub f_lineno: c_int, /* Current line number */ pub f_iblock: c_int, /* index in f_blockstack */ pub f_blockstack: [PyTryBlock; CO_MAXBLOCKS], /* for try and loop blocks */ pub f_localsplus: [*mut PyObject; 1], /* locals+stack, dynamically sized */ } #[cfg_attr(windows, link(name = "pythonXY"))] extern "C" { pub static mut PyFrame_Type: PyTypeObject; } #[inline] pub unsafe fn PyFrame_Check(op: *mut PyObject) -> c_int { ((*op).ob_type == &mut PyFrame_Type) as c_int } ignore! { #[inline] pub unsafe fn PyFrame_IsRestricted(f: *mut PyFrameObject) -> c_int { ((*f).f_builtins != (*(*(*f).f_tstate).interp).builtins) as c_int } } #[cfg_attr(windows, link(name = "pythonXY"))] extern "C" { pub fn PyFrame_New( tstate: *mut PyThreadState, code: *mut PyCodeObject, globals: *mut PyObject, locals: *mut PyObject, ) -> *mut PyFrameObject; pub fn PyFrame_BlockSetup( f: *mut PyFrameObject, _type: c_int, handler: c_int, level: c_int, ) -> (); pub fn PyFrame_BlockPop(f: *mut PyFrameObject) -> *mut PyTryBlock; pub fn PyFrame_LocalsToFast(f: *mut PyFrameObject, clear: c_int) -> (); pub fn PyFrame_FastToLocals(f: *mut PyFrameObject) -> (); <|fim▁hole|>}<|fim▁end|>
pub fn PyFrame_ClearFreeList() -> c_int; pub fn PyFrame_GetLineNumber(f: *mut PyFrameObject) -> c_int;
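The frameobject.rs record mirrors CPython's `PyFrameObject` layout (`f_back`, `f_code`, `f_lineno`, block stack, value stack). The same fields are reachable from Python itself, which makes for an easy sanity check of what the struct models:

```python
import sys

def caller_info(depth: int = 1):
    """Read f_code/f_lineno off a live frame, the fields declared above."""
    frame = sys._getframe(depth)  # CPython-specific, like the struct itself
    return frame.f_code.co_filename, frame.f_lineno
```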
<|file_name|>cfg_setting_imgsensor.cpp<|end_file_name|><|fim▁begin|>/* Copyright Statement: * * This software/firmware and related documentation ("MediaTek Software") are * protected under relevant copyright laws. The information contained herein * is confidential and proprietary to MediaTek Inc. and/or its licensors. * Without the prior written permission of MediaTek inc. and/or its licensors, * any reproduction, modification, use or disclosure of MediaTek Software, * and information contained herein, in whole or in part, shall be strictly prohibited. */ /* MediaTek Inc. (C) 2010. All rights reserved. * * BY OPENING THIS FILE, RECEIVER HEREBY UNEQUIVOCALLY ACKNOWLEDGES AND AGREES * THAT THE SOFTWARE/FIRMWARE AND ITS DOCUMENTATIONS ("MEDIATEK SOFTWARE") * RECEIVED FROM MEDIATEK AND/OR ITS REPRESENTATIVES ARE PROVIDED TO RECEIVER ON * AN "AS-IS" BASIS ONLY. MEDIATEK EXPRESSLY DISCLAIMS ANY AND ALL WARRANTIES, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE OR NONINFRINGEMENT. * NEITHER DOES MEDIATEK PROVIDE ANY WARRANTY WHATSOEVER WITH RESPECT TO THE * SOFTWARE OF ANY THIRD PARTY WHICH MAY BE USED BY, INCORPORATED IN, OR * SUPPLIED WITH THE MEDIATEK SOFTWARE, AND RECEIVER AGREES TO LOOK ONLY TO SUCH * THIRD PARTY FOR ANY WARRANTY CLAIM RELATING THERETO. RECEIVER EXPRESSLY ACKNOWLEDGES * THAT IT IS RECEIVER'S SOLE RESPONSIBILITY TO OBTAIN FROM ANY THIRD PARTY ALL PROPER LICENSES * CONTAINED IN MEDIATEK SOFTWARE. MEDIATEK SHALL ALSO NOT BE RESPONSIBLE FOR ANY MEDIATEK * SOFTWARE RELEASES MADE TO RECEIVER'S SPECIFICATION OR TO CONFORM TO A PARTICULAR * STANDARD OR OPEN FORUM. RECEIVER'S SOLE AND EXCLUSIVE REMEDY AND MEDIATEK'S ENTIRE AND * CUMULATIVE LIABILITY WITH RESPECT TO THE MEDIATEK SOFTWARE RELEASED HEREUNDER WILL BE, * AT MEDIATEK'S OPTION, TO REVISE OR REPLACE THE MEDIATEK SOFTWARE AT ISSUE, * OR REFUND ANY SOFTWARE LICENSE FEES OR SERVICE CHARGE PAID BY RECEIVER TO * MEDIATEK FOR SUCH MEDIATEK SOFTWARE AT ISSUE. * * The following software/firmware and/or related documentation ("MediaTek Software") * have been modified by MediaTek Inc. All revisions are subject to any receiver's * applicable license agreements with MediaTek Inc. */ //#ifndef _CFG_SETTING_IMGSENSOR_H_ //#define _CFG_SETTING_IMGSENSOR_H_ #include "camera_custom_imgsensor_cfg.h" using namespace NSCamCustomSensor; namespace NSCamCustomSensor { /******************************************************************************* * Image Sensor Orientation *******************************************************************************/ SensorOrientation_T const& getSensorOrientation() { static SensorOrientation_T const inst = { u4Degree_0 : 90, // main sensor in degree (0, 90, 180, 270) u4Degree_1 : 270, // sub sensor in degree (0, 90, 180, 270) u4Degree_2 : 90, // main2 sensor in degree (0, 90, 180, 270) }; return inst; } /******************************************************************************* * Return fake orientation for front sensor or not * MTRUE: return 90 for front sensor in degree 0, * return 270 for front sensor in degree 180. * MFALSE: not return fake orientation. *******************************************************************************/ MBOOL isRetFakeSubOrientation() { return MFALSE; } /******************************************************************************* * Return fake orientation for back sensor or not * MTRUE: return 90 for back sensor in degree 0, * return 270 for back sensor in degree 180. 
* MFALSE: not return fake orientation. *******************************************************************************/ MBOOL isRetFakeMainOrientation() { return MFALSE; } /******************************************************************************* * Return fake orientation for back (3D) sensor or not * MTRUE: return 90 for back sensor in degree 0, * return 270 for back sensor in degree 180. * MFALSE: not return fake orientation. *******************************************************************************/ MBOOL isRetFakeMain2Orientation() { return MFALSE; } /******************************************************************************* * Sensor Input Data Bit Order * Return: * 0 : raw data input [9:2] * 1 : raw data input [7:0] * -1 : error *******************************************************************************/ MINT32 getSensorInputDataBitOrder(EDevId const eDevId) { switch (eDevId) { case eDevId_ImgSensor0: return 1; case eDevId_ImgSensor1: return 0; case eDevId_ImgSensor2: return 0; default: break; } return -1; } /******************************************************************************* * Sensor Pixel Clock Inverse in PAD side. * Return: * 0 : no inverse * 1 : inverse * -1 : error *******************************************************************************/ MINT32 getSensorPadPclkInv(EDevId const eDevId) { switch (eDevId) { case eDevId_ImgSensor0: return 0; case eDevId_ImgSensor1: return 0; case eDevId_ImgSensor2: return 0; default: break; } return -1; } <|fim▁hole|>* 0 : Back side * 1 : Front side (LCD side) * -1 : error *******************************************************************************/ MINT32 getSensorFacingDirection(EDevId const eDevId) { switch (eDevId) { case eDevId_ImgSensor0: return 0; case eDevId_ImgSensor1: return 1; case eDevId_ImgSensor2: return 0; default: break; } return -1; } /******************************************************************************* * Image Sensor Module FOV *******************************************************************************/ SensorViewAngle_T const& getSensorViewAngle() { static SensorViewAngle_T const inst = { MainSensorHorFOV : 63, MainSensorVerFOV : 49, SubSensorHorFOV : 60, SubSensorVerFOV : 40, Main2SensorHorFOV : 0, //not support Main2SensorVerFOV : 0, }; return inst; } }; //#endif // _CFG_SETTING_IMGSENSOR_H_<|fim▁end|>
/******************************************************************************* * Sensor Placement Facing Direction * Return:
<|file_name|>562.cpp<|end_file_name|><|fim▁begin|>#include <iostream> #include <algorithm> #include <numeric> /*<|fim▁hole|> int T, n; int coins[105]; int f[50005]; int main() { cin >> T; while (T-- > 0) { cin >> n; fill(f, f + 50005, 0); for (int i = 0; i < n; ++i) cin >> coins[i]; int total = accumulate(coins, coins + n, 0); for (int i = 0; i < n; ++i) for (int j = total / 2; j - coins[i] >= 0; --j) f[j] = max(f[j], f[j - coins[i]] + coins[i]); cout << total - 2 * f[total / 2] << endl; } return 0; }<|fim▁end|>
* Turns out the problem was that I hadn't specified the length of the f array correctly when initializing it. 233 */ using namespace std;
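A note on the technique in 562.cpp above: the program minimizes the difference between two coin piles by running a 0/1-knapsack over capacity total/2, so the best reachable sum f[total/2] yields the answer total - 2*f[total/2]. Below is a minimal Python sketch of the same dynamic program; the function name and test values are illustrative, not part of the original file.

# Minimal sketch of the balanced-partition DP used in 562.cpp
# (hypothetical helper, for illustration only).
def min_partition_diff(coins):
    total = sum(coins)
    half = total // 2
    f = [0] * (half + 1)                  # f[j]: best coin sum not exceeding j
    for c in coins:
        for j in range(half, c - 1, -1):  # iterate downward so each coin is used once
            f[j] = max(f[j], f[j - c] + c)
    return total - 2 * f[half]            # difference between the two groups

# Example with hypothetical values: groups {1,2,4} and {3,5} differ by 1.
assert min_partition_diff([1, 2, 3, 4, 5]) == 1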
<|file_name|>http_loader.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use cookie_rs::Cookie as CookiePair; use devtools_traits::HttpRequest as DevtoolsHttpRequest; use devtools_traits::HttpResponse as DevtoolsHttpResponse; use devtools_traits::{ChromeToDevtoolsControlMsg, DevtoolsControlMsg, NetworkEvent}; use flate2::Compression; use flate2::write::{GzEncoder, DeflateEncoder}; use hyper::header::{Accept, AcceptEncoding, ContentEncoding, ContentLength, Cookie as CookieHeader}; use hyper::header::{Authorization, Basic}; use hyper::header::{Encoding, Headers, Host, Location, Quality, QualityItem, qitem, SetCookie}; use hyper::header::{StrictTransportSecurity, UserAgent}; use hyper::http::RawStatus; use hyper::method::Method; use hyper::mime::{Mime, SubLevel, TopLevel}; use hyper::status::StatusCode; use msg::constellation_msg::PipelineId; use net::cookie::Cookie; use net::cookie_storage::CookieStorage; use net::hsts::{HSTSList, HSTSEntry}; use net::http_loader::{load, LoadError, HttpRequestFactory, HttpRequest, HttpResponse, HttpState}; use net::resource_thread::{AuthCacheEntry, CancellationListener}; use net_traits::{LoadData, CookieSource, LoadContext, IncludeSubdomains}; use std::borrow::Cow; use std::collections::HashMap; use std::io::{self, Write, Read, Cursor}; use std::sync::mpsc::Receiver; use std::sync::{Arc, mpsc, RwLock}; use url::Url; const DEFAULT_USER_AGENT: &'static str = "Test-agent"; fn respond_with(body: Vec<u8>) -> MockResponse { let headers = Headers::new(); respond_with_headers(body, headers) } fn respond_with_headers(body: Vec<u8>, mut headers: Headers) -> MockResponse { headers.set(ContentLength(body.len() as u64)); MockResponse::new( headers, StatusCode::Ok, RawStatus(200, Cow::Borrowed("Ok")), body ) } fn read_response(reader: &mut Read) -> String { let mut buf = vec![0; 1024]; match reader.read(&mut buf) { Ok(len) if len > 0 => { unsafe { buf.set_len(len); } String::from_utf8(buf).unwrap() }, Ok(_) => "".to_owned(), Err(e) => panic!("problem reading response {}", e) } } struct MockResponse { h: Headers, sc: StatusCode, sr: RawStatus, msg: Cursor<Vec<u8>> } impl MockResponse { fn new(h: Headers, sc: StatusCode, sr: RawStatus, msg: Vec<u8>) -> MockResponse { MockResponse { h: h, sc: sc, sr: sr, msg: Cursor::new(msg) } } } impl Read for MockResponse { fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { self.msg.read(buf) } } impl HttpResponse for MockResponse { fn headers(&self) -> &Headers { &self.h } fn status(&self) -> StatusCode { self.sc } fn status_raw(&self) -> &RawStatus { &self.sr } } fn redirect_to(host: String) -> MockResponse { let mut headers = Headers::new(); headers.set(Location(host.to_owned())); MockResponse::new( headers, StatusCode::MovedPermanently, RawStatus(301, Cow::Borrowed("Moved Permanently")), b"".to_vec() ) } fn redirect_with_headers(host: String, mut headers: Headers) -> MockResponse { headers.set(Location(host.to_string())); MockResponse::new( headers, StatusCode::MovedPermanently, RawStatus(301, Cow::Borrowed("Moved Permanently")), b"".to_vec() ) } enum ResponseType { Redirect(String), RedirectWithHeaders(String, Headers), Text(Vec<u8>), WithHeaders(Vec<u8>, Headers) } struct MockRequest { headers: Headers, t: ResponseType } impl MockRequest { fn new(t: ResponseType) -> MockRequest { MockRequest { headers: Headers::new(), t: t } } } fn 
response_for_request_type(t: ResponseType) -> Result<MockResponse, LoadError> { match t { ResponseType::Redirect(location) => { Ok(redirect_to(location)) }, ResponseType::RedirectWithHeaders(location, headers) => { Ok(redirect_with_headers(location, headers)) } ResponseType::Text(b) => { Ok(respond_with(b)) }, ResponseType::WithHeaders(b, h) => { Ok(respond_with_headers(b, h)) } } } impl HttpRequest for MockRequest { type R = MockResponse; fn headers_mut(&mut self) -> &mut Headers { &mut self.headers } fn send(self, _: &Option<Vec<u8>>) -> Result<MockResponse, LoadError> { response_for_request_type(self.t) } } struct AssertRequestMustHaveHeaders { expected_headers: Headers, request_headers: Headers, t: ResponseType } impl AssertRequestMustHaveHeaders { fn new(t: ResponseType, expected_headers: Headers) -> Self { AssertRequestMustHaveHeaders { expected_headers: expected_headers, request_headers: Headers::new(), t: t } } } impl HttpRequest for AssertRequestMustHaveHeaders { type R = MockResponse; fn headers_mut(&mut self) -> &mut Headers { &mut self.request_headers } fn send(self, _: &Option<Vec<u8>>) -> Result<MockResponse, LoadError> { assert_eq!(self.request_headers, self.expected_headers); response_for_request_type(self.t) } } struct AssertRequestMustIncludeHeaders { expected_headers: Headers, request_headers: Headers, t: ResponseType } impl AssertRequestMustIncludeHeaders { fn new(t: ResponseType, expected_headers_op: Option<Headers>) -> Self { match expected_headers_op { Some(expected_headers) => { assert!(expected_headers.len() != 0); AssertRequestMustIncludeHeaders { expected_headers: expected_headers, request_headers: Headers::new(), t: t } } None => AssertRequestMustIncludeHeaders { expected_headers: Headers::new(), request_headers: Headers::new(), t: t } } } } impl HttpRequest for AssertRequestMustIncludeHeaders { type R = MockResponse; fn headers_mut(&mut self) -> &mut Headers { &mut self.request_headers } fn send(self, _: &Option<Vec<u8>>) -> Result<MockResponse, LoadError> { for header in self.expected_headers.iter() { assert!(self.request_headers.get_raw(header.name()).is_some()); assert_eq!( self.request_headers.get_raw(header.name()).unwrap(), self.expected_headers.get_raw(header.name()).unwrap() ) } response_for_request_type(self.t) } } struct AssertMustHaveHeadersRequestFactory { expected_headers: Headers, body: Vec<u8> } impl HttpRequestFactory for AssertMustHaveHeadersRequestFactory { type R = AssertRequestMustHaveHeaders; fn create(&self, _: Url, _: Method) -> Result<AssertRequestMustHaveHeaders, LoadError> { Ok( AssertRequestMustHaveHeaders::new( ResponseType::Text(self.body.clone()), self.expected_headers.clone() ) ) } } struct AssertMustIncludeHeadersRequestFactory { expected_headers: Headers, body: Vec<u8> } impl HttpRequestFactory for AssertMustIncludeHeadersRequestFactory { type R = AssertRequestMustIncludeHeaders; fn create(&self, _: Url, _: Method) -> Result<AssertRequestMustIncludeHeaders, LoadError> { Ok( AssertRequestMustIncludeHeaders::new( ResponseType::Text(self.body.clone()), Some(self.expected_headers.clone()) ) ) } } fn assert_cookie_for_domain(cookie_jar: Arc<RwLock<CookieStorage>>, domain: &str, cookie: &str) { let mut cookie_jar = cookie_jar.write().unwrap(); let url = Url::parse(&*domain).unwrap(); let cookies = cookie_jar.cookies_for_url(&url, CookieSource::HTTP); if let Some(cookie_list) = cookies { assert_eq!(cookie.to_owned(), cookie_list); } else { assert_eq!(cookie.len(), 0); } } struct AssertRequestMustNotIncludeHeaders { 
headers_not_expected: Vec<String>, request_headers: Headers, t: ResponseType } impl AssertRequestMustNotIncludeHeaders { fn new(t: ResponseType, headers_not_expected: Vec<String>) -> Self { assert!(headers_not_expected.len() != 0); AssertRequestMustNotIncludeHeaders { headers_not_expected: headers_not_expected, request_headers: Headers::new(), t: t } } } impl HttpRequest for AssertRequestMustNotIncludeHeaders { type R = MockResponse; fn headers_mut(&mut self) -> &mut Headers { &mut self.request_headers } fn send(self, _: &Option<Vec<u8>>) -> Result<MockResponse, LoadError> { for header in &self.headers_not_expected { assert!(self.request_headers.get_raw(header).is_none()); } response_for_request_type(self.t) } } struct AssertMustNotIncludeHeadersRequestFactory { headers_not_expected: Vec<String>, body: Vec<u8> } impl HttpRequestFactory for AssertMustNotIncludeHeadersRequestFactory { type R = AssertRequestMustNotIncludeHeaders; fn create(&self, _: Url, _: Method) -> Result<AssertRequestMustNotIncludeHeaders, LoadError> { Ok( AssertRequestMustNotIncludeHeaders::new( ResponseType::Text(self.body.clone()), self.headers_not_expected.clone() ) ) } } struct AssertMustHaveBodyRequest { expected_body: Option<Vec<u8>>, headers: Headers, t: ResponseType } impl AssertMustHaveBodyRequest { fn new(t: ResponseType, expected_body: Option<Vec<u8>>) -> Self { AssertMustHaveBodyRequest { expected_body: expected_body, headers: Headers::new(), t: t } } } impl HttpRequest for AssertMustHaveBodyRequest { type R = MockResponse; fn headers_mut(&mut self) -> &mut Headers { &mut self.headers } fn send(self, body: &Option<Vec<u8>>) -> Result<MockResponse, LoadError> { assert_eq!(self.expected_body, *body); response_for_request_type(self.t) } } fn expect_devtools_http_request(devtools_port: &Receiver<DevtoolsControlMsg>) -> DevtoolsHttpRequest { match devtools_port.recv().unwrap() { DevtoolsControlMsg::FromChrome( ChromeToDevtoolsControlMsg::NetworkEvent(_, net_event)) => { match net_event { NetworkEvent::HttpRequest(httprequest) => { httprequest }, _ => panic!("No HttpRequest Received"), } }, _ => panic!("No HttpRequest Received"), } } fn expect_devtools_http_response(devtools_port: &Receiver<DevtoolsControlMsg>) -> DevtoolsHttpResponse { match devtools_port.recv().unwrap() { DevtoolsControlMsg::FromChrome( ChromeToDevtoolsControlMsg::NetworkEvent(_, net_event_response)) => { match net_event_response { NetworkEvent::HttpResponse(httpresponse) => { httpresponse }, _ => panic!("No HttpResponse Received"), } }, _ => panic!("No HttpResponse Received"), } } #[test] fn test_check_default_headers_loaded_in_every_request() { let url = url!("http://mozilla.com"); let http_state = HttpState::new(); let mut load_data = LoadData::new(LoadContext::Browsing, url.clone(), None); load_data.data = None; load_data.method = Method::Get; let mut headers = Headers::new(); headers.set(AcceptEncoding(vec![qitem(Encoding::Gzip), qitem(Encoding::Deflate), qitem(Encoding::EncodingExt("br".to_owned()))])); headers.set(Host { hostname: "mozilla.com".to_owned() , port: None }); let accept = Accept(vec![ qitem(Mime(TopLevel::Text, SubLevel::Html, vec![])), qitem(Mime(TopLevel::Application, SubLevel::Ext("xhtml+xml".to_owned()), vec![])), QualityItem::new(Mime(TopLevel::Application, SubLevel::Xml, vec![]), Quality(900u16)), QualityItem::new(Mime(TopLevel::Star, SubLevel::Star, vec![]), Quality(800u16)), ]); headers.set(accept); headers.set(UserAgent(DEFAULT_USER_AGENT.to_owned())); // Testing for method.GET let _ = 
load::<AssertRequestMustHaveHeaders>(load_data.clone(), &http_state, None, &AssertMustHaveHeadersRequestFactory { expected_headers: headers.clone(), body: <[_]>::to_vec(&[]) }, DEFAULT_USER_AGENT.to_owned(), &CancellationListener::new(None)); // Testing for method.POST load_data.method = Method::Post; headers.set(ContentLength(0 as u64)); let _ = load::<AssertRequestMustHaveHeaders>(load_data.clone(), &http_state, None, &AssertMustHaveHeadersRequestFactory { expected_headers: headers, body: <[_]>::to_vec(&[]) }, DEFAULT_USER_AGENT.to_owned(), &CancellationListener::new(None)); } #[test] fn test_load_when_request_is_not_get_or_head_and_there_is_no_body_content_length_should_be_set_to_0() { let url = url!("http://mozilla.com"); let http_state = HttpState::new(); let mut load_data = LoadData::new(LoadContext::Browsing, url.clone(), None); load_data.data = None; load_data.method = Method::Post; let mut content_length = Headers::new(); content_length.set(ContentLength(0)); let _ = load::<AssertRequestMustIncludeHeaders>( load_data.clone(), &http_state, None, &AssertMustIncludeHeadersRequestFactory { expected_headers: content_length, body: <[_]>::to_vec(&[]) }, DEFAULT_USER_AGENT.to_owned(), &CancellationListener::new(None)); } #[test] fn test_request_and_response_data_with_network_messages() { struct Factory; impl HttpRequestFactory for Factory { type R = MockRequest; fn create(&self, _: Url, _: Method) -> Result<MockRequest, LoadError> { let mut headers = Headers::new(); headers.set(Host { hostname: "foo.bar".to_owned(), port: None }); Ok(MockRequest::new( ResponseType::WithHeaders(<[_]>::to_vec("Yay!".as_bytes()), headers)) ) } } let http_state = HttpState::new(); let url = url!("https://mozilla.com"); let (devtools_chan, devtools_port) = mpsc::channel::<DevtoolsControlMsg>(); // This will probably have to be changed as it uses fake_root_pipeline_id which is marked for removal. 
let pipeline_id = PipelineId::fake_root_pipeline_id(); let mut load_data = LoadData::new(LoadContext::Browsing, url.clone(), Some(pipeline_id)); let mut request_headers = Headers::new(); request_headers.set(Host { hostname: "bar.foo".to_owned(), port: None }); load_data.headers = request_headers.clone(); let _ = load::<MockRequest>(load_data, &http_state, Some(devtools_chan), &Factory, DEFAULT_USER_AGENT.to_owned(), &CancellationListener::new(None)); // notification received from devtools let devhttprequest = expect_devtools_http_request(&devtools_port); let devhttpresponse = expect_devtools_http_response(&devtools_port); //Creating default headers for request let mut headers = Headers::new(); headers.set(AcceptEncoding(vec![ qitem(Encoding::Gzip), qitem(Encoding::Deflate), qitem(Encoding::EncodingExt("br".to_owned())) ])); headers.set(Host { hostname: "mozilla.com".to_owned() , port: None }); let accept = Accept(vec![ qitem(Mime(TopLevel::Text, SubLevel::Html, vec![])), qitem(Mime(TopLevel::Application, SubLevel::Ext("xhtml+xml".to_owned()), vec![])), QualityItem::new(Mime(TopLevel::Application, SubLevel::Xml, vec![]), Quality(900u16)), QualityItem::new(Mime(TopLevel::Star, SubLevel::Star, vec![]), Quality(800u16)), ]); headers.set(accept); headers.set(UserAgent(DEFAULT_USER_AGENT.to_owned())); let httprequest = DevtoolsHttpRequest { url: url, method: Method::Get, headers: headers, body: None, pipeline_id: pipeline_id, startedDateTime: devhttprequest.startedDateTime }; let content = "Yay!"; let mut response_headers = Headers::new(); response_headers.set(ContentLength(content.len() as u64)); response_headers.set(Host { hostname: "foo.bar".to_owned(), port: None }); let httpresponse = DevtoolsHttpResponse { headers: Some(response_headers), status: Some(RawStatus(200, Cow::Borrowed("Ok"))), body: None, pipeline_id: pipeline_id, }; assert_eq!(devhttprequest, httprequest); assert_eq!(devhttpresponse, httpresponse); } #[test] fn test_request_and_response_message_from_devtool_without_pipeline_id() { struct Factory; impl HttpRequestFactory for Factory { type R = MockRequest; fn create(&self, _: Url, _: Method) -> Result<MockRequest, LoadError> { let mut headers = Headers::new(); headers.set(Host { hostname: "foo.bar".to_owned(), port: None }); Ok(MockRequest::new( ResponseType::WithHeaders(<[_]>::to_vec("Yay!".as_bytes()), headers)) ) } } let http_state = HttpState::new(); let url = url!("https://mozilla.com"); let (devtools_chan, devtools_port) = mpsc::channel::<DevtoolsControlMsg>(); let load_data = LoadData::new(LoadContext::Browsing, url.clone(), None); let _ = load::<MockRequest>(load_data, &http_state, Some(devtools_chan), &Factory, DEFAULT_USER_AGENT.to_owned(), &CancellationListener::new(None)); // notification received from devtools assert!(devtools_port.try_recv().is_err()); } #[test] fn test_load_when_redirecting_from_a_post_should_rewrite_next_request_as_get() { struct Factory; impl HttpRequestFactory for Factory { type R = MockRequest; fn create(&self, url: Url, method: Method) -> Result<MockRequest, LoadError> { if url.domain().unwrap() == "mozilla.com" { assert_eq!(Method::Post, method); Ok(MockRequest::new(ResponseType::Redirect("http://mozilla.org".to_owned()))) } else { assert_eq!(Method::Get, method); Ok(MockRequest::new(ResponseType::Text(<[_]>::to_vec("Yay!".as_bytes())))) } } } let url = url!("http://mozilla.com"); let mut load_data = LoadData::new(LoadContext::Browsing, url.clone(), None); load_data.method = Method::Post; let http_state = HttpState::new(); let _ = 
load::<MockRequest>(load_data, &http_state, None, &Factory, DEFAULT_USER_AGENT.to_owned(), &CancellationListener::new(None)); } #[test] fn test_load_should_decode_the_response_as_deflate_when_response_headers_have_content_encoding_deflate() { struct Factory; impl HttpRequestFactory for Factory { type R = MockRequest; fn create(&self, _: Url, _: Method) -> Result<MockRequest, LoadError> { let mut e = DeflateEncoder::new(Vec::new(), Compression::Default); e.write(b"Yay!").unwrap(); let encoded_content = e.finish().unwrap(); let mut headers = Headers::new(); headers.set(ContentEncoding(vec![Encoding::Deflate])); Ok(MockRequest::new(ResponseType::WithHeaders(encoded_content, headers))) } } let url = url!("http://mozilla.com"); let load_data = LoadData::new(LoadContext::Browsing, url.clone(), None); let http_state = HttpState::new(); let mut response = load::<MockRequest>( load_data, &http_state, None, &Factory, DEFAULT_USER_AGENT.to_owned(), &CancellationListener::new(None)) .unwrap();<|fim▁hole|> #[test] fn test_load_should_decode_the_response_as_gzip_when_response_headers_have_content_encoding_gzip() { struct Factory; impl HttpRequestFactory for Factory { type R = MockRequest; fn create(&self, _: Url, _: Method) -> Result<MockRequest, LoadError> { let mut e = GzEncoder::new(Vec::new(), Compression::Default); e.write(b"Yay!").unwrap(); let encoded_content = e.finish().unwrap(); let mut headers = Headers::new(); headers.set(ContentEncoding(vec![Encoding::Gzip])); Ok(MockRequest::new(ResponseType::WithHeaders(encoded_content, headers))) } } let url = url!("http://mozilla.com"); let load_data = LoadData::new(LoadContext::Browsing, url.clone(), None); let http_state = HttpState::new(); let mut response = load::<MockRequest>( load_data, &http_state, None, &Factory, DEFAULT_USER_AGENT.to_owned(), &CancellationListener::new(None)) .unwrap(); assert_eq!(read_response(&mut response), "Yay!"); } #[test] fn test_load_doesnt_send_request_body_on_any_redirect() { struct Factory; impl HttpRequestFactory for Factory { type R = AssertMustHaveBodyRequest; fn create(&self, url: Url, _: Method) -> Result<AssertMustHaveBodyRequest, LoadError> { if url.domain().unwrap() == "mozilla.com" { Ok( AssertMustHaveBodyRequest::new( ResponseType::Redirect("http://mozilla.org".to_owned()), Some(<[_]>::to_vec("Body on POST!".as_bytes())) ) ) } else { Ok( AssertMustHaveBodyRequest::new( ResponseType::Text(<[_]>::to_vec("Yay!".as_bytes())), None ) ) } } } let url = url!("http://mozilla.com"); let mut load_data = LoadData::new(LoadContext::Browsing, url.clone(), None); load_data.data = Some(<[_]>::to_vec("Body on POST!".as_bytes())); let http_state = HttpState::new(); let _ = load::<AssertMustHaveBodyRequest>( load_data, &http_state, None, &Factory, DEFAULT_USER_AGENT.to_owned(), &CancellationListener::new(None)); } #[test] fn test_load_doesnt_add_host_to_sts_list_when_url_is_http_even_if_sts_headers_are_present() { struct Factory; impl HttpRequestFactory for Factory { type R = MockRequest; fn create(&self, _: Url, _: Method) -> Result<MockRequest, LoadError> { let content = <[_]>::to_vec("Yay!".as_bytes()); let mut headers = Headers::new(); headers.set(StrictTransportSecurity::excluding_subdomains(31536000)); Ok(MockRequest::new(ResponseType::WithHeaders(content, headers))) } } let url = url!("http://mozilla.com"); let load_data = LoadData::new(LoadContext::Browsing, url.clone(), None); let http_state = HttpState::new(); let _ = load::<MockRequest>(load_data, &http_state, None, &Factory, DEFAULT_USER_AGENT.to_owned(), 
&CancellationListener::new(None)); assert_eq!(http_state.hsts_list.read().unwrap().is_host_secure("mozilla.com"), false); } #[test] fn test_load_adds_host_to_sts_list_when_url_is_https_and_sts_headers_are_present() { struct Factory; impl HttpRequestFactory for Factory { type R = MockRequest; fn create(&self, _: Url, _: Method) -> Result<MockRequest, LoadError> { let content = <[_]>::to_vec("Yay!".as_bytes()); let mut headers = Headers::new(); headers.set(StrictTransportSecurity::excluding_subdomains(31536000)); Ok(MockRequest::new(ResponseType::WithHeaders(content, headers))) } } let url = url!("https://mozilla.com"); let load_data = LoadData::new(LoadContext::Browsing, url.clone(), None); let http_state = HttpState::new(); let _ = load::<MockRequest>(load_data, &http_state, None, &Factory, DEFAULT_USER_AGENT.to_owned(), &CancellationListener::new(None)); assert!(http_state.hsts_list.read().unwrap().is_host_secure("mozilla.com")); } #[test] fn test_load_sets_cookies_in_the_resource_manager_when_it_get_set_cookie_header_in_response() { struct Factory; impl HttpRequestFactory for Factory { type R = MockRequest; fn create(&self, _: Url, _: Method) -> Result<MockRequest, LoadError> { let content = <[_]>::to_vec("Yay!".as_bytes()); let mut headers = Headers::new(); headers.set(SetCookie(vec![CookiePair::new("mozillaIs".to_owned(), "theBest".to_owned())])); Ok(MockRequest::new(ResponseType::WithHeaders(content, headers))) } } let url = url!("http://mozilla.com"); let http_state = HttpState::new(); assert_cookie_for_domain(http_state.cookie_jar.clone(), "http://mozilla.com", ""); let load_data = LoadData::new(LoadContext::Browsing, url.clone(), None); let _ = load::<MockRequest>(load_data, &http_state, None, &Factory, DEFAULT_USER_AGENT.to_owned(), &CancellationListener::new(None)); assert_cookie_for_domain(http_state.cookie_jar.clone(), "http://mozilla.com", "mozillaIs=theBest"); } #[test] fn test_load_sets_requests_cookies_header_for_url_by_getting_cookies_from_the_resource_manager() { let url = url!("http://mozilla.com"); let mut load_data = LoadData::new(LoadContext::Browsing, url.clone(), None); load_data.data = Some(<[_]>::to_vec("Yay!".as_bytes())); let http_state = HttpState::new(); { let mut cookie_jar = http_state.cookie_jar.write().unwrap(); let cookie_url = url.clone(); let cookie = Cookie::new_wrapped( CookiePair::new("mozillaIs".to_owned(), "theBest".to_owned()), &cookie_url, CookieSource::HTTP ).unwrap(); cookie_jar.push(cookie, CookieSource::HTTP); } let mut cookie = Headers::new(); cookie.set(CookieHeader(vec![CookiePair::new("mozillaIs".to_owned(), "theBest".to_owned())])); let _ = load::<AssertRequestMustIncludeHeaders>(load_data.clone(), &http_state, None, &AssertMustIncludeHeadersRequestFactory { expected_headers: cookie, body: <[_]>::to_vec(&*load_data.data.unwrap()) }, DEFAULT_USER_AGENT.to_owned(), &CancellationListener::new(None)); } #[test] fn test_load_sends_secure_cookie_if_http_changed_to_https_due_to_entry_in_hsts_store() { let url = url!("http://mozilla.com"); let secured_url = url!("https://mozilla.com"); let http_state = HttpState::new(); { let mut hsts_list = http_state.hsts_list.write().unwrap(); let entry = HSTSEntry::new( "mozilla.com".to_owned(), IncludeSubdomains::Included, Some(1000000) ).unwrap(); hsts_list.push(entry); } { let mut cookie_jar = http_state.cookie_jar.write().unwrap(); let cookie_url = secured_url.clone(); let mut cookie_pair = CookiePair::new("mozillaIs".to_owned(), "theBest".to_owned()); cookie_pair.secure = true; let cookie = 
Cookie::new_wrapped( cookie_pair, &cookie_url, CookieSource::HTTP ).unwrap(); cookie_jar.push(cookie, CookieSource::HTTP); } let mut load_data = LoadData::new(LoadContext::Browsing, url, None); load_data.data = Some(<[_]>::to_vec("Yay!".as_bytes())); let mut headers = Headers::new(); headers.set_raw("Cookie".to_owned(), vec![<[_]>::to_vec("mozillaIs=theBest".as_bytes())]); let _ = load::<AssertRequestMustIncludeHeaders>( load_data.clone(), &http_state, None, &AssertMustIncludeHeadersRequestFactory { expected_headers: headers, body: <[_]>::to_vec(&*load_data.data.unwrap()) }, DEFAULT_USER_AGENT.to_owned(), &CancellationListener::new(None)); } #[test] fn test_load_sends_cookie_if_nonhttp() { let url = url!("http://mozilla.com"); let http_state = HttpState::new(); { let mut cookie_jar = http_state.cookie_jar.write().unwrap(); let cookie_url = url.clone(); let cookie = Cookie::new_wrapped( CookiePair::new("mozillaIs".to_owned(), "theBest".to_owned()), &cookie_url, CookieSource::NonHTTP ).unwrap(); cookie_jar.push(cookie, CookieSource::HTTP); } let mut load_data = LoadData::new(LoadContext::Browsing, url, None); load_data.data = Some(<[_]>::to_vec("Yay!".as_bytes())); let mut headers = Headers::new(); headers.set_raw("Cookie".to_owned(), vec![<[_]>::to_vec("mozillaIs=theBest".as_bytes())]); let _ = load::<AssertRequestMustIncludeHeaders>( load_data.clone(), &http_state, None, &AssertMustIncludeHeadersRequestFactory { expected_headers: headers, body: <[_]>::to_vec(&*load_data.data.unwrap()) }, DEFAULT_USER_AGENT.to_owned(), &CancellationListener::new(None)); } #[test] fn test_cookie_set_with_httponly_should_not_be_available_using_getcookiesforurl() { struct Factory; impl HttpRequestFactory for Factory { type R = MockRequest; fn create(&self, _: Url, _: Method) -> Result<MockRequest, LoadError> { let content = <[_]>::to_vec("Yay!".as_bytes()); let mut headers = Headers::new(); headers.set_raw("set-cookie", vec![b"mozillaIs=theBest; HttpOnly;".to_vec()]); Ok(MockRequest::new(ResponseType::WithHeaders(content, headers))) } } let url = url!("http://mozilla.com"); let http_state = HttpState::new(); let load_data = LoadData::new(LoadContext::Browsing, url.clone(), None); let _ = load::<MockRequest>(load_data, &http_state, None, &Factory, DEFAULT_USER_AGENT.to_owned(), &CancellationListener::new(None)); let mut cookie_jar = http_state.cookie_jar.write().unwrap(); assert!(cookie_jar.cookies_for_url(&url, CookieSource::NonHTTP).is_none()); } #[test] fn test_when_cookie_received_marked_secure_is_ignored_for_http() { struct Factory; impl HttpRequestFactory for Factory { type R = MockRequest; fn create(&self, _: Url, _: Method) -> Result<MockRequest, LoadError> { let content = <[_]>::to_vec("Yay!".as_bytes()); let mut headers = Headers::new(); headers.set_raw("set-cookie", vec![b"mozillaIs=theBest; Secure;".to_vec()]); Ok(MockRequest::new(ResponseType::WithHeaders(content, headers))) } } let http_state = HttpState::new(); let load_data = LoadData::new(LoadContext::Browsing, url!("http://mozilla.com"), None); let _ = load::<MockRequest>(load_data, &http_state, None, &Factory, DEFAULT_USER_AGENT.to_owned(), &CancellationListener::new(None)); assert_cookie_for_domain(http_state.cookie_jar.clone(), "http://mozilla.com", ""); } #[test] fn test_when_cookie_set_marked_httpsonly_secure_isnt_sent_on_http_request() { let sec_url = url!("https://mozilla.com"); let url = url!("http://mozilla.com"); let http_state = HttpState::new(); { let mut cookie_jar = http_state.cookie_jar.write().unwrap(); let cookie_url = 
sec_url.clone(); let cookie = Cookie::new_wrapped( CookiePair::parse("mozillaIs=theBest; Secure;").unwrap(), &cookie_url, CookieSource::HTTP ).unwrap(); cookie_jar.push(cookie, CookieSource::HTTP); } let mut load_data = LoadData::new(LoadContext::Browsing, url, None); load_data.data = Some(<[_]>::to_vec("Yay!".as_bytes())); assert_cookie_for_domain(http_state.cookie_jar.clone(), "https://mozilla.com", "mozillaIs=theBest"); let _ = load::<AssertRequestMustNotIncludeHeaders>( load_data.clone(), &http_state, None, &AssertMustNotIncludeHeadersRequestFactory { headers_not_expected: vec!["Cookie".to_owned()], body: <[_]>::to_vec(&*load_data.data.unwrap()) }, DEFAULT_USER_AGENT.to_owned(), &CancellationListener::new(None)); } #[test] fn test_load_sets_content_length_to_length_of_request_body() { let content = "This is a request body"; let url = url!("http://mozilla.com"); let mut load_data = LoadData::new(LoadContext::Browsing, url.clone(), None); load_data.data = Some(<[_]>::to_vec(content.as_bytes())); let mut content_len_headers = Headers::new(); content_len_headers.set(ContentLength(content.as_bytes().len() as u64)); let http_state = HttpState::new(); let _ = load::<AssertRequestMustIncludeHeaders>(load_data.clone(), &http_state, None, &AssertMustIncludeHeadersRequestFactory { expected_headers: content_len_headers, body: <[_]>::to_vec(&*load_data.data.unwrap()) }, DEFAULT_USER_AGENT.to_owned(), &CancellationListener::new(None)); } #[test] fn test_load_uses_explicit_accept_from_headers_in_load_data() { let text_html = qitem(Mime(TopLevel::Text, SubLevel::Html, vec![])); let mut accept_headers = Headers::new(); accept_headers.set(Accept(vec![text_html.clone()])); let url = url!("http://mozilla.com"); let mut load_data = LoadData::new(LoadContext::Browsing, url.clone(), None); load_data.data = Some(<[_]>::to_vec("Yay!".as_bytes())); load_data.headers.set(Accept(vec![text_html.clone()])); let http_state = HttpState::new(); let _ = load::<AssertRequestMustIncludeHeaders>(load_data, &http_state, None, &AssertMustIncludeHeadersRequestFactory { expected_headers: accept_headers, body: <[_]>::to_vec("Yay!".as_bytes()) }, DEFAULT_USER_AGENT.to_owned(), &CancellationListener::new(None)); } #[test] fn test_load_sets_default_accept_to_html_xhtml_xml_and_then_anything_else() { let mut accept_headers = Headers::new(); accept_headers.set(Accept(vec![ qitem(Mime(TopLevel::Text, SubLevel::Html, vec![])), qitem(Mime(TopLevel::Application, SubLevel::Ext("xhtml+xml".to_owned()), vec![])), QualityItem::new(Mime(TopLevel::Application, SubLevel::Xml, vec![]), Quality(900)), QualityItem::new(Mime(TopLevel::Star, SubLevel::Star, vec![]), Quality(800)), ])); let url = url!("http://mozilla.com"); let mut load_data = LoadData::new(LoadContext::Browsing, url.clone(), None); load_data.data = Some(<[_]>::to_vec("Yay!".as_bytes())); let http_state = HttpState::new(); let _ = load::<AssertRequestMustIncludeHeaders>(load_data, &http_state, None, &AssertMustIncludeHeadersRequestFactory { expected_headers: accept_headers, body: <[_]>::to_vec("Yay!".as_bytes()) }, DEFAULT_USER_AGENT.to_owned(), &CancellationListener::new(None)); } #[test] fn test_load_uses_explicit_accept_encoding_from_load_data_headers() { let mut accept_encoding_headers = Headers::new(); accept_encoding_headers.set(AcceptEncoding(vec![qitem(Encoding::Chunked)])); let url = url!("http://mozilla.com"); let mut load_data = LoadData::new(LoadContext::Browsing, url.clone(), None); load_data.data = Some(<[_]>::to_vec("Yay!".as_bytes())); 
load_data.headers.set(AcceptEncoding(vec![qitem(Encoding::Chunked)])); let http_state = HttpState::new(); let _ = load::<AssertRequestMustIncludeHeaders>(load_data, &http_state, None, &AssertMustIncludeHeadersRequestFactory { expected_headers: accept_encoding_headers, body: <[_]>::to_vec("Yay!".as_bytes()) }, DEFAULT_USER_AGENT.to_owned(), &CancellationListener::new(None)); } #[test] fn test_load_sets_default_accept_encoding_to_gzip_and_deflate() { let mut accept_encoding_headers = Headers::new(); accept_encoding_headers.set(AcceptEncoding(vec![qitem(Encoding::Gzip), qitem(Encoding::Deflate), qitem(Encoding::EncodingExt("br".to_owned()))])); let url = url!("http://mozilla.com"); let mut load_data = LoadData::new(LoadContext::Browsing, url.clone(), None); load_data.data = Some(<[_]>::to_vec("Yay!".as_bytes())); let http_state = HttpState::new(); let _ = load::<AssertRequestMustIncludeHeaders>(load_data, &http_state, None, &AssertMustIncludeHeadersRequestFactory { expected_headers: accept_encoding_headers, body: <[_]>::to_vec("Yay!".as_bytes()) }, DEFAULT_USER_AGENT.to_owned(), &CancellationListener::new(None)); } #[test] fn test_load_errors_when_there_a_redirect_loop() { struct Factory; impl HttpRequestFactory for Factory { type R = MockRequest; fn create(&self, url: Url, _: Method) -> Result<MockRequest, LoadError> { if url.domain().unwrap() == "mozilla.com" { Ok(MockRequest::new(ResponseType::Redirect("http://mozilla.org".to_owned()))) } else if url.domain().unwrap() == "mozilla.org" { Ok(MockRequest::new(ResponseType::Redirect("http://mozilla.com".to_owned()))) } else { panic!("unexpected host {:?}", url) } } } let url = url!("http://mozilla.com"); let load_data = LoadData::new(LoadContext::Browsing, url.clone(), None); let http_state = HttpState::new(); match load::<MockRequest>(load_data, &http_state, None, &Factory, DEFAULT_USER_AGENT.to_owned(), &CancellationListener::new(None)) { Err(LoadError::InvalidRedirect(_, msg)) => { assert_eq!(msg, "redirect loop"); }, _ => panic!("expected max redirects to fail") } } #[test] fn test_load_errors_when_there_is_too_many_redirects() { struct Factory; impl HttpRequestFactory for Factory { type R = MockRequest; fn create(&self, url: Url, _: Method) -> Result<MockRequest, LoadError> { if url.domain().unwrap() == "mozilla.com" { Ok(MockRequest::new(ResponseType::Redirect(format!("{}/1", url.serialize())))) } else { panic!("unexpected host {:?}", url) } } } let url = url!("http://mozilla.com"); let load_data = LoadData::new(LoadContext::Browsing, url.clone(), None); let http_state = HttpState::new(); match load::<MockRequest>(load_data, &http_state, None, &Factory, DEFAULT_USER_AGENT.to_owned(), &CancellationListener::new(None)) { Err(LoadError::MaxRedirects(url)) => { assert_eq!(url.domain().unwrap(), "mozilla.com") }, _ => panic!("expected max redirects to fail") } } #[test] fn test_load_follows_a_redirect() { struct Factory; impl HttpRequestFactory for Factory { type R = MockRequest; fn create(&self, url: Url, _: Method) -> Result<MockRequest, LoadError> { if url.domain().unwrap() == "mozilla.com" { Ok(MockRequest::new(ResponseType::Redirect("http://mozilla.org".to_owned()))) } else if url.domain().unwrap() == "mozilla.org" { Ok( MockRequest::new( ResponseType::Text( <[_]>::to_vec("Yay!".as_bytes()) ) ) ) } else { panic!("unexpected host {:?}", url) } } } let url = url!("http://mozilla.com"); let load_data = LoadData::new(LoadContext::Browsing, url.clone(), None); let http_state = HttpState::new(); match load::<MockRequest>(load_data, 
&http_state, None, &Factory, DEFAULT_USER_AGENT.to_owned(), &CancellationListener::new(None)) { Err(e) => panic!("expected to follow a redirect {:?}", e), Ok(mut lr) => { let response = read_response(&mut lr); assert_eq!(response, "Yay!".to_owned()); } } } struct DontConnectFactory; impl HttpRequestFactory for DontConnectFactory { type R = MockRequest; fn create(&self, url: Url, _: Method) -> Result<MockRequest, LoadError> { Err(LoadError::Connection(url, "should not have connected".to_owned())) } } #[test] fn test_load_errors_when_scheme_is_not_http_or_https() { let url = url!("ftp://not-supported"); let load_data = LoadData::new(LoadContext::Browsing, url.clone(), None); let http_state = HttpState::new(); match load::<MockRequest>(load_data, &http_state, None, &DontConnectFactory, DEFAULT_USER_AGENT.to_owned(), &CancellationListener::new(None)) { Err(LoadError::UnsupportedScheme(_)) => {} _ => panic!("expected ftp scheme to be unsupported") } } #[test] fn test_load_errors_when_viewing_source_and_inner_url_scheme_is_not_http_or_https() { let url = url!("view-source:ftp://not-supported"); let load_data = LoadData::new(LoadContext::Browsing, url.clone(), None); let http_state = HttpState::new(); match load::<MockRequest>(load_data, &http_state, None, &DontConnectFactory, DEFAULT_USER_AGENT.to_owned(), &CancellationListener::new(None)) { Err(LoadError::UnsupportedScheme(_)) => {} _ => panic!("expected ftp scheme to be unsupported") } } #[test] fn test_load_errors_when_cancelled() { use ipc_channel::ipc; use net::resource_thread::CancellableResource; use net_traits::ResourceId; struct Factory; impl HttpRequestFactory for Factory { type R = MockRequest; fn create(&self, _: Url, _: Method) -> Result<MockRequest, LoadError> { let mut headers = Headers::new(); headers.set(Host { hostname: "Kaboom!".to_owned(), port: None }); Ok(MockRequest::new( ResponseType::WithHeaders(<[_]>::to_vec("BOOM!".as_bytes()), headers)) ) } } let (id_sender, _id_receiver) = ipc::channel().unwrap(); let (cancel_sender, cancel_receiver) = mpsc::channel(); let cancel_resource = CancellableResource::new(cancel_receiver, ResourceId(0), id_sender); let cancel_listener = CancellationListener::new(Some(cancel_resource)); cancel_sender.send(()).unwrap(); let url = url!("https://mozilla.com"); let load_data = LoadData::new(LoadContext::Browsing, url.clone(), None); let http_state = HttpState::new(); match load::<MockRequest>(load_data, &http_state, None, &Factory, DEFAULT_USER_AGENT.to_owned(), &cancel_listener) { Err(LoadError::Cancelled(_, _)) => (), _ => panic!("expected load cancelled error!") } } #[test] fn test_redirect_from_x_to_y_provides_y_cookies_from_y() { let url_x = url!("http://mozilla.com"); let url_y = url!("http://mozilla.org"); struct Factory; impl HttpRequestFactory for Factory { type R = AssertRequestMustIncludeHeaders; fn create(&self, url: Url, _: Method) -> Result<AssertRequestMustIncludeHeaders, LoadError> { if url.domain().unwrap() == "mozilla.com" { let mut expected_headers_x = Headers::new(); expected_headers_x.set_raw("Cookie".to_owned(), vec![<[_]>::to_vec("mozillaIsNot=dotCom".as_bytes())]); Ok(AssertRequestMustIncludeHeaders::new( ResponseType::Redirect("http://mozilla.org".to_owned()), Some(expected_headers_x))) } else if url.domain().unwrap() == "mozilla.org" { let mut expected_headers_y = Headers::new(); expected_headers_y.set_raw( "Cookie".to_owned(), vec![<[_]>::to_vec("mozillaIs=theBest".as_bytes())]); Ok(AssertRequestMustIncludeHeaders::new( 
ResponseType::Text(<[_]>::to_vec("Yay!".as_bytes())), Some(expected_headers_y))) } else { panic!("unexpected host {:?}", url) } } } let load_data = LoadData::new(LoadContext::Browsing, url_x.clone(), None); let http_state = HttpState::new(); { let mut cookie_jar = http_state.cookie_jar.write().unwrap(); let cookie_x_url = url_x.clone(); let cookie_x = Cookie::new_wrapped( CookiePair::new("mozillaIsNot".to_owned(), "dotCom".to_owned()), &cookie_x_url, CookieSource::HTTP ).unwrap(); cookie_jar.push(cookie_x, CookieSource::HTTP); let cookie_y_url = url_y.clone(); let cookie_y = Cookie::new_wrapped( CookiePair::new("mozillaIs".to_owned(), "theBest".to_owned()), &cookie_y_url, CookieSource::HTTP ).unwrap(); cookie_jar.push(cookie_y, CookieSource::HTTP); } match load::<AssertRequestMustIncludeHeaders>(load_data, &http_state, None, &Factory, DEFAULT_USER_AGENT.to_owned(), &CancellationListener::new(None)) { Err(e) => panic!("expected to follow a redirect {:?}", e), Ok(mut lr) => { let response = read_response(&mut lr); assert_eq!(response, "Yay!".to_owned()); } } } #[test] fn test_redirect_from_x_to_x_provides_x_with_cookie_from_first_response() { let url = url!("http://mozilla.org/initial/"); struct Factory; impl HttpRequestFactory for Factory { type R = AssertRequestMustIncludeHeaders; fn create(&self, url: Url, _: Method) -> Result<AssertRequestMustIncludeHeaders, LoadError> { if url.path().unwrap()[0] == "initial" { let mut initial_answer_headers = Headers::new(); initial_answer_headers.set_raw("set-cookie", vec![b"mozillaIs=theBest; path=/;".to_vec()]); Ok(AssertRequestMustIncludeHeaders::new( ResponseType::RedirectWithHeaders("http://mozilla.org/subsequent/".to_owned(), initial_answer_headers), None)) } else if url.path().unwrap()[0] == "subsequent" { let mut expected_subsequent_headers = Headers::new(); expected_subsequent_headers.set_raw("Cookie", vec![b"mozillaIs=theBest".to_vec()]); Ok(AssertRequestMustIncludeHeaders::new( ResponseType::Text(b"Yay!".to_vec()), Some(expected_subsequent_headers))) } else { panic!("unexpected host {:?}", url) } } } let load_data = LoadData::new(LoadContext::Browsing, url.clone(), None); let http_state = HttpState::new(); match load::<AssertRequestMustIncludeHeaders>(load_data, &http_state, None, &Factory, DEFAULT_USER_AGENT.to_owned(), &CancellationListener::new(None)) { Err(e) => panic!("expected to follow a redirect {:?}", e), Ok(mut lr) => { let response = read_response(&mut lr); assert_eq!(response, "Yay!".to_owned()); } } } #[test] fn test_if_auth_creds_not_in_url_but_in_cache_it_sets_it() { let url = url!("http://mozilla.com"); let http_state = HttpState::new(); let auth_entry = AuthCacheEntry { user_name: "username".to_owned(), password: "test".to_owned(), }; http_state.auth_cache.write().unwrap().insert(url.clone(), auth_entry); let mut load_data = LoadData::new(LoadContext::Browsing, url, None); load_data.credentials_flag = true; let mut auth_header = Headers::new(); auth_header.set( Authorization( Basic { username: "username".to_owned(), password: Some("test".to_owned()) } ) ); let _ = load::<AssertRequestMustIncludeHeaders>( load_data.clone(), &http_state, None, &AssertMustIncludeHeadersRequestFactory { expected_headers: auth_header, body: <[_]>::to_vec(&[]) }, DEFAULT_USER_AGENT.to_owned(), &CancellationListener::new(None)); }<|fim▁end|>
assert_eq!(read_response(&mut response), "Yay!"); }
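The two decoding tests above compress the response body with flate2's GzEncoder and DeflateEncoder and assert that the loader decodes it transparently. For reference, the equivalent round-trips in Python's standard library (illustrative only, unrelated to the Servo code under test):

# Illustrative round-trips matching the Content-Encoding cases tested above.
import gzip, zlib

body = b"Yay!"
assert gzip.decompress(gzip.compress(body)) == body   # gzip case
assert zlib.decompress(zlib.compress(body)) == body   # deflate (zlib-wrapped)
# Raw deflate, as flate2's DeflateEncoder and some servers produce it:
# strip the 2-byte zlib header and 4-byte checksum, decode with negative wbits.
raw = zlib.compress(body)[2:-4]
assert zlib.decompress(raw, -zlib.MAX_WBITS) == body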
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- #------------------------------------------------------------------------------ # file: $Id$ # auth: metagriffin <[email protected]> # date: 2012/04/20 # copy: (C) Copyright 2012-EOT metagriffin -- see LICENSE.txt #------------------------------------------------------------------------------ # This software is free software: you can redistribute it and/or # modify it under the terms of the GNU General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This software is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see http://www.gnu.org/licenses/. #------------------------------------------------------------------------------ <|fim▁hole|>#------------------------------------------------------------------------------ # end of $Id$ #------------------------------------------------------------------------------<|fim▁end|>
from .tracker import * from .merger import *
<|file_name|>exceptions.py<|end_file_name|><|fim▁begin|>from django.contrib import messages from django.core.exceptions import PermissionDenied class UploadPermissionDenied(PermissionDenied):<|fim▁hole|> def __init__(self, request, log_func, error_message, *args, **kwargs): log_func(error_message) messages.error(request, error_message) super(UploadPermissionDenied, self).__init__(*args, **kwargs)<|fim▁end|>
<|file_name|>server.py<|end_file_name|><|fim▁begin|># coding=UTF-8 from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals #import tornado from tornado import ioloop , web , httpserver , websocket , options #import handler function import handler import os #set server settings server_settings = { "static_path": os.path.join(os.path.dirname(__file__), "static"), "xsrf_cookies": True, "autoreload": True, #"login_url": "/accounts/login", "debug":True, "template_path":os.path.join(os.path.dirname(__file__),"templates"), } #the handlers list handlers=[ (r"/?",handler.MainHandler), (r"/upload",handler.WavFileHandler) ] options.define("port", default=8080, help="the application will be run on the given port", type=int) <|fim▁hole|>if __name__ == "__main__": options.parse_command_line() app_server = httpserver.HTTPServer(web.Application(handlers,**server_settings)) app_server.listen(options.options.port) ioloop.IOLoop.current().start()<|fim▁end|>
<|file_name|>Basis.py<|end_file_name|><|fim▁begin|>import numpy as np class Basis(object): __resolution = 1E-9 __eqdecimals = 9 # how many decimals to round off to for relative coordinates def __init__(self, basis=None, transform=None): if basis is not None: self.origin = basis.origin.copy() if transform is not None: self.matrix = np.asmatrix(transform) * basis.matrix else: self.matrix = basis.matrix.copy() else: self.origin = np.matrix([[0,0,0]], dtype='float64') self.matrix = np.matrix([[1,0,0],[0,1,0],[0,0,1]], dtype='float64') @staticmethod def using(origin_abs, ei, ej, ek): basis = Basis() basis.origin = np.matrix(origin_abs, dtype='float64') basis.matrix = np.matrix([ei, ej, ek], dtype='float64') return basis @staticmethod def set_resolution(decimals): Basis.__eqdecimals = decimals Basis.__resolution = 10**(-decimals) @staticmethod def zero_if_negligible(value): if (value > -Basis.__resolution) and (value < Basis.__resolution): return 0 return value @staticmethod def is_positive(value): return value > -Basis.__resolution @staticmethod def is_negative(value): return value < Basis.__resolution @staticmethod def is_strictly_positive(value): return value > Basis.__resolution @staticmethod def is_strictly_negative(value): return value < -Basis.__resolution def rel_to_abs(self, coord_rel): coord_rel = np.asarray(coord_rel) mat_rel = np.asarray(self.origin + np.asmatrix(coord_rel) * self.matrix) if coord_rel.shape == (3,): return mat_rel[0] return mat_rel def abs_to_rel(self, coord_abs): mat_abs = (np.asmatrix(coord_abs) - self.origin) * self.matrix.getT() #if Basis.__eqdecimals > 0: # mat_abs = np.around(mat_abs, decimals=Basis.__eqdecimals) if coord_abs.shape == (3,): return np.asarray(mat_abs)[0] return np.asarray(mat_abs) def e_i(self): return np.asarray(self.matrix[0,:])[0] def e_j(self): return np.asarray(self.matrix[1,:])[0]<|fim▁hole|> @staticmethod def separation(coord_1, coord_2): # distance between two points; can be abs or rel, so long as the bases are the same dp = np.linalg.norm(coord_2 - coord_1) if Basis.__resolution > 0: if dp < 2 * Basis.__resolution: dp = 0 return dp def offset(self, offset_origin_rel): basis = Basis(self) basis.origin = self.rel_to_abs(offset_origin_rel) return basis def jki(self, offset_origin_rel=None): basis = Basis(self, [[0,1,0],[0,0,1],[1,0,0]]) if offset_origin_rel is not None: basis.origin = self.rel_to_abs(offset_origin_rel) return basis def rotate_i(self, angle, offset_origin_rel=None): # angle [degrees] anti-clockwise rotation of self about self.i; optionally offset the origin if angle == 90: basis = Basis(self, [[1,0,0],[0,0,1],[0,-1,0]]) elif angle == 180: basis = Basis(self, [[1,0,0],[0,-1,0],[0,0,-1]]) elif angle == 270: basis = Basis(self, [[1,0,0],[0,0,-1],[0,1,0]]) elif angle: sin_theta = np.sin(angle * np.pi / 180) cos_theta = np.cos(angle * np.pi / 180) basis = Basis(self, [[1,0,0],[0,cos_theta,sin_theta],[0,-sin_theta,cos_theta]]) else: basis = Basis(self) if offset_origin_rel is not None: basis.origin = self.rel_to_abs(offset_origin_rel) return basis def rotate_j(self, angle, offset_origin_rel=None): # angle [degrees] anti-clockwise rotation of self about self.j; optionally offset the origin if angle == 90: basis = Basis(self, [[0,0,-1],[0,1,0],[1,0,0]]) elif angle == 180: basis = Basis(self, [[-1,0,0],[0,1,0],[0,0,-1]]) elif angle == 270: basis = Basis(self, [[0,0,1],[0,1,0],[-1,0,0]]) elif angle: sin_theta = np.sin(angle * np.pi / 180) cos_theta = np.cos(angle * np.pi / 180) basis = Basis(self, 
[[cos_theta,0,-sin_theta],[0,1,0],[sin_theta,0,cos_theta]]) else: basis = Basis(self) if offset_origin_rel is not None: basis.origin = self.rel_to_abs(offset_origin_rel) return basis def rotate_k(self, angle, offset_origin_rel=None): # angle [degrees] anti-clockwise rotation of self about self.k; optionally offset the origin if angle == 90: basis = Basis(self, [[0,1,0],[-1,0,0],[0,0,1]]) elif angle == 180: basis = Basis(self, [[-1,0,0],[0,-1,0],[0,0,1]]) elif angle == 270: basis = Basis(self, [[0,-1,0],[1,0,0],[0,0,1]]) elif angle: sin_theta = np.sin(angle * np.pi / 180) cos_theta = np.cos(angle * np.pi / 180) basis = Basis(self, [[cos_theta,sin_theta,0],[-sin_theta,cos_theta,0],[0,0,1]]) else: basis = Basis(self) if offset_origin_rel is not None: basis.origin = self.rel_to_abs(offset_origin_rel) return basis<|fim▁end|>
def e_k(self): return np.asarray(self.matrix[2,:])[0]
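A quick numeric check of the convention used in Basis.py above: the class stores basis vectors as matrix rows and multiplies row vectors on the left, so rotate_k builds [[cos t, sin t, 0], [-sin t, cos t, 0], [0, 0, 1]] and a 90 degree anti-clockwise rotation about k carries the i axis onto j. The sketch below assumes the Basis class above is importable as written; the variable names are illustrative.

# Numeric sanity check of the rotate_k convention in Basis.py
# (illustrative usage only; assumes the class above is in scope as Basis).
import numpy as np

b = Basis()                     # identity basis at the absolute origin
r = b.rotate_k(90)              # 90 deg anti-clockwise rotation about k

# The rotated i axis should coincide with the original j axis...
assert np.allclose(r.e_i(), [0, 1, 0])
# ...and a point on the new i axis should land on the absolute j axis.
assert np.allclose(r.rel_to_abs(np.array([1.0, 0.0, 0.0])), [0, 1, 0])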
<|file_name|>test_variable.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the<|fim▁hole|> from cryptography.fernet import Fernet from airflow import settings from airflow.models import Variable, crypto from tests.test_utils.config import conf_vars class TestVariable(unittest.TestCase): def setUp(self): crypto._fernet = None def tearDown(self): crypto._fernet = None @conf_vars({('core', 'fernet_key'): ''}) def test_variable_no_encryption(self): """ Test variables without encryption """ Variable.set('key', 'value') session = settings.Session() test_var = session.query(Variable).filter(Variable.key == 'key').one() self.assertFalse(test_var.is_encrypted) self.assertEqual(test_var.val, 'value') @conf_vars({('core', 'fernet_key'): Fernet.generate_key().decode()}) def test_variable_with_encryption(self): """ Test variables with encryption """ Variable.set('key', 'value') session = settings.Session() test_var = session.query(Variable).filter(Variable.key == 'key').one() self.assertTrue(test_var.is_encrypted) self.assertEqual(test_var.val, 'value') def test_var_with_encryption_rotate_fernet_key(self): """ Tests rotating encrypted variables. """ key1 = Fernet.generate_key() key2 = Fernet.generate_key() with conf_vars({('core', 'fernet_key'): key1.decode()}): Variable.set('key', 'value') session = settings.Session() test_var = session.query(Variable).filter(Variable.key == 'key').one() self.assertTrue(test_var.is_encrypted) self.assertEqual(test_var.val, 'value') self.assertEqual(Fernet(key1).decrypt(test_var._val.encode()), b'value') # Test decrypt of old value with new key with conf_vars({('core', 'fernet_key'): ','.join([key2.decode(), key1.decode()])}): crypto._fernet = None self.assertEqual(test_var.val, 'value') # Test decrypt of new value with new key test_var.rotate_fernet_key() self.assertTrue(test_var.is_encrypted) self.assertEqual(test_var.val, 'value') self.assertEqual(Fernet(key2).decrypt(test_var._val.encode()), b'value')<|fim▁end|>
# specific language governing permissions and limitations # under the License. import unittest
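The rotation test above depends on Airflow accepting a comma-separated fernet_key ("new_key,old_key") and trying each key on decrypt. The same pattern is available directly in the cryptography library as MultiFernet; a minimal standalone sketch, independent of Airflow:

# Standalone sketch of the key-rotation pattern exercised above,
# using only the cryptography library (no Airflow involved).
from cryptography.fernet import Fernet, MultiFernet

key1, key2 = Fernet.generate_key(), Fernet.generate_key()
token = Fernet(key1).encrypt(b'value')      # ciphertext written under the old key

# Decryption order mirrors the 'new_key,old_key' fernet_key setting.
f = MultiFernet([Fernet(key2), Fernet(key1)])
assert f.decrypt(token) == b'value'         # old token is still readable

rotated = f.rotate(token)                   # re-encrypt under the first (new) key
assert Fernet(key2).decrypt(rotated) == b'value'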
<|file_name|>rackspace.go<|end_file_name|><|fim▁begin|>/* Copyright 2014 The Kubernetes Authors All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package rackspace import ( "errors" "fmt" "io" "net" "regexp" "time" "github.com/rackspace/gophercloud" osservers "github.com/rackspace/gophercloud/openstack/compute/v2/servers" "github.com/rackspace/gophercloud/pagination" "github.com/rackspace/gophercloud/rackspace" "github.com/rackspace/gophercloud/rackspace/compute/v2/servers" "github.com/scalingdata/gcfg" "github.com/golang/glog" "k8s.io/kubernetes/pkg/api" "k8s.io/kubernetes/pkg/cloudprovider" ) const ProviderName = "rackspace" var ErrNotFound = errors.New("Failed to find object") var ErrMultipleResults = errors.New("Multiple results where only one expected") var ErrNoAddressFound = errors.New("No address found for host") var ErrAttrNotFound = errors.New("Expected attribute not found") // encoding.TextUnmarshaler interface for time.Duration type MyDuration struct { time.Duration } func (d *MyDuration) UnmarshalText(text []byte) error { res, err := time.ParseDuration(string(text)) if err != nil { return err } d.Duration = res return nil } type LoadBalancerOpts struct { SubnetId string `gcfg:"subnet-id"` // required CreateMonitor bool `gcfg:"create-monitor"` MonitorDelay MyDuration `gcfg:"monitor-delay"` MonitorTimeout MyDuration `gcfg:"monitor-timeout"` MonitorMaxRetries uint `gcfg:"monitor-max-retries"` } // Rackspace is an implementation of cloud provider Interface for Rackspace. 
type Rackspace struct { provider *gophercloud.ProviderClient region string lbOpts LoadBalancerOpts } <|fim▁hole|> Global struct { AuthUrl string `gcfg:"auth-url"` Username string UserId string `gcfg:"user-id"` Password string ApiKey string `gcfg:"api-key"` TenantId string `gcfg:"tenant-id"` TenantName string `gcfg:"tenant-name"` DomainId string `gcfg:"domain-id"` DomainName string `gcfg:"domain-name"` Region string } LoadBalancer LoadBalancerOpts } func init() { cloudprovider.RegisterCloudProvider(ProviderName, func(config io.Reader) (cloudprovider.Interface, error) { cfg, err := readConfig(config) if err != nil { return nil, err } return newRackspace(cfg) }) } func (cfg Config) toAuthOptions() gophercloud.AuthOptions { return gophercloud.AuthOptions{ IdentityEndpoint: cfg.Global.AuthUrl, Username: cfg.Global.Username, UserID: cfg.Global.UserId, Password: cfg.Global.Password, APIKey: cfg.Global.ApiKey, TenantID: cfg.Global.TenantId, TenantName: cfg.Global.TenantName, // Persistent service, so we need to be able to renew tokens AllowReauth: true, } } func readConfig(config io.Reader) (Config, error) { if config == nil { err := fmt.Errorf("no Rackspace cloud provider config file given") return Config{}, err } var cfg Config err := gcfg.ReadInto(&cfg, config) return cfg, err } func newRackspace(cfg Config) (*Rackspace, error) { provider, err := rackspace.AuthenticatedClient(cfg.toAuthOptions()) if err != nil { return nil, err } os := Rackspace{ provider: provider, region: cfg.Global.Region, lbOpts: cfg.LoadBalancer, } return &os, nil } type Instances struct { compute *gophercloud.ServiceClient } // Instances returns an implementation of Instances for Rackspace. func (os *Rackspace) Instances() (cloudprovider.Instances, bool) { glog.V(2).Info("rackspace.Instances() called") compute, err := rackspace.NewComputeV2(os.provider, gophercloud.EndpointOpts{ Region: os.region, }) if err != nil { glog.Warningf("Failed to find compute endpoint: %v", err) return nil, false } glog.V(1).Info("Claiming to support Instances") return &Instances{compute}, true } func (i *Instances) List(name_filter string) ([]string, error) { glog.V(2).Infof("rackspace List(%v) called", name_filter) opts := osservers.ListOpts{ Name: name_filter, Status: "ACTIVE", } pager := servers.List(i.compute, opts) ret := make([]string, 0) err := pager.EachPage(func(page pagination.Page) (bool, error) { sList, err := servers.ExtractServers(page) if err != nil { return false, err } for _, server := range sList { ret = append(ret, server.Name) } return true, nil }) if err != nil { return nil, err } glog.V(2).Infof("Found %v entries: %v", len(ret), ret) return ret, nil } func serverHasAddress(srv osservers.Server, ip string) bool { if ip == firstAddr(srv.Addresses["private"]) { return true } if ip == firstAddr(srv.Addresses["public"]) { return true } if ip == srv.AccessIPv4 { return true } if ip == srv.AccessIPv6 { return true } return false } func getServerByAddress(client *gophercloud.ServiceClient, name string) (*osservers.Server, error) { pager := servers.List(client, nil) serverList := make([]osservers.Server, 0, 1) err := pager.EachPage(func(page pagination.Page) (bool, error) { s, err := servers.ExtractServers(page) if err != nil { return false, err } for _, v := range s { if serverHasAddress(v, name) { serverList = append(serverList, v) } } if len(serverList) > 1 { return false, ErrMultipleResults } return true, nil }) if err != nil { return nil, err } if len(serverList) == 0 { return nil, ErrNotFound } else if len(serverList) > 1 { 
return nil, ErrMultipleResults } return &serverList[0], nil } func getServerByName(client *gophercloud.ServiceClient, name string) (*osservers.Server, error) { if net.ParseIP(name) != nil { // we're an IP, so we'll have to walk the full list of servers to // figure out which one we are. return getServerByAddress(client, name) } opts := osservers.ListOpts{ Name: fmt.Sprintf("^%s$", regexp.QuoteMeta(name)), Status: "ACTIVE", } pager := servers.List(client, opts) serverList := make([]osservers.Server, 0, 1) err := pager.EachPage(func(page pagination.Page) (bool, error) { s, err := servers.ExtractServers(page) if err != nil { return false, err } serverList = append(serverList, s...) if len(serverList) > 1 { return false, ErrMultipleResults } return true, nil }) if err != nil { return nil, err } if len(serverList) == 0 { return nil, ErrNotFound } else if len(serverList) > 1 { return nil, ErrMultipleResults } return &serverList[0], nil } func firstAddr(netblob interface{}) string { // Run-time types for the win :( list, ok := netblob.([]interface{}) if !ok || len(list) < 1 { return "" } props, ok := list[0].(map[string]interface{}) if !ok { return "" } tmp, ok := props["addr"] if !ok { return "" } addr, ok := tmp.(string) if !ok { return "" } return addr } func getAddressByName(api *gophercloud.ServiceClient, name string) (string, error) { srv, err := getServerByName(api, name) if err != nil { return "", err } var s string if s == "" { s = firstAddr(srv.Addresses["private"]) } if s == "" { s = firstAddr(srv.Addresses["public"]) } if s == "" { s = srv.AccessIPv4 } if s == "" { s = srv.AccessIPv6 } if s == "" { return "", ErrNoAddressFound } return s, nil } func (i *Instances) NodeAddresses(name string) ([]api.NodeAddress, error) { glog.V(2).Infof("NodeAddresses(%v) called", name) ip, err := getAddressByName(i.compute, name) if err != nil { return nil, err } glog.V(2).Infof("NodeAddresses(%v) => %v", name, ip) // net.ParseIP().String() is to maintain compatibility with the old code return []api.NodeAddress{{Type: api.NodeLegacyHostIP, Address: net.ParseIP(ip).String()}}, nil } // ExternalID returns the cloud provider ID of the specified instance (deprecated). func (i *Instances) ExternalID(name string) (string, error) { return "", fmt.Errorf("unimplemented") } // InstanceID returns the cloud provider ID of the specified instance. func (i *Instances) InstanceID(name string) (string, error) { return "", nil } func (i *Instances) AddSSHKeyToAllInstances(user string, keyData []byte) error { return errors.New("unimplemented") } // Implementation of Instances.CurrentNodeName func (i *Instances) CurrentNodeName(hostname string) (string, error) { return hostname, nil } func (os *Rackspace) Clusters() (cloudprovider.Clusters, bool) { return nil, false } // ProviderName returns the cloud provider ID. func (os *Rackspace) ProviderName() string { return ProviderName } // ScrubDNS filters DNS settings for pods. func (os *Rackspace) ScrubDNS(nameservers, searches []string) (nsOut, srchOut []string) { return nameservers, searches } func (os *Rackspace) LoadBalancer() (cloudprovider.LoadBalancer, bool) { return nil, false } func (os *Rackspace) Zones() (cloudprovider.Zones, bool) { glog.V(1).Info("Claiming to support Zones") return os, true } func (os *Rackspace) Routes() (cloudprovider.Routes, bool) { return nil, false } func (os *Rackspace) GetZone() (cloudprovider.Zone, error) { glog.V(1).Infof("Current zone is %v", os.region) return cloudprovider.Zone{Region: os.region}, nil }<|fim▁end|>
type Config struct {
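
The firstAddr helper in the Go provider above is just a defensive walk over the decoded Addresses blob. A rough Python rendering of the same walk, assuming the Nova-style shape of a list of {"addr": ..., "version": ...} maps (the sample data here is illustrative, not taken from the file):

def first_addr(netblob):
    # Mirror of firstAddr: tolerate any run-time shape and return "" on mismatch.
    if not isinstance(netblob, list) or not netblob:
        return ""
    props = netblob[0]
    if not isinstance(props, dict):
        return ""
    addr = props.get("addr")
    return addr if isinstance(addr, str) else ""

addresses = {"private": [{"addr": "10.0.0.5", "version": 4}], "public": []}
assert first_addr(addresses.get("private")) == "10.0.0.5"
assert first_addr(addresses.get("public")) == ""
assert first_addr(None) == ""
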
<|file_name|>Or.java<|end_file_name|><|fim▁begin|>package sabstracta; /** * Represents an or operation in the syntax tree. * */ public class Or extends ExpresionBinariaLogica { public Or(Expresion _izq, Expresion _dch) { super(_izq, _dch); } /** <|fim▁hole|> protected String getInst() { return "or"; } }<|fim▁end|>
* Returns the instruction code. */ @Override
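
Or is one leaf of a template-method hierarchy: the ExpresionBinariaLogica base presumably emits code for both operands and then asks the subclass for its instruction mnemonic via getInst. A small Python sketch of that pattern — the operand-first emit order is an assumption, since the base class is not shown:

class BinaryLogicExpr:
    def __init__(self, left, right):
        self.left, self.right = left, right

    def inst(self):
        raise NotImplementedError

    def generate(self, out):
        # Typical stack-machine order: operands first, operator last.
        self.left.generate(out)
        self.right.generate(out)
        out.append(self.inst())

class Or(BinaryLogicExpr):
    def inst(self):
        return "or"

class Lit:
    def __init__(self, value):
        self.value = value

    def generate(self, out):
        out.append("push %s" % self.value)

out = []
Or(Lit(1), Lit(0)).generate(out)
assert out == ["push 1", "push 0", "or"]
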
<|file_name|>address_resolver.py<|end_file_name|><|fim▁begin|># Rekall Memory Forensics # Copyright 2014 Google Inc. All Rights Reserved. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or (at # your option) any later version. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of<|fim▁hole|># along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # """The module implements the linux specific address resolution plugin.""" __author__ = "Michael Cohen <[email protected]>" import re from rekall import obj from rekall.plugins.common import address_resolver from rekall.plugins.linux import common class LKMModule(address_resolver.Module): """A Linux kernel module.""" def __init__(self, module, **kwargs): self.module = module super(LKMModule, self).__init__( name=unicode(module.name), start=module.base, end=module.end, **kwargs) class KernelModule(address_resolver.Module): """A Fake object which makes the kernel look like a module. This removes the need to treat kernel addresses any different from module addresses, and allows them to be resolved by this module. """ def __init__(self, session=None, **kwargs): super(KernelModule, self).__init__( # Check if the address appears in the kernel binary. start=obj.Pointer.integer_to_address( session.profile.get_constant("_text")), end=session.profile.get_constant("_etext"), name="linux", profile=session.profile, session=session, **kwargs) class LinuxAddressResolver(address_resolver.AddressResolverMixin, common.LinuxPlugin): """A Linux specific address resolver plugin.""" def _EnsureInitialized(self): if self._initialized: return # Insert a psuedo module for the kernel self.AddModule(KernelModule(session=self.session)) # Add LKMs. for kmod in self.session.plugins.lsmod().get_module_list(): self.AddModule(LKMModule(kmod, session=self.session)) self._initialized = True<|fim▁end|>
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License
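
Both module classes above boil down to (start, end, name) ranges, so resolving an address is a containment lookup over sorted module starts. A simplified Python sketch of that idea (Rekall's real resolver does considerably more bookkeeping):

import bisect

def resolve(modules, address):
    # modules: list of (start, end, name) tuples, sorted by start.
    starts = [m[0] for m in modules]
    i = bisect.bisect_right(starts, address) - 1
    if i >= 0:
        start, end, name = modules[i]
        if start <= address < end:
            return "%s+%#x" % (name, address - start)
    return None

mods = [(0x1000, 0x2000, "linux"), (0x8000, 0x9000, "lkm")]
assert resolve(mods, 0x1040) == "linux+0x40"
assert resolve(mods, 0x7000) is None
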
<|file_name|>transform_hex_multine.py<|end_file_name|><|fim▁begin|>#! /usr/bin/python3 import sys MAXLINELEN = 80 TABLEN = 8 # 3 for two quotes and a comma FIRSTLINELEN = MAXLINELEN - TABLEN - 3 OTHERLINELEN = FIRSTLINELEN - 2 * TABLEN FIRSTLINEBYTES = FIRSTLINELEN // 2 OTHERLINEBYTES = OTHERLINELEN // 2 def fix_line(line): return "".join("\\x{}".format(line[i:i + 2].decode()) for i in range(0, len(line), 2)) def main(): with open(sys.argv[1], "rb") as f: data = f.read().strip().splitlines() with sys.stdout as f: f.write("#define INPUTLEN {}\n".format(len(data[0]) // 2)) f.write("\n") f.write("static const unsigned char input[][INPUTLEN + 1] = {\n")<|fim▁hole|> if len(line) > FIRSTLINEBYTES: line = line[FIRSTLINEBYTES:] while line: f.write("\n\t\t\t\"{}\"".format( fix_line(line[:OTHERLINEBYTES]))) line = line[OTHERLINEBYTES:] f.write(",\n") f.write("};\n") if __name__ == "__main__": main();<|fim▁end|>
for line in data: f.write("\t\"{}\"".format(fix_line(line[:FIRSTLINEBYTES])))
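
Concretely, fix_line expands a run of ASCII hex digits into C string escapes, and the main loop quotes and wraps the result so each emitted line stays under MAXLINELEN columns. A standalone Python check of the escaping step, copied from the function above:

def fix_line(line):
    # line is bytes of ASCII hex digits, two per encoded byte.
    return "".join("\\x{}".format(line[i:i + 2].decode())
                   for i in range(0, len(line), 2))

assert fix_line(b"deadbeef") == "\\xde\\xad\\xbe\\xef"
# A 34-hex-char slice (FIRSTLINEBYTES) encodes 17 bytes and renders as
# 17 * 4 = 68 escape characters, just inside FIRSTLINELEN (69 columns).
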
<|file_name|>treeModel.test.ts<|end_file_name|><|fim▁begin|>/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ import * as assert from 'assert'; import * as lifecycle from 'vs/base/common/lifecycle'; import * as _ from 'vs/base/parts/tree/browser/tree'; import * as model from 'vs/base/parts/tree/browser/treeModel'; import * as TreeDefaults from 'vs/base/parts/tree/browser/treeDefaults'; import { Event, Emitter } from 'vs/base/common/event'; import { timeout } from 'vs/base/common/async'; export class FakeRenderer { public getHeight(tree: _.ITree, element: any): number { return 20; } public getTemplateId(tree: _.ITree, element: any): string { return 'fake'; } public renderTemplate(tree: _.ITree, templateId: string, container: any): any { return null; } public renderElement(tree: _.ITree, element: any, templateId: string, templateData: any): void { // noop } public disposeTemplate(tree: _.ITree, templateId: string, templateData: any): void { // noop } } class TreeContext implements _.ITreeContext { public tree: _.ITree = null; public options: _.ITreeOptions = { autoExpandSingleChildren: true }; public dataSource: _.IDataSource; public renderer: _.IRenderer; public controller?: _.IController; public dnd?: _.IDragAndDrop; public filter: _.IFilter; public sorter: _.ISorter; constructor(public configuration: _.ITreeConfiguration) { this.dataSource = configuration.dataSource; this.renderer = configuration.renderer || new FakeRenderer(); this.controller = configuration.controller; this.dnd = configuration.dnd; this.filter = configuration.filter || new TreeDefaults.DefaultFilter(); this.sorter = configuration.sorter || new TreeDefaults.DefaultSorter(); } } class TreeModel extends model.TreeModel { constructor(configuration: _.ITreeConfiguration) { super(new TreeContext(configuration)); } } class EventCounter { private listeners: lifecycle.IDisposable[]; private _count: number; constructor() { this.listeners = []; this._count = 0; } public listen<T>(event: Event<T>, fn: (e: T) => void = null): () => void { let r = event(data => { this._count++; if (fn) { fn(data); } }); this.listeners.push(r); return () => { let idx = this.listeners.indexOf(r); if (idx > -1) { this.listeners.splice(idx, 1); r.dispose(); } }; } public up(): void { this._count++; } public get count(): number { return this._count; } public dispose(): void { this.listeners = lifecycle.dispose(this.listeners); this._count = -1; } } var SAMPLE: any = { ONE: { id: 'one' }, AB: { id: 'ROOT', children: [ { id: 'a', children: [ { id: 'aa' }, { id: 'ab' } ] }, { id: 'b' }, { id: 'c', children: [ { id: 'ca' }, { id: 'cb' } ] } ] }, DEEP: { id: 'ROOT', children: [ { id: 'a', children: [ { id: 'x', children: [ { id: 'xa' }, { id: 'xb' }, ] } ] }, { id: 'b' } ] }, DEEP2: { id: 'ROOT', children: [ { id: 'a', children: [ { id: 'x', children: [ { id: 'xa' }, { id: 'xb' }, ] }, { id: 'y' } ] }, { id: 'b' } ] } }; class TestDataSource implements _.IDataSource { public getId(tree, element): string { return element.id; } public hasChildren(tree, element): boolean { return !!element.children; } public getChildren(tree, element): Promise<any> { return Promise.resolve(element.children); } public getParent(tree, element): Promise<any> { throw new Error('Not 
implemented'); } } suite('TreeModel', () => { var model: model.TreeModel; var counter: EventCounter; setup(() => { counter = new EventCounter(); model = new TreeModel({ dataSource: new TestDataSource() }); }); teardown(() => { counter.dispose(); model.dispose(); }); test('setInput, getInput', () => { model.setInput(SAMPLE.ONE); assert.equal(model.getInput(), SAMPLE.ONE); }); test('refresh() refreshes all', () => { return model.setInput(SAMPLE.AB).then(() => { counter.listen(model.onRefresh); // 1 counter.listen(model.onDidRefresh); // 1 counter.listen(model.onDidRefreshItem); // 4 counter.listen(model.onRefreshItemChildren); // 1 counter.listen(model.onDidRefreshItemChildren); // 1 return model.refresh(null); }).then(() => { assert.equal(counter.count, 8); }); }); test('refresh(root) refreshes all', () => { return model.setInput(SAMPLE.AB).then(() => { counter.listen(model.onRefresh); // 1 counter.listen(model.onDidRefresh); // 1 counter.listen(model.onDidRefreshItem); // 4 counter.listen(model.onRefreshItemChildren); // 1 counter.listen(model.onDidRefreshItemChildren); // 1 return model.refresh(SAMPLE.AB); }).then(() => { assert.equal(counter.count, 8); }); }); test('refresh(root, false) refreshes the root', () => { return model.setInput(SAMPLE.AB).then(() => { counter.listen(model.onRefresh); // 1 counter.listen(model.onDidRefresh); // 1 counter.listen(model.onDidRefreshItem); // 1 counter.listen(model.onRefreshItemChildren); // 1 counter.listen(model.onDidRefreshItemChildren); // 1 return model.refresh(SAMPLE.AB, false); }).then(() => { assert.equal(counter.count, 5); }); }); test('refresh(collapsed element) does not refresh descendants', () => { return model.setInput(SAMPLE.AB).then(() => { counter.listen(model.onRefresh); // 1 counter.listen(model.onDidRefresh); // 1 counter.listen(model.onDidRefreshItem); // 1 counter.listen(model.onRefreshItemChildren); // 0 counter.listen(model.onDidRefreshItemChildren); // 0 return model.refresh(SAMPLE.AB.children[0]); }).then(() => { assert.equal(counter.count, 3); }); }); test('refresh(expanded element) refreshes the element and descendants', () => { return model.setInput(SAMPLE.AB).then(() => { return model.expand(SAMPLE.AB.children[0]).then(() => { counter.listen(model.onRefresh); // 1 counter.listen(model.onDidRefresh); // 1 counter.listen(model.onDidRefreshItem); // 3 counter.listen(model.onRefreshItemChildren); // 1 counter.listen(model.onDidRefreshItemChildren); // 1 return model.refresh(SAMPLE.AB.children[0]); }); }).then(() => { assert.equal(counter.count, 7); }); }); test('refresh(element, false) refreshes the element', () => { return model.setInput(SAMPLE.AB).then(() => { return model.expand(SAMPLE.AB.children[0]).then(() => { counter.listen(model.onRefresh); // 1 counter.listen(model.onDidRefresh); // 1 counter.listen(model.onDidRefreshItem, item => { // 1 assert.equal(item.id, 'a'); counter.up(); }); counter.listen(model.onRefreshItemChildren); // 1 counter.listen(model.onDidRefreshItemChildren); // 1 return model.refresh(SAMPLE.AB.children[0], false); }); }).then(() => { assert.equal(counter.count, 6); }); }); test('depths', () => { return model.setInput(SAMPLE.AB).then(() => { return model.expandAll(['a', 'c']).then(() => { counter.listen(model.onDidRefreshItem, item => { switch (item.id) { case 'ROOT': assert.equal(item.getDepth(), 0); break; case 'a': assert.equal(item.getDepth(), 1); break; case 'aa': assert.equal(item.getDepth(), 2); break; case 'ab': assert.equal(item.getDepth(), 2); break; case 'b': 
assert.equal(item.getDepth(), 1); break; case 'c': assert.equal(item.getDepth(), 1); break; case 'ca': assert.equal(item.getDepth(), 2); break; case 'cb': assert.equal(item.getDepth(), 2); break; default: return; } counter.up(); }); return model.refresh(); }); }).then(() => { assert.equal(counter.count, 16); }); }); test('intersections', () => { return model.setInput(SAMPLE.AB).then(() => { return model.expandAll(['a', 'c']).then(() => { // going internals var r = (<any>model).registry; assert(r.getItem('a').intersects(r.getItem('a'))); assert(r.getItem('a').intersects(r.getItem('aa'))); assert(r.getItem('a').intersects(r.getItem('ab'))); assert(r.getItem('aa').intersects(r.getItem('a'))); assert(r.getItem('ab').intersects(r.getItem('a'))); assert(!r.getItem('aa').intersects(r.getItem('ab'))); assert(!r.getItem('a').intersects(r.getItem('b'))); assert(!r.getItem('a').intersects(r.getItem('c'))); assert(!r.getItem('a').intersects(r.getItem('ca'))); assert(!r.getItem('aa').intersects(r.getItem('ca'))); }); }); }); }); suite('TreeModel - TreeNavigator', () => { var model: model.TreeModel; var counter: EventCounter; setup(() => { counter = new EventCounter(); model = new TreeModel({ dataSource: new TestDataSource() }); }); teardown(() => { counter.dispose(); model.dispose(); }); test('next()', () => { return model.setInput(SAMPLE.AB).then(() => { var nav = model.getNavigator(); assert.equal(nav.next()!.id, 'a'); assert.equal(nav.next()!.id, 'b'); assert.equal(nav.next()!.id, 'c'); assert.equal(nav.next() && false, null); }); }); test('previous()', () => { return model.setInput(SAMPLE.AB).then(() => { var nav = model.getNavigator(); nav.next(); nav.next(); assert.equal(nav.next()!.id, 'c'); assert.equal(nav.previous()!.id, 'b'); assert.equal(nav.previous()!.id, 'a'); assert.equal(nav.previous() && false, null); }); }); test('parent()', () => { return model.setInput(SAMPLE.AB).then(() => { return model.expandAll([{ id: 'a' }, { id: 'c' }]).then(() => { var nav = model.getNavigator(); assert.equal(nav.next()!.id, 'a'); assert.equal(nav.next()!.id, 'aa'); assert.equal(nav.parent()!.id, 'a'); assert.equal(nav.next()!.id, 'aa'); assert.equal(nav.next()!.id, 'ab'); assert.equal(nav.parent()!.id, 'a'); assert.equal(nav.next()!.id, 'aa'); assert.equal(nav.next()!.id, 'ab'); assert.equal(nav.next()!.id, 'b'); assert.equal(nav.next()!.id, 'c'); assert.equal(nav.next()!.id, 'ca'); assert.equal(nav.parent()!.id, 'c'); assert.equal(nav.parent() && false, null); }); }); }); test('next() - scoped', () => { return model.setInput(SAMPLE.AB).then(() => { var nav = model.getNavigator(SAMPLE.AB.children[0]); return model.expand({ id: 'a' }).then(() => { assert.equal(nav.next()!.id, 'aa'); assert.equal(nav.next()!.id, 'ab'); assert.equal(nav.next() && false, null); }); }); }); test('previous() - scoped', () => { return model.setInput(SAMPLE.AB).then(() => { var nav = model.getNavigator(SAMPLE.AB.children[0]); return model.expand({ id: 'a' }).then(() => { assert.equal(nav.next()!.id, 'aa'); assert.equal(nav.next()!.id, 'ab'); assert.equal(nav.previous()!.id, 'aa'); assert.equal(nav.previous() && false, null); }); }); }); test('parent() - scoped', () => { return model.setInput(SAMPLE.AB).then(() => { return model.expandAll([{ id: 'a' }, { id: 'c' }]).then(() => { var nav = model.getNavigator(SAMPLE.AB.children[0]); assert.equal(nav.next()!.id, 'aa'); assert.equal(nav.next()!.id, 'ab'); assert.equal(nav.parent() && false, null); }); }); }); test('next() - non sub tree only', () => { return 
model.setInput(SAMPLE.AB).then(() => { var nav = model.getNavigator(SAMPLE.AB.children[0], false); return model.expand({ id: 'a' }).then(() => { assert.equal(nav.next()!.id, 'aa'); assert.equal(nav.next()!.id, 'ab'); assert.equal(nav.next()!.id, 'b'); assert.equal(nav.next()!.id, 'c'); assert.equal(nav.next() && false, null); }); }); }); test('previous() - non sub tree only', () => { return model.setInput(SAMPLE.AB).then(() => { var nav = model.getNavigator(SAMPLE.AB.children[0], false); return model.expand({ id: 'a' }).then(() => { assert.equal(nav.next()!.id, 'aa'); assert.equal(nav.next()!.id, 'ab'); assert.equal(nav.next()!.id, 'b'); assert.equal(nav.next()!.id, 'c'); assert.equal(nav.previous()!.id, 'b'); assert.equal(nav.previous()!.id, 'ab'); assert.equal(nav.previous()!.id, 'aa'); assert.equal(nav.previous()!.id, 'a'); assert.equal(nav.previous() && false, null); }); }); }); test('parent() - non sub tree only', () => { return model.setInput(SAMPLE.AB).then(() => { return model.expandAll([{ id: 'a' }, { id: 'c' }]).then(() => { var nav = model.getNavigator(SAMPLE.AB.children[0], false); assert.equal(nav.next()!.id, 'aa'); assert.equal(nav.next()!.id, 'ab'); assert.equal(nav.parent()!.id, 'a'); assert.equal(nav.parent() && false, null); }); }); }); test('deep next() - scoped', () => { return model.setInput(SAMPLE.DEEP).then(() => { return model.expand(SAMPLE.DEEP.children[0]).then(() => { return model.expand(SAMPLE.DEEP.children[0].children[0]).then(() => { var nav = model.getNavigator(SAMPLE.DEEP.children[0].children[0]); assert.equal(nav.next()!.id, 'xa'); assert.equal(nav.next()!.id, 'xb'); assert.equal(nav.next() && false, null); }); }); }); }); test('deep previous() - scoped', () => { return model.setInput(SAMPLE.DEEP).then(() => { return model.expand(SAMPLE.DEEP.children[0]).then(() => { return model.expand(SAMPLE.DEEP.children[0].children[0]).then(() => { var nav = model.getNavigator(SAMPLE.DEEP.children[0].children[0]); assert.equal(nav.next()!.id, 'xa'); assert.equal(nav.next()!.id, 'xb'); assert.equal(nav.previous()!.id, 'xa'); assert.equal(nav.previous() && false, null); }); }); }); }); test('last()', () => { return model.setInput(SAMPLE.AB).then(() => { return model.expandAll([{ id: 'a' }, { id: 'c' }]).then(() => { const nav = model.getNavigator(); assert.equal(nav.last()!.id, 'cb'); }); }); }); }); suite('TreeModel - Expansion', () => { var model: model.TreeModel; var counter: EventCounter; setup(() => { counter = new EventCounter(); model = new TreeModel({ dataSource: new TestDataSource() }); }); teardown(() => { counter.dispose(); model.dispose(); }); test('collapse, expand', () => { return model.setInput(SAMPLE.AB).then(() => { counter.listen(model.onExpandItem, (e) => { assert.equal(e.item.id, 'a'); var nav = model.getNavigator(e.item); assert.equal(nav.next() && false, null); }); counter.listen(model.onDidExpandItem, (e) => { assert.equal(e.item.id, 'a'); var nav = model.getNavigator(e.item); assert.equal(nav.next()!.id, 'aa'); assert.equal(nav.next()!.id, 'ab'); assert.equal(nav.next() && false, null); }); assert(!model.isExpanded(SAMPLE.AB.children[0])); var nav = model.getNavigator(); assert.equal(nav.next()!.id, 'a'); assert.equal(nav.next()!.id, 'b'); assert.equal(nav.next()!.id, 'c'); assert.equal(nav.next() && false, null); assert.equal(model.getExpandedElements().length, 0); return model.expand(SAMPLE.AB.children[0]).then(() => { assert(model.isExpanded(SAMPLE.AB.children[0])); nav = model.getNavigator(); assert.equal(nav.next()!.id, 'a'); 
assert.equal(nav.next()!.id, 'aa'); assert.equal(nav.next()!.id, 'ab'); assert.equal(nav.next()!.id, 'b'); assert.equal(nav.next()!.id, 'c'); assert.equal(nav.next() && false, null); var expandedElements = model.getExpandedElements(); assert.equal(expandedElements.length, 1); assert.equal(expandedElements[0].id, 'a'); assert.equal(counter.count, 2); }); }); }); test('toggleExpansion', () => { return model.setInput(SAMPLE.AB).then(() => { assert(!model.isExpanded(SAMPLE.AB.children[0])); return model.toggleExpansion(SAMPLE.AB.children[0]).then(() => { assert(model.isExpanded(SAMPLE.AB.children[0])); assert(!model.isExpanded(SAMPLE.AB.children[0].children[0])); return model.toggleExpansion(SAMPLE.AB.children[0].children[0]).then(() => { assert(!model.isExpanded(SAMPLE.AB.children[0].children[0])); return model.toggleExpansion(SAMPLE.AB.children[0]).then(() => { assert(!model.isExpanded(SAMPLE.AB.children[0])); }); }); }); }); }); test('collapseAll', () => { return model.setInput(SAMPLE.DEEP2).then(() => { return model.expand(SAMPLE.DEEP2.children[0]).then(() => { return model.expand(SAMPLE.DEEP2.children[0].children[0]).then(() => { assert(model.isExpanded(SAMPLE.DEEP2.children[0])); assert(model.isExpanded(SAMPLE.DEEP2.children[0].children[0])); return model.collapseAll().then(() => { assert(!model.isExpanded(SAMPLE.DEEP2.children[0])); return model.expand(SAMPLE.DEEP2.children[0]).then(() => { assert(!model.isExpanded(SAMPLE.DEEP2.children[0].children[0])); }); }); }); }); }); }); test('auto expand single child folders', () => { return model.setInput(SAMPLE.DEEP).then(() => { return model.expand(SAMPLE.DEEP.children[0]).then(() => { assert(model.isExpanded(SAMPLE.DEEP.children[0])); assert(model.isExpanded(SAMPLE.DEEP.children[0].children[0])); }); }); }); test('expand can trigger refresh', () => { // MUnit.expect(16); return model.setInput(SAMPLE.AB).then(() => { assert(!model.isExpanded(SAMPLE.AB.children[0])); var nav = model.getNavigator(); assert.equal(nav.next()!.id, 'a'); assert.equal(nav.next()!.id, 'b'); assert.equal(nav.next()!.id, 'c'); assert.equal(nav.next() && false, null); var f: () => void = counter.listen(model.onRefreshItemChildren, (e) => { assert.equal(e.item.id, 'a'); f(); }); var g: () => void = counter.listen(model.onDidRefreshItemChildren, (e) => { assert.equal(e.item.id, 'a'); g(); }); return model.expand(SAMPLE.AB.children[0]).then(() => { assert(model.isExpanded(SAMPLE.AB.children[0])); nav = model.getNavigator(); assert.equal(nav.next()!.id, 'a'); assert.equal(nav.next()!.id, 'aa'); assert.equal(nav.next()!.id, 'ab'); assert.equal(nav.next()!.id, 'b'); assert.equal(nav.next()!.id, 'c'); assert.equal(nav.next() && false, null); assert.equal(counter.count, 2); }); }); }); test('top level collapsed', () => { return model.setInput(SAMPLE.AB).then(() => { return model.collapseAll([{ id: 'a' }, { id: 'b' }, { id: 'c' }]).then(() => { var nav = model.getNavigator(); assert.equal(nav.next()!.id, 'a'); assert.equal(nav.next()!.id, 'b'); assert.equal(nav.next()!.id, 'c'); assert.equal(nav.previous()!.id, 'b'); assert.equal(nav.previous()!.id, 'a'); assert.equal(nav.previous() && false, null); }); }); }); test('shouldAutoexpand', () => { // setup const model = new TreeModel({ dataSource: { getId: (_, e) => e, hasChildren: (_, e) => true, getChildren: (_, e) => { if (e === 'root') { return Promise.resolve(['a', 'b', 'c']); } if (e === 'b') { return Promise.resolve(['b1']); } return Promise.resolve([]); }, getParent: (_, e): Promise<any> => { throw new Error('not 
implemented'); }, shouldAutoexpand: (_, e) => e === 'b' } }); return model.setInput('root').then(() => { return model.refresh('root', true); }).then(() => { assert(!model.isExpanded('a')); assert(model.isExpanded('b')); assert(!model.isExpanded('c')); }); }); }); class TestFilter implements _.IFilter { public fn: (any) => boolean; constructor() { this.fn = () => true; } public isVisible(tree, element): boolean { return this.fn(element); } } suite('TreeModel - Filter', () => { var model: model.TreeModel; var counter: EventCounter; var filter: TestFilter; setup(() => { counter = new EventCounter(); filter = new TestFilter(); model = new TreeModel({ dataSource: new TestDataSource(), filter: filter }); }); teardown(() => { counter.dispose(); model.dispose(); }); test('no filter', () => { return model.setInput(SAMPLE.AB).then(() => { return model.expandAll([{ id: 'a' }, { id: 'c' }]).then(() => { var nav = model.getNavigator(); assert.equal(nav.next()!.id, 'a'); assert.equal(nav.next()!.id, 'aa'); assert.equal(nav.next()!.id, 'ab');<|fim▁hole|> assert.equal(nav.next()!.id, 'c'); assert.equal(nav.next()!.id, 'ca'); assert.equal(nav.next()!.id, 'cb'); assert.equal(nav.previous()!.id, 'ca'); assert.equal(nav.previous()!.id, 'c'); assert.equal(nav.previous()!.id, 'b'); assert.equal(nav.previous()!.id, 'ab'); assert.equal(nav.previous()!.id, 'aa'); assert.equal(nav.previous()!.id, 'a'); assert.equal(nav.previous() && false, null); }); }); }); test('filter all', () => { filter.fn = () => false; return model.setInput(SAMPLE.AB).then(() => { return model.refresh().then(() => { var nav = model.getNavigator(); assert.equal(nav.next() && false, null); }); }); }); test('simple filter', () => { // hide elements that do not start with 'a' filter.fn = (e) => e.id[0] === 'a'; return model.setInput(SAMPLE.AB).then(() => { return model.expand({ id: 'a' }).then(() => { var nav = model.getNavigator(); assert.equal(nav.next()!.id, 'a'); assert.equal(nav.next()!.id, 'aa'); assert.equal(nav.next()!.id, 'ab'); assert.equal(nav.previous()!.id, 'aa'); assert.equal(nav.previous()!.id, 'a'); assert.equal(nav.previous() && false, null); }); }); }); test('simple filter 2', () => { // hide 'ab' filter.fn = (e) => e.id !== 'ab'; return model.setInput(SAMPLE.AB).then(() => { return model.expand({ id: 'a' }).then(() => { var nav = model.getNavigator(); assert.equal(nav.next()!.id, 'a'); assert.equal(nav.next()!.id, 'aa'); assert.equal(nav.next()!.id, 'b'); assert.equal(nav.next()!.id, 'c'); assert.equal(nav.next() && false, null); }); }); }); test('simple filter, opposite', () => { // hide elements that start with 'a' filter.fn = (e) => e.id[0] !== 'a'; return model.setInput(SAMPLE.AB).then(() => { return model.expand({ id: 'c' }).then(() => { var nav = model.getNavigator(); assert.equal(nav.next()!.id, 'b'); assert.equal(nav.next()!.id, 'c'); assert.equal(nav.next()!.id, 'ca'); assert.equal(nav.next()!.id, 'cb'); assert.equal(nav.previous()!.id, 'ca'); assert.equal(nav.previous()!.id, 'c'); assert.equal(nav.previous()!.id, 'b'); assert.equal(nav.previous() && false, null); }); }); }); test('simple filter, mischieving', () => { // hide the element 'a' filter.fn = (e) => e.id !== 'a'; return model.setInput(SAMPLE.AB).then(() => { return model.expand({ id: 'c' }).then(() => { var nav = model.getNavigator(); assert.equal(nav.next()!.id, 'b'); assert.equal(nav.next()!.id, 'c'); assert.equal(nav.next()!.id, 'ca'); assert.equal(nav.next()!.id, 'cb'); assert.equal(nav.previous()!.id, 'ca'); assert.equal(nav.previous()!.id, 'c'); 
assert.equal(nav.previous()!.id, 'b'); assert.equal(nav.previous() && false, null); }); }); }); test('simple filter & previous', () => { // hide 'b' filter.fn = (e) => e.id !== 'b'; return model.setInput(SAMPLE.AB).then(() => { var nav = model.getNavigator({ id: 'c' }, false); assert.equal(nav.previous()!.id, 'a'); assert.equal(nav.previous() && false, null); }); }); }); suite('TreeModel - Traits', () => { var model: model.TreeModel; var counter: EventCounter; setup(() => { counter = new EventCounter(); model = new TreeModel({ dataSource: new TestDataSource() }); }); teardown(() => { counter.dispose(); model.dispose(); }); test('Selection', () => { return model.setInput(SAMPLE.AB).then(() => { assert.equal(model.getSelection().length, 0); model.select(SAMPLE.AB.children[1]); assert(model.isSelected(SAMPLE.AB.children[1])); assert.equal(model.getSelection().length, 1); model.select(SAMPLE.AB.children[0]); assert(model.isSelected(SAMPLE.AB.children[0])); assert.equal(model.getSelection().length, 2); model.select(SAMPLE.AB.children[2]); assert(model.isSelected(SAMPLE.AB.children[2])); assert.equal(model.getSelection().length, 3); model.deselect(SAMPLE.AB.children[0]); assert(!model.isSelected(SAMPLE.AB.children[0])); assert.equal(model.getSelection().length, 2); model.setSelection([]); assert(!model.isSelected(SAMPLE.AB.children[0])); assert(!model.isSelected(SAMPLE.AB.children[1])); assert(!model.isSelected(SAMPLE.AB.children[2])); assert.equal(model.getSelection().length, 0); model.selectAll([SAMPLE.AB.children[0], SAMPLE.AB.children[1], SAMPLE.AB.children[2]]); assert.equal(model.getSelection().length, 3); model.select(SAMPLE.AB.children[0]); assert.equal(model.getSelection().length, 3); model.deselectAll([SAMPLE.AB.children[0], SAMPLE.AB.children[1], SAMPLE.AB.children[2]]); assert.equal(model.getSelection().length, 0); model.deselect(SAMPLE.AB.children[0]); assert.equal(model.getSelection().length, 0); model.setSelection([SAMPLE.AB.children[0]]); assert.equal(model.getSelection().length, 1); assert(model.isSelected(SAMPLE.AB.children[0])); assert(!model.isSelected(SAMPLE.AB.children[1])); assert(!model.isSelected(SAMPLE.AB.children[2])); model.setSelection([SAMPLE.AB.children[0], SAMPLE.AB.children[1], SAMPLE.AB.children[2]]); assert.equal(model.getSelection().length, 3); assert(model.isSelected(SAMPLE.AB.children[0])); assert(model.isSelected(SAMPLE.AB.children[1])); assert(model.isSelected(SAMPLE.AB.children[2])); model.setSelection([SAMPLE.AB.children[1], SAMPLE.AB.children[2]]); assert.equal(model.getSelection().length, 2); assert(!model.isSelected(SAMPLE.AB.children[0])); assert(model.isSelected(SAMPLE.AB.children[1])); assert(model.isSelected(SAMPLE.AB.children[2])); model.setSelection([]); assert.deepEqual(model.getSelection(), []); assert.equal(model.getSelection().length, 0); assert(!model.isSelected(SAMPLE.AB.children[0])); assert(!model.isSelected(SAMPLE.AB.children[1])); assert(!model.isSelected(SAMPLE.AB.children[2])); model.selectNext(); assert.equal(model.getSelection().length, 1); assert(model.isSelected(SAMPLE.AB.children[0])); model.selectNext(); assert.equal(model.getSelection().length, 1); assert(model.isSelected(SAMPLE.AB.children[1])); model.selectNext(); assert.equal(model.getSelection().length, 1); assert(model.isSelected(SAMPLE.AB.children[2])); model.selectNext(); assert.equal(model.getSelection().length, 1); assert(model.isSelected(SAMPLE.AB.children[2])); model.selectPrevious(); assert.equal(model.getSelection().length, 1); 
assert(model.isSelected(SAMPLE.AB.children[1])); model.selectPrevious(); assert.equal(model.getSelection().length, 1); assert(model.isSelected(SAMPLE.AB.children[0])); model.selectPrevious(); assert.equal(model.getSelection().length, 1); assert(model.isSelected(SAMPLE.AB.children[0])); model.selectNext(2); assert.equal(model.getSelection().length, 1); assert(model.isSelected(SAMPLE.AB.children[2])); model.selectPrevious(4); assert.equal(model.getSelection().length, 1); assert(model.isSelected(SAMPLE.AB.children[0])); assert.equal(model.isSelected(SAMPLE.AB.children[0]), true); assert.equal(model.isSelected(SAMPLE.AB.children[2]), false); }); }); test('Focus', () => { return model.setInput(SAMPLE.AB).then(() => { assert(!model.getFocus()); model.setFocus(SAMPLE.AB.children[1]); assert(model.isFocused(SAMPLE.AB.children[1])); assert(model.getFocus()); model.setFocus(SAMPLE.AB.children[0]); assert(model.isFocused(SAMPLE.AB.children[0])); assert(model.getFocus()); model.setFocus(SAMPLE.AB.children[2]); assert(model.isFocused(SAMPLE.AB.children[2])); assert(model.getFocus()); model.setFocus(); assert(!model.isFocused(SAMPLE.AB.children[0])); assert(!model.isFocused(SAMPLE.AB.children[1])); assert(!model.isFocused(SAMPLE.AB.children[2])); assert(!model.getFocus()); model.setFocus(SAMPLE.AB.children[0]); assert(model.getFocus()); assert(model.isFocused(SAMPLE.AB.children[0])); assert(!model.isFocused(SAMPLE.AB.children[1])); assert(!model.isFocused(SAMPLE.AB.children[2])); model.setFocus(); assert(!model.getFocus()); assert(!model.isFocused(SAMPLE.AB.children[0])); assert(!model.isFocused(SAMPLE.AB.children[1])); assert(!model.isFocused(SAMPLE.AB.children[2])); model.focusNext(); assert(model.getFocus()); assert(model.isFocused(SAMPLE.AB.children[0])); model.focusNext(); assert(model.getFocus()); assert(model.isFocused(SAMPLE.AB.children[1])); model.focusNext(); assert(model.getFocus()); assert(model.isFocused(SAMPLE.AB.children[2])); model.focusNext(); assert(model.getFocus()); assert(model.isFocused(SAMPLE.AB.children[2])); model.focusPrevious(); assert(model.getFocus()); assert(model.isFocused(SAMPLE.AB.children[1])); model.focusPrevious(); assert(model.getFocus()); assert(model.isFocused(SAMPLE.AB.children[0])); model.focusPrevious(); assert(model.getFocus()); assert(model.isFocused(SAMPLE.AB.children[0])); model.focusNext(2); assert(model.getFocus()); assert(model.isFocused(SAMPLE.AB.children[2])); model.focusPrevious(4); assert(model.getFocus()); assert(model.isFocused(SAMPLE.AB.children[0])); assert.equal(model.isFocused(SAMPLE.AB.children[0]), true); assert.equal(model.isFocused(SAMPLE.AB.children[2]), false); model.focusFirst(); assert(model.isFocused(SAMPLE.AB.children[0])); model.focusNth(0); assert(model.isFocused(SAMPLE.AB.children[0])); model.focusNth(1); assert(model.isFocused(SAMPLE.AB.children[1])); }); }); test('Highlight', () => { return model.setInput(SAMPLE.AB).then(() => { assert(!model.getHighlight()); model.setHighlight(SAMPLE.AB.children[1]); assert(model.isHighlighted(SAMPLE.AB.children[1])); assert(model.getHighlight()); model.setHighlight(SAMPLE.AB.children[0]); assert(model.isHighlighted(SAMPLE.AB.children[0])); assert(model.getHighlight()); model.setHighlight(SAMPLE.AB.children[2]); assert(model.isHighlighted(SAMPLE.AB.children[2])); assert(model.getHighlight()); model.setHighlight(); assert(!model.isHighlighted(SAMPLE.AB.children[0])); assert(!model.isHighlighted(SAMPLE.AB.children[1])); assert(!model.isHighlighted(SAMPLE.AB.children[2])); 
assert(!model.getHighlight()); model.setHighlight(SAMPLE.AB.children[0]); assert(model.getHighlight()); assert(model.isHighlighted(SAMPLE.AB.children[0])); assert(!model.isHighlighted(SAMPLE.AB.children[1])); assert(!model.isHighlighted(SAMPLE.AB.children[2])); assert.equal(model.isHighlighted(SAMPLE.AB.children[0]), true); assert.equal(model.isHighlighted(SAMPLE.AB.children[2]), false); model.setHighlight(); assert(!model.getHighlight()); assert(!model.isHighlighted(SAMPLE.AB.children[0])); assert(!model.isHighlighted(SAMPLE.AB.children[1])); assert(!model.isHighlighted(SAMPLE.AB.children[2])); }); }); }); class DynamicModel implements _.IDataSource { private data: any; public promiseFactory: { (): Promise<any>; } | null; private _onGetChildren = new Emitter<any>(); readonly onGetChildren: Event<any> = this._onGetChildren.event; private _onDidGetChildren = new Emitter<any>(); readonly onDidGetChildren: Event<any> = this._onDidGetChildren.event; constructor() { this.data = { root: [] }; this.promiseFactory = null; } public addChild(parent, child): void { if (!this.data[parent]) { this.data[parent] = []; } this.data[parent].push(child); } public removeChild(parent, child): void { this.data[parent].splice(this.data[parent].indexOf(child), 1); if (this.data[parent].length === 0) { delete this.data[parent]; } } public move(element, oldParent, newParent): void { this.removeChild(oldParent, element); this.addChild(newParent, element); } public rename(parent, oldName, newName): void { this.removeChild(parent, oldName); this.addChild(parent, newName); } public getId(tree, element): string { return element; } public hasChildren(tree, element): boolean { return !!this.data[element]; } public getChildren(tree, element): Promise<any> { this._onGetChildren.fire(element); var result = this.promiseFactory ? 
this.promiseFactory() : Promise.resolve(null); return result.then(() => { this._onDidGetChildren.fire(element); return Promise.resolve(this.data[element]); }); } public getParent(tree, element): Promise<any> { throw new Error('Not implemented'); } } suite('TreeModel - Dynamic data model', () => { var model: model.TreeModel; var dataModel: DynamicModel; var counter: EventCounter; setup(() => { counter = new EventCounter(); dataModel = new DynamicModel(); model = new TreeModel({ dataSource: dataModel, }); }); teardown(() => { counter.dispose(); model.dispose(); }); test('items get property disposed', () => { dataModel.addChild('root', 'grandfather'); dataModel.addChild('grandfather', 'father'); dataModel.addChild('father', 'son'); dataModel.addChild('father', 'daughter'); dataModel.addChild('son', 'baby'); return model.setInput('root').then(() => { return model.expandAll(['grandfather', 'father', 'son']).then(() => { dataModel.removeChild('grandfather', 'father'); var items = ['baby', 'son', 'daughter', 'father']; var times = 0; counter.listen(model.onDidDisposeItem, item => { assert.equal(items[times++], item.id); }); return model.refresh().then(() => { assert.equal(times, items.length); assert.equal(counter.count, 4); }); }); }); }); test('addChild, removeChild, collapse', () => { dataModel.addChild('root', 'super'); dataModel.addChild('root', 'hyper'); dataModel.addChild('root', 'mega'); return model.setInput('root').then(() => { var nav = model.getNavigator(); assert.equal(nav.next()!.id, 'super'); assert.equal(nav.next()!.id, 'hyper'); assert.equal(nav.next()!.id, 'mega'); assert.equal(nav.next() && false, null); dataModel.removeChild('root', 'hyper'); return model.refresh().then(() => { nav = model.getNavigator(); assert.equal(nav.next()!.id, 'super'); assert.equal(nav.next()!.id, 'mega'); assert.equal(nav.next() && false, null); dataModel.addChild('mega', 'micro'); dataModel.addChild('mega', 'nano'); dataModel.addChild('mega', 'pico'); return model.refresh().then(() => { return model.expand('mega').then(() => { nav = model.getNavigator(); assert.equal(nav.next()!.id, 'super'); assert.equal(nav.next()!.id, 'mega'); assert.equal(nav.next()!.id, 'micro'); assert.equal(nav.next()!.id, 'nano'); assert.equal(nav.next()!.id, 'pico'); assert.equal(nav.next() && false, null); model.collapse('mega'); nav = model.getNavigator(); assert.equal(nav.next()!.id, 'super'); assert.equal(nav.next()!.id, 'mega'); assert.equal(nav.next() && false, null); }); }); }); }); }); test('move', () => { dataModel.addChild('root', 'super'); dataModel.addChild('super', 'apples'); dataModel.addChild('super', 'bananas'); dataModel.addChild('super', 'pears'); dataModel.addChild('root', 'hyper'); dataModel.addChild('root', 'mega'); return model.setInput('root').then(() => { return model.expand('super').then(() => { var nav = model.getNavigator(); assert.equal(nav.next()!.id, 'super'); assert.equal(nav.next()!.id, 'apples'); assert.equal(nav.next()!.id, 'bananas'); assert.equal(nav.next()!.id, 'pears'); assert.equal(nav.next()!.id, 'hyper'); assert.equal(nav.next()!.id, 'mega'); assert.equal(nav.next() && false, null); dataModel.move('bananas', 'super', 'hyper'); dataModel.move('apples', 'super', 'mega'); return model.refresh().then(() => { return model.expandAll(['hyper', 'mega']).then(() => { nav = model.getNavigator(); assert.equal(nav.next()!.id, 'super'); assert.equal(nav.next()!.id, 'pears'); assert.equal(nav.next()!.id, 'hyper'); assert.equal(nav.next()!.id, 'bananas'); assert.equal(nav.next()!.id, 'mega'); 
assert.equal(nav.next()!.id, 'apples'); assert.equal(nav.next() && false, null); }); }); }); }); }); test('refreshing grandfather recursively should not refresh collapsed father\'s children immediately', () => { dataModel.addChild('root', 'grandfather'); dataModel.addChild('grandfather', 'father'); dataModel.addChild('father', 'son'); return model.setInput('root').then(() => { return model.expand('grandfather').then(() => { return model.collapse('father').then(() => { var times = 0; var listener = dataModel.onGetChildren((element) => { times++; assert.equal(element, 'grandfather'); }); return model.refresh('grandfather').then(() => { assert.equal(times, 1); listener.dispose(); listener = dataModel.onGetChildren((element) => { times++; assert.equal(element, 'father'); }); return model.expand('father').then(() => { assert.equal(times, 2); listener.dispose(); }); }); }); }); }); }); test('simultaneously refreshing two disjoint elements should parallelize the refreshes', () => { dataModel.addChild('root', 'father'); dataModel.addChild('root', 'mother'); dataModel.addChild('father', 'son'); dataModel.addChild('mother', 'daughter'); return model.setInput('root').then(() => { return model.expand('father').then(() => { return model.expand('mother').then(() => { var nav = model.getNavigator(); assert.equal(nav.next()!.id, 'father'); assert.equal(nav.next()!.id, 'son'); assert.equal(nav.next()!.id, 'mother'); assert.equal(nav.next()!.id, 'daughter'); assert.equal(nav.next() && false, null); dataModel.removeChild('father', 'son'); dataModel.removeChild('mother', 'daughter'); dataModel.addChild('father', 'brother'); dataModel.addChild('mother', 'sister'); dataModel.promiseFactory = () => { return timeout(0); }; var getTimes = 0; var gotTimes = 0; var getListener = dataModel.onGetChildren((element) => { getTimes++; }); var gotListener = dataModel.onDidGetChildren((element) => { gotTimes++; }); var p1 = model.refresh('father'); assert.equal(getTimes, 1); assert.equal(gotTimes, 0); var p2 = model.refresh('mother'); assert.equal(getTimes, 2); assert.equal(gotTimes, 0); return Promise.all([p1, p2]).then(() => { assert.equal(getTimes, 2); assert.equal(gotTimes, 2); nav = model.getNavigator(); assert.equal(nav.next()!.id, 'father'); assert.equal(nav.next()!.id, 'brother'); assert.equal(nav.next()!.id, 'mother'); assert.equal(nav.next()!.id, 'sister'); assert.equal(nav.next() && false, null); getListener.dispose(); gotListener.dispose(); }); }); }); }); }); test('simultaneously recursively refreshing two intersecting elements should concatenate the refreshes - ancestor first', () => { dataModel.addChild('root', 'grandfather'); dataModel.addChild('grandfather', 'father'); dataModel.addChild('father', 'son'); return model.setInput('root').then(() => { return model.expand('grandfather').then(() => { return model.expand('father').then(() => { var nav = model.getNavigator(); assert.equal(nav.next()!.id, 'grandfather'); assert.equal(nav.next()!.id, 'father'); assert.equal(nav.next()!.id, 'son'); assert.equal(nav.next() && false, null); var refreshTimes = 0; counter.listen(model.onDidRefreshItem, (e) => { refreshTimes++; }); var getTimes = 0; var getListener = dataModel.onGetChildren((element) => { getTimes++; }); var gotTimes = 0; var gotListener = dataModel.onDidGetChildren((element) => { gotTimes++; }); var p1Completes: Array<(value?: any) => void> = []; dataModel.promiseFactory = () => { return new Promise((c) => { p1Completes.push(c); }); }; model.refresh('grandfather').then(() => { // just a single get 
assert.equal(refreshTimes, 1); // (+1) grandfather assert.equal(getTimes, 1); assert.equal(gotTimes, 0); // unblock the first get p1Completes.shift()!(); // once the first get is unblocked, the second get should appear assert.equal(refreshTimes, 2); // (+1) first father refresh assert.equal(getTimes, 2); assert.equal(gotTimes, 1); var p2Complete; dataModel.promiseFactory = () => { return new Promise((c) => { p2Complete = c; }); }; var p2 = model.refresh('father'); // same situation still assert.equal(refreshTimes, 3); // (+1) second father refresh assert.equal(getTimes, 2); assert.equal(gotTimes, 1); // unblock the second get p1Completes.shift()!(); // the third get should have appeared, it should've been waiting for the second one assert.equal(refreshTimes, 4); // (+1) first son request assert.equal(getTimes, 3); assert.equal(gotTimes, 2); p2Complete(); // all good assert.equal(refreshTimes, 5); // (+1) second son request assert.equal(getTimes, 3); assert.equal(gotTimes, 3); return p2.then(() => { nav = model.getNavigator(); assert.equal(nav.next()!.id, 'grandfather'); assert.equal(nav.next()!.id, 'father'); assert.equal(nav.next()!.id, 'son'); assert.equal(nav.next() && false, null); getListener.dispose(); gotListener.dispose(); }); }); }); }); }); }); test('refreshing an empty element that adds children should still keep it collapsed', () => { dataModel.addChild('root', 'grandfather'); dataModel.addChild('grandfather', 'father'); return model.setInput('root').then(() => { return model.expand('grandfather').then(() => { return model.expand('father').then(() => { assert(!model.isExpanded('father')); dataModel.addChild('father', 'son'); return model.refresh('father').then(() => { assert(!model.isExpanded('father')); }); }); }); }); }); test('refreshing a collapsed element that adds children should still keep it collapsed', () => { dataModel.addChild('root', 'grandfather'); dataModel.addChild('grandfather', 'father'); dataModel.addChild('father', 'son'); return model.setInput('root').then(() => { return model.expand('grandfather').then(() => { return model.expand('father').then(() => { return model.collapse('father').then(() => { assert(!model.isExpanded('father')); dataModel.addChild('father', 'daughter'); return model.refresh('father').then(() => { assert(!model.isExpanded('father')); }); }); }); }); }); }); test('recursively refreshing an ancestor of an expanded element, should keep that element expanded', () => { dataModel.addChild('root', 'grandfather'); dataModel.addChild('grandfather', 'father'); dataModel.addChild('father', 'son'); return model.setInput('root').then(() => { return model.expand('grandfather').then(() => { return model.expand('father').then(() => { assert(model.isExpanded('grandfather')); assert(model.isExpanded('father')); return model.refresh('grandfather').then(() => { assert(model.isExpanded('grandfather')); assert(model.isExpanded('father')); }); }); }); }); }); test('recursively refreshing an ancestor of a collapsed element, should keep that element collapsed', () => { dataModel.addChild('root', 'grandfather'); dataModel.addChild('grandfather', 'father'); dataModel.addChild('father', 'son'); return model.setInput('root').then(() => { return model.expand('grandfather').then(() => { return model.expand('father').then(() => { return model.collapse('father').then(() => { assert(model.isExpanded('grandfather')); assert(!model.isExpanded('father')); return model.refresh('grandfather').then(() => { assert(model.isExpanded('grandfather')); 
assert(!model.isExpanded('father')); }); }); }); }); }); }); test('Bug 10855:[explorer] quickly deleting things causes NPE in tree - intersectsLock should always be called when trying to unlock', () => { dataModel.addChild('root', 'father'); dataModel.addChild('father', 'son'); dataModel.addChild('root', 'mother'); dataModel.addChild('mother', 'daughter'); return model.setInput('root').then(() => { // delay expansions and refreshes dataModel.promiseFactory = () => { return timeout(0); }; var promises: Promise<any>[] = []; promises.push(model.expand('father')); dataModel.removeChild('root', 'father'); promises.push(model.refresh('root')); promises.push(model.expand('mother')); dataModel.removeChild('root', 'mother'); promises.push(model.refresh('root')); return Promise.all(promises).then(() => { assert(true, 'all good'); }, (errs) => { assert(false, 'should not fail'); }); }); }); }); suite('TreeModel - bugs', () => { var counter: EventCounter; setup(() => { counter = new EventCounter(); }); teardown(() => { counter.dispose(); }); /** * This bug occurs when an item is expanded right during its removal */ test('Bug 10566:[tree] build viewlet is broken after some time', () => { // setup let model = new TreeModel({ dataSource: { getId: (_, e) => e, hasChildren: (_, e) => e === 'root' || e === 'bart', getChildren: (_, e) => { if (e === 'root') { return getRootChildren(); } if (e === 'bart') { return getBartChildren(); } return Promise.resolve([]); }, getParent: (_, e): Promise<any> => { throw new Error('not implemented'); }, } }); let listeners = <any>[]; // helpers var getGetRootChildren = (children: string[], millis = 0) => () => timeout(millis).then(() => children); var getRootChildren = getGetRootChildren(['homer', 'bart', 'lisa', 'marge', 'maggie'], 0); var getGetBartChildren = (millis = 0) => () => timeout(millis).then(() => ['milhouse', 'nelson']); var getBartChildren = getGetBartChildren(0); // item expanding should not exist! counter.listen(model.onExpandItem, () => { assert(false, 'should never receive item:expanding event'); }); counter.listen(model.onDidExpandItem, () => { assert(false, 'should never receive item:expanded event'); }); return model.setInput('root').then(() => { // remove bart getRootChildren = getGetRootChildren(['homer', 'lisa', 'marge', 'maggie'], 10); // refresh root var p1 = model.refresh('root', true).then(() => { assert(true); }, () => { assert(false, 'should never reach this'); }); // at the same time, try to expand bart! var p2 = model.expand('bart').then(() => { assert(false, 'should never reach this'); }, () => { assert(true, 'bart should fail to expand since he was removed meanwhile'); }); // what now? 
return Promise.all([p1, p2]); }).then(() => { // teardown while (listeners.length > 0) { listeners.pop()(); } listeners = null; model.dispose(); assert.equal(counter.count, 0); }); }); test('collapsed resolved parent should also update all children visibility on refresh', async function () { const counter = new EventCounter(); const dataModel = new DynamicModel(); let isSonVisible = true; const filter: _.IFilter = { isVisible(_, element) { return element !== 'son' || isSonVisible; } }; const model = new TreeModel({ dataSource: dataModel, filter }); dataModel.addChild('root', 'father'); dataModel.addChild('father', 'son'); await model.setInput('root'); await model.expand('father'); let nav = model.getNavigator(); assert.equal(nav.next()!.id, 'father'); assert.equal(nav.next()!.id, 'son'); assert.equal(nav.next(), null); await model.collapse('father'); isSonVisible = false; await model.refresh(undefined, true); await model.expand('father'); nav = model.getNavigator(); assert.equal(nav.next()!.id, 'father'); assert.equal(nav.next(), null); counter.dispose(); model.dispose(); }); });<|fim▁end|>
assert.equal(nav.next()!.id, 'b');
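
Most of the navigator assertions in this suite follow from one invariant: next() is a pre-order walk that descends only into expanded items. A compact Python model of that visible order, reusing the SAMPLE.AB shape (illustrative only — the real model is promise-based and event-driven):

def visible_order(node, expanded):
    out = []
    for child in node.get("children", []):
        out.append(child["id"])
        if child["id"] in expanded:
            out.extend(visible_order(child, expanded))
    return out

AB = {"id": "ROOT", "children": [
    {"id": "a", "children": [{"id": "aa"}, {"id": "ab"}]},
    {"id": "b"},
    {"id": "c", "children": [{"id": "ca"}, {"id": "cb"}]},
]}

assert visible_order(AB, set()) == ["a", "b", "c"]
assert visible_order(AB, {"a"}) == ["a", "aa", "ab", "b", "c"]
assert visible_order(AB, {"a", "c"}) == ["a", "aa", "ab", "b", "c", "ca", "cb"]
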
<|file_name|>OggStream.cpp<|end_file_name|><|fim▁begin|>//
// C++ Implementation: Audio::OggStream
//

#include "config.h"

#ifdef HAVE_OGG

#include "OggStream.h"
#include "OggData.h"
#include "config.h"

#include <utility>
#include <limits>

#include <stdlib.h>
#include <vorbis/vorbisfile.h>
#include "vsfilesystem.h"

#ifndef OGG_BUFFER_SIZE
#define OGG_BUFFER_SIZE 4096*2*2
#endif

namespace Audio {

    OggStream::OggStream(const std::string& path, VSFileSystem::VSFileType type) throw(Exception)
        : Stream(path)
    {
        if ( file.OpenReadOnly(path, type) <= VSFileSystem::Ok )
            throw FileOpenException("Error opening file \"" + path + "\"");
        oggData = new __impl::OggData(file, getFormatInternal(), 0);

        // Cache duration in case ov_time_total gets expensive
        duration = ov_time_total( &oggData->vorbisFile, oggData->streamIndex );

        // Allocate read buffer
        readBufferSize = OGG_BUFFER_SIZE;
        readBufferAvail = 0;
        readBuffer = malloc(readBufferSize);
    }

    OggStream::~OggStream()
    {
        // destructor closes the file already
        delete oggData;
    }

    double OggStream::getLengthImpl() const throw(Exception)
    {
        return duration;
    }

    double OggStream::getPositionImpl() const throw()
    {
        return ov_time_tell( &oggData->vorbisFile );
<|fim▁hole|>
    {
        if (position >= duration)
            throw EndOfStreamException();

        readBufferAvail = 0;
        switch (ov_time_seek(&oggData->vorbisFile, position)) {
        case 0: break;
        case OV_ENOSEEK: throw Exception("Stream not seekable");
        case OV_EINVAL: throw Exception("Invalid argument or state");
        case OV_EREAD: throw Exception("Read error");
        case OV_EFAULT: throw Exception("Internal logic fault, bug or heap/stack corruption");
        case OV_EBADLINK:throw CorruptStreamException(false);
        default: throw Exception("Unidentified error code");
        }
    }

    void OggStream::getBufferImpl(void *&buffer, unsigned int &bufferSize) throw(Exception)
    {
        if (readBufferAvail == 0)
            throw NoBufferException();
        buffer = readBuffer;
        bufferSize = readBufferAvail;
    }

    void OggStream::nextBufferImpl() throw(Exception)
    {
        int curStream = oggData->streamIndex;
        long ovr;
        switch( ovr = ov_read(&oggData->vorbisFile, (char*)readBuffer, readBufferSize, 0, 2, 1, &curStream) ) {
        case OV_HOLE: throw CorruptStreamException(false);
        case OV_EBADLINK: throw CorruptStreamException(false);
        case 0: throw EndOfStreamException();
        default: readBufferAvail = ovr; // record the decoded byte count so getBufferImpl can hand it out
        }
    }

};

#endif // HAVE_OGG<|fim▁end|>
    }

    void OggStream::seekImpl(double position) throw(Exception)
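
getBufferImpl and nextBufferImpl form a two-phase pull protocol: decode one chunk, then hand out that same buffer until the next decode. A minimal Python sketch of the contract with hypothetical names (the real semantics are defined by the Stream base class, which is not shown):

class ChunkedStream:
    def __init__(self, chunks):
        self._chunks = list(chunks)
        self._pos = 0
        self._avail = b""  # plays the role of readBuffer/readBufferAvail

    def next_buffer(self):
        if self._pos >= len(self._chunks):
            raise EOFError("end of stream")  # EndOfStreamException
        self._avail = self._chunks[self._pos]
        self._pos += 1

    def get_buffer(self):
        if not self._avail:
            raise LookupError("no buffer decoded yet")  # NoBufferException
        return self._avail

s = ChunkedStream([b"abc", b"de"])
s.next_buffer()
assert s.get_buffer() == b"abc"
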
<|file_name|>rock_paper_scissors.go<|end_file_name|><|fim▁begin|>// The MIT License (MIT)
// Copyright (c) 2014 Philipp Neugebauer

package main

import (
	"bufio"
	"fmt"
	"os"
	"math/rand"
	"strconv"
	"strings"
)

func computer(inputChannel chan int, resultChannel chan string){
	for human_choice := range inputChannel {
		computer_choice := rand.Intn(3)
		evaluation(computer_choice, human_choice, resultChannel)
	}
}

func evaluation(computer_choice int, human_choice int, resultChannel chan string){
	switch human_choice {
	case 0:
		switch computer_choice {
		case 0:
			resultChannel <- "draw"
		case 1:
			resultChannel <- "loss"
		case 2:
			resultChannel <- "win"
		}
	case 1:
		switch computer_choice {
		case 0:
			resultChannel <- "win"
		case 1:
			resultChannel <- "draw"
		case 2:
			resultChannel <- "loss"
		}
	case 2:
		switch computer_choice {
		case 0:
			resultChannel <- "loss"
		case 1:
			resultChannel <- "win"
		case 2:
			resultChannel <- "draw"
		}
	default:
		resultChannel <- "Only numbers between 0 and 2 are valid!"
	}
	close(resultChannel)
}

func main() {
	computerChannel := make(chan int)
	resultChannel := make(chan string)

	go computer(computerChannel, resultChannel)

	reader := bufio.NewReader(os.Stdin)
	fmt.Println("Choose: \n 0 = rock\n 1 = paper\n 2 = scissors")<|fim▁hole|>
	computerChannel <- choice
	close(computerChannel)

	for message := range resultChannel {
		fmt.Println("Result:", message)
	}
}<|fim▁end|>
	text, _ := reader.ReadString('\n')
	// Trim the trailing newline before parsing; strconv.Atoi("2\n") fails,
	// which would silently leave choice at 0.
	choice, _ := strconv.Atoi(strings.TrimSpace(text))
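
The nested switch in evaluation collapses to modular arithmetic: with 0 = rock, 1 = paper, 2 = scissors, (human - computer) mod 3 is 0 for a draw, 1 for a human win and 2 for a loss. A Python check against every branch of the switch above:

def evaluate(human, computer):
    return ("draw", "win", "loss")[(human - computer) % 3]

assert evaluate(0, 0) == "draw" and evaluate(0, 1) == "loss" and evaluate(0, 2) == "win"
assert evaluate(1, 0) == "win" and evaluate(1, 1) == "draw" and evaluate(1, 2) == "loss"
assert evaluate(2, 0) == "loss" and evaluate(2, 1) == "win" and evaluate(2, 2) == "draw"
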
<|file_name|>_meta.py<|end_file_name|><|fim▁begin|>import _plotly_utils.basevalidators <|fim▁hole|> class MetaValidator(_plotly_utils.basevalidators.AnyValidator): def __init__(self, plotly_name="meta", parent_name="surface", **kwargs): super(MetaValidator, self).__init__( plotly_name=plotly_name, parent_name=parent_name, array_ok=kwargs.pop("array_ok", True), edit_type=kwargs.pop("edit_type", "plot"), role=kwargs.pop("role", "info"), **kwargs )<|fim▁end|>
<|file_name|>kit.cpp<|end_file_name|><|fim▁begin|>// // kit.cpp // Neo4j-cpp-driver // // Created by skyblue on 2017/7/9. // Copyright © 2017年 skyblue. All rights reserved. // #include "kit.hpp" #include <sstream> namespace neo4jDriver { //Neo4j工具包 std::string Kit::getStatusCode(std::string httpHeader) { size_t begin = httpHeader.find_first_of(" "); std::string temp = httpHeader.substr(begin+1, httpHeader.length()); size_t end = temp.find_first_of(" "); std::string statusCode = temp.substr(0, end); return statusCode; }; std::string Kit::getWhereString(std::string fieldName, Json::Value &properties, std::string idFieldName) { return Kit::append(fieldName, properties, " AND ", idFieldName); }; std::string Kit::getWhereString(std::string fieldName, std::string propertiesNamePrefix, Json::Value &properties, std::string idFieldName) { return Kit::append(fieldName, propertiesNamePrefix, properties, " AND ", idFieldName); }; std::string Kit::getSetString(std::string fieldName, Json::Value &properties) { return Kit::append(fieldName, properties, ","); }; std::string Kit::getLabelString(const std::vector<std::string> &labels) { std::string labelsString = ""; for (int i=0; i < labels.size(); i++) { if (i+1 < labels.size()) { labelsString += labels[i] + ":"; } else { labelsString += labels[i]; } } return labelsString; }; unsigned long long int Kit::getNodeOrRelationshipID(std::string nodeOrRelationshipSelf) { size_t id; <|fim▁hole|> sstream << idString; sstream >> id; sstream.clear(); return id; }; /* * 私有方法 */ std::string Kit::append(std::string fieldName, Json::Value &properties, std::string appendToken, std::string idFieldName) { std::string parameters = ""; bool isFirst = true; for (Json::ValueIterator i = properties.begin(); i != properties.end(); i++) { if (isFirst) { if (idFieldName == "" || (idFieldName != "" && i.name() != idFieldName)) { parameters += fieldName + "." + i.name() + "={" + i.name() + "}"; } else { parameters += "id(" + fieldName + ")={" + i.name() + "}"; } isFirst = false; } else { if (idFieldName == "" || (idFieldName != "" && i.name() != idFieldName)) { parameters += appendToken + fieldName + "." + i.name() + "={" + i.name() + "}"; } else { parameters += appendToken + "id(" + fieldName + ")={" + i.name() + "}"; } } } return parameters; }; std::string Kit::append(std::string fieldName, std::string propertiesNamePrefix, Json::Value &properties, std::string appendToken, std::string idFieldName) { std::string parameters = ""; bool isFirst = true; for (Json::ValueIterator i = properties.begin(); i != properties.end(); i++) { if (isFirst) { if (idFieldName == "" || (idFieldName != "" && i.name() != idFieldName)) { parameters += fieldName + "." + i.name() + "={" + propertiesNamePrefix + i.name() + "}"; } else { parameters += "id(" + fieldName + ")={" + propertiesNamePrefix + i.name() + "}"; } isFirst = false; } else { if (idFieldName == "" || (idFieldName != "" && i.name() != idFieldName)) { parameters += appendToken + fieldName + "." + i.name() + "={" + propertiesNamePrefix + i.name() + "}"; } else { parameters += appendToken + "id(" + fieldName + ")={" + propertiesNamePrefix + i.name() + "}"; } } } return parameters; }; }<|fim▁end|>
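
Generated validator modules like _meta.py only pin attribute-specific defaults onto a shared base class. A stand-in Python sketch of the pattern — this AnyValidator is a mock, not the real _plotly_utils implementation:

class AnyValidator:  # mock of _plotly_utils.basevalidators.AnyValidator
    def __init__(self, plotly_name, parent_name, **kwargs):
        self.plotly_name = plotly_name
        self.parent_name = parent_name
        self.array_ok = kwargs.pop("array_ok", False)
        self.edit_type = kwargs.pop("edit_type", None)

class MetaValidator(AnyValidator):
    def __init__(self, plotly_name="meta", parent_name="surface", **kwargs):
        super().__init__(
            plotly_name,
            parent_name,
            array_ok=kwargs.pop("array_ok", True),
            edit_type=kwargs.pop("edit_type", "plot"),
            **kwargs
        )

v = MetaValidator()
assert (v.plotly_name, v.parent_name, v.array_ok, v.edit_type) == ("meta", "surface", True, "plot")
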
size_t begin = nodeOrRelationshipSelf.find_last_of("/"); std::string idString = nodeOrRelationshipSelf.substr(begin + 1, nodeOrRelationshipSelf.length()); std::stringstream sstream;
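The completed `getNodeOrRelationshipID` above takes everything after the last `/` of a node or relationship `self` URL and parses it as an integer. The same logic as a short Python cross-check (the URL shape is an assumption based on Neo4j's legacy REST API):

```python
def get_node_or_relationship_id(self_url: str) -> int:
    """Mirror of Kit::getNodeOrRelationshipID: parse the ID after the last '/'."""
    return int(self_url.rsplit("/", 1)[-1])

# Legacy Neo4j REST payloads carry "self" URLs shaped like this.
assert get_node_or_relationship_id("http://localhost:7474/db/data/node/42") == 42
```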
<|file_name|>command_list.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # Copyright 2016 Criteo # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License.<|fim▁hole|># # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """List Command.""" from __future__ import print_function from biggraphite.cli import command from biggraphite.glob_utils import graphite_glob def list_metrics(accessor, pattern, graphite=True): """Return the list of metrics corresponding to pattern. Exit with error message if None. Args: accessor: Accessor, a connected accessor pattern: string, e.g. my.metric.a or my.metric.**.a Optional Args: graphite: bool, use graphite globbing if True. Returns: iterable(Metric) """ if not graphite: metrics_names = accessor.glob_metric_names(pattern) else: metrics, _ = graphite_glob( accessor, pattern, metrics=True, directories=False ) metrics_names = [metric.name for metric in metrics] for metric in metrics_names: if metric is None: continue yield accessor.get_metric(metric) class CommandList(command.BaseCommand): """List for metrics.""" NAME = "list" HELP = "List metrics." def add_arguments(self, parser): """Add custom arguments. See command.CommandBase. """ parser.add_argument("glob", help="One metric name or globbing on metrics names") parser.add_argument( "--graphite", default=False, action="store_true", help="Enable Graphite globbing", ) def run(self, accessor, opts): """List metrics and directories. See command.CommandBase. """ accessor.connect() if not opts.graphite: directories_names = accessor.glob_directory_names(opts.glob) else: _, directories_names = graphite_glob( accessor, opts.glob, metrics=False, directories=True ) for directory in directories_names: print("d %s" % directory) for metric in list_metrics(accessor, opts.glob, opts.graphite): if metric: print("m %s %s" % (metric.name, metric.metadata.as_string_dict()))<|fim▁end|>
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0
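To see how the `list_metrics` generator in the row above is driven, here is a hedged usage sketch. `FakeAccessor` and `FakeMetric` are hypothetical test doubles, not part of BigGraphite's API; they implement only the two accessor methods the non-graphite path touches, and `list_metrics` itself is assumed to be in scope:

```python
class FakeMetric:
    def __init__(self, name):
        self.name = name

class FakeAccessor:
    """Stand-in exposing only what list_metrics(graphite=False) needs."""

    def __init__(self, names):
        self._names = names

    def glob_metric_names(self, pattern):
        return list(self._names)  # a real accessor would match the pattern

    def get_metric(self, name):
        return FakeMetric(name)

accessor = FakeAccessor(["my.metric.a", "my.metric.b"])
for metric in list_metrics(accessor, "my.metric.*", graphite=False):
    print("m", metric.name)
```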
<|file_name|>generic_utils.py<|end_file_name|><|fim▁begin|># Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Python utilities required by Keras.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import binascii import codecs import marshal import os import re import sys import time import types as python_types import numpy as np import six from tensorflow.python.util import nest from tensorflow.python.util import tf_decorator from tensorflow.python.util import tf_inspect from tensorflow.python.util.tf_export import keras_export _GLOBAL_CUSTOM_OBJECTS = {} @keras_export('keras.utils.CustomObjectScope') class CustomObjectScope(object): """Provides a scope that changes to `_GLOBAL_CUSTOM_OBJECTS` cannot escape. Code within a `with` statement will be able to access custom objects by name. Changes to global custom objects persist within the enclosing `with` statement. At end of the `with` statement, global custom objects are reverted to state at beginning of the `with` statement. Example: Consider a custom object `MyObject` (e.g. a class): ```python with CustomObjectScope({'MyObject':MyObject}): layer = Dense(..., kernel_regularizer='MyObject') # save, load, etc. will recognize custom object by name ``` """ def __init__(self, *args): self.custom_objects = args self.backup = None def __enter__(self): self.backup = _GLOBAL_CUSTOM_OBJECTS.copy() for objects in self.custom_objects: _GLOBAL_CUSTOM_OBJECTS.update(objects) return self def __exit__(self, *args, **kwargs): _GLOBAL_CUSTOM_OBJECTS.clear() _GLOBAL_CUSTOM_OBJECTS.update(self.backup) @keras_export('keras.utils.custom_object_scope') def custom_object_scope(*args): """Provides a scope that changes to `_GLOBAL_CUSTOM_OBJECTS` cannot escape. Convenience wrapper for `CustomObjectScope`. Code within a `with` statement will be able to access custom objects by name. Changes to global custom objects persist within the enclosing `with` statement. At end of the `with` statement, global custom objects are reverted to state at beginning of the `with` statement. Example: Consider a custom object `MyObject` ```python with custom_object_scope({'MyObject':MyObject}): layer = Dense(..., kernel_regularizer='MyObject') # save, load, etc. will recognize custom object by name ``` Arguments: *args: Variable length list of dictionaries of name, class pairs to add to custom objects. Returns: Object of type `CustomObjectScope`. """ return CustomObjectScope(*args) @keras_export('keras.utils.get_custom_objects') def get_custom_objects(): """Retrieves a live reference to the global dictionary of custom objects. Updating and clearing custom objects using `custom_object_scope` is preferred, but `get_custom_objects` can be used to directly access `_GLOBAL_CUSTOM_OBJECTS`. 
Example: ```python get_custom_objects().clear() get_custom_objects()['MyObject'] = MyObject ``` Returns: Global dictionary of names to classes (`_GLOBAL_CUSTOM_OBJECTS`). """ return _GLOBAL_CUSTOM_OBJECTS def serialize_keras_class_and_config(cls_name, cls_config): """Returns the serialization of the class with the given config.""" return {'class_name': cls_name, 'config': cls_config} @keras_export('keras.utils.serialize_keras_object') def serialize_keras_object(instance): _, instance = tf_decorator.unwrap(instance) if instance is None: return None if hasattr(instance, 'get_config'): return serialize_keras_class_and_config(instance.__class__.__name__, instance.get_config()) if hasattr(instance, '__name__'): return instance.__name__ raise ValueError('Cannot serialize', instance) def class_and_config_for_serialized_keras_object( config, module_objects=None, custom_objects=None, printable_module_name='object'): """Returns the class name and config for a serialized keras object.""" if (not isinstance(config, dict) or 'class_name' not in config or 'config' not in config): raise ValueError('Improper config format: ' + str(config)) class_name = config['class_name'] if custom_objects and class_name in custom_objects: cls = custom_objects[class_name] elif class_name in _GLOBAL_CUSTOM_OBJECTS: cls = _GLOBAL_CUSTOM_OBJECTS[class_name] else: module_objects = module_objects or {} cls = module_objects.get(class_name) if cls is None: raise ValueError('Unknown ' + printable_module_name + ': ' + class_name) return (cls, config['config']) @keras_export('keras.utils.deserialize_keras_object') def deserialize_keras_object(identifier, module_objects=None, custom_objects=None, printable_module_name='object'): if identifier is None: return None if isinstance(identifier, dict): # In this case we are dealing with a Keras config dictionary. config = identifier (cls, cls_config) = class_and_config_for_serialized_keras_object( config, module_objects, custom_objects, printable_module_name) if hasattr(cls, 'from_config'): arg_spec = tf_inspect.getfullargspec(cls.from_config) custom_objects = custom_objects or {} if 'custom_objects' in arg_spec.args: return cls.from_config( cls_config, custom_objects=dict( list(_GLOBAL_CUSTOM_OBJECTS.items()) + list(custom_objects.items()))) with CustomObjectScope(custom_objects): return cls.from_config(cls_config) else: # Then `cls` may be a function returning a class. # in this case by convention `config` holds # the kwargs of the function. custom_objects = custom_objects or {} with CustomObjectScope(custom_objects): return cls(**cls_config) elif isinstance(identifier, six.string_types): object_name = identifier if custom_objects and object_name in custom_objects: obj = custom_objects.get(object_name) elif object_name in _GLOBAL_CUSTOM_OBJECTS: obj = _GLOBAL_CUSTOM_OBJECTS[object_name] else: obj = module_objects.get(object_name) if obj is None: raise ValueError('Unknown ' + printable_module_name + ':' + object_name) # Classes passed by name are instantiated with no args, functions are # returned as-is. if tf_inspect.isclass(obj): return obj() return obj else: raise ValueError('Could not interpret serialized ' + printable_module_name + ': ' + identifier) def func_dump(func): """Serializes a user defined function. Arguments: func: the function to serialize. Returns:<|fim▁hole|> A tuple `(code, defaults, closure)`. 
""" if os.name == 'nt': raw_code = marshal.dumps(func.__code__).replace(b'\\', b'/') code = codecs.encode(raw_code, 'base64').decode('ascii') else: raw_code = marshal.dumps(func.__code__) code = codecs.encode(raw_code, 'base64').decode('ascii') defaults = func.__defaults__ if func.__closure__: closure = tuple(c.cell_contents for c in func.__closure__) else: closure = None return code, defaults, closure def func_load(code, defaults=None, closure=None, globs=None): """Deserializes a user defined function. Arguments: code: bytecode of the function. defaults: defaults of the function. closure: closure of the function. globs: dictionary of global objects. Returns: A function object. """ if isinstance(code, (tuple, list)): # unpack previous dump code, defaults, closure = code if isinstance(defaults, list): defaults = tuple(defaults) def ensure_value_to_cell(value): """Ensures that a value is converted to a python cell object. Arguments: value: Any value that needs to be casted to the cell type Returns: A value wrapped as a cell object (see function "func_load") """ def dummy_fn(): # pylint: disable=pointless-statement value # just access it so it gets captured in .__closure__ cell_value = dummy_fn.__closure__[0] if not isinstance(value, type(cell_value)): return cell_value return value if closure is not None: closure = tuple(ensure_value_to_cell(_) for _ in closure) try: raw_code = codecs.decode(code.encode('ascii'), 'base64') except (UnicodeEncodeError, binascii.Error): raw_code = code.encode('raw_unicode_escape') code = marshal.loads(raw_code) if globs is None: globs = globals() return python_types.FunctionType( code, globs, name=code.co_name, argdefs=defaults, closure=closure) def has_arg(fn, name, accept_all=False): """Checks if a callable accepts a given keyword argument. Arguments: fn: Callable to inspect. name: Check if `fn` can be called with `name` as a keyword argument. accept_all: What to return if there is no parameter called `name` but the function accepts a `**kwargs` argument. Returns: bool, whether `fn` accepts a `name` keyword argument. """ arg_spec = tf_inspect.getfullargspec(fn) if accept_all and arg_spec.varkw is not None: return True return name in arg_spec.args @keras_export('keras.utils.Progbar') class Progbar(object): """Displays a progress bar. Arguments: target: Total number of steps expected, None if unknown. width: Progress bar width on screen. verbose: Verbosity mode, 0 (silent), 1 (verbose), 2 (semi-verbose) stateful_metrics: Iterable of string names of metrics that should *not* be averaged over time. Metrics in this list will be displayed as-is. All others will be averaged by the progbar before display. interval: Minimum visual progress update interval (in seconds). unit_name: Display name for step counts (usually "step" or "sample"). 
""" def __init__(self, target, width=30, verbose=1, interval=0.05, stateful_metrics=None, unit_name='step'): self.target = target self.width = width self.verbose = verbose self.interval = interval self.unit_name = unit_name if stateful_metrics: self.stateful_metrics = set(stateful_metrics) else: self.stateful_metrics = set() self._dynamic_display = ((hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()) or 'ipykernel' in sys.modules or 'posix' in sys.modules) self._total_width = 0 self._seen_so_far = 0 # We use a dict + list to avoid garbage collection # issues found in OrderedDict self._values = {} self._values_order = [] self._start = time.time() self._last_update = 0 def update(self, current, values=None): """Updates the progress bar. Arguments: current: Index of current step. values: List of tuples: `(name, value_for_last_step)`. If `name` is in `stateful_metrics`, `value_for_last_step` will be displayed as-is. Else, an average of the metric over time will be displayed. """ values = values or [] for k, v in values: if k not in self._values_order: self._values_order.append(k) if k not in self.stateful_metrics: if k not in self._values: self._values[k] = [v * (current - self._seen_so_far), current - self._seen_so_far] else: self._values[k][0] += v * (current - self._seen_so_far) self._values[k][1] += (current - self._seen_so_far) else: # Stateful metrics output a numeric value. This representation # means "take an average from a single value" but keeps the # numeric formatting. self._values[k] = [v, 1] self._seen_so_far = current now = time.time() info = ' - %.0fs' % (now - self._start) if self.verbose == 1: if (now - self._last_update < self.interval and self.target is not None and current < self.target): return prev_total_width = self._total_width if self._dynamic_display: sys.stdout.write('\b' * prev_total_width) sys.stdout.write('\r') else: sys.stdout.write('\n') if self.target is not None: numdigits = int(np.log10(self.target)) + 1 bar = ('%' + str(numdigits) + 'd/%d [') % (current, self.target) prog = float(current) / self.target prog_width = int(self.width * prog) if prog_width > 0: bar += ('=' * (prog_width - 1)) if current < self.target: bar += '>' else: bar += '=' bar += ('.' 
* (self.width - prog_width)) bar += ']' else: bar = '%7d/Unknown' % current self._total_width = len(bar) sys.stdout.write(bar) if current: time_per_unit = (now - self._start) / current else: time_per_unit = 0 if self.target is not None and current < self.target: eta = time_per_unit * (self.target - current) if eta > 3600: eta_format = '%d:%02d:%02d' % (eta // 3600, (eta % 3600) // 60, eta % 60) elif eta > 60: eta_format = '%d:%02d' % (eta // 60, eta % 60) else: eta_format = '%ds' % eta info = ' - ETA: %s' % eta_format else: if time_per_unit >= 1 or time_per_unit == 0: info += ' %.0fs/%s' % (time_per_unit, self.unit_name) elif time_per_unit >= 1e-3: info += ' %.0fms/%s' % (time_per_unit * 1e3, self.unit_name) else: info += ' %.0fus/%s' % (time_per_unit * 1e6, self.unit_name) for k in self._values_order: info += ' - %s:' % k if isinstance(self._values[k], list): avg = np.mean(self._values[k][0] / max(1, self._values[k][1])) if abs(avg) > 1e-3: info += ' %.4f' % avg else: info += ' %.4e' % avg else: info += ' %s' % self._values[k] self._total_width += len(info) if prev_total_width > self._total_width: info += (' ' * (prev_total_width - self._total_width)) if self.target is not None and current >= self.target: info += '\n' sys.stdout.write(info) sys.stdout.flush() elif self.verbose == 2: if self.target is not None and current >= self.target: numdigits = int(np.log10(self.target)) + 1 count = ('%' + str(numdigits) + 'd/%d') % (current, self.target) info = count + info for k in self._values_order: info += ' - %s:' % k avg = np.mean(self._values[k][0] / max(1, self._values[k][1])) if avg > 1e-3: info += ' %.4f' % avg else: info += ' %.4e' % avg info += '\n' sys.stdout.write(info) sys.stdout.flush() self._last_update = now def add(self, n, values=None): self.update(self._seen_so_far + n, values) def make_batches(size, batch_size): """Returns a list of batch indices (tuples of indices). Arguments: size: Integer, total size of the data to slice into batches. batch_size: Integer, batch size. Returns: A list of tuples of array indices. """ num_batches = int(np.ceil(size / float(batch_size))) return [(i * batch_size, min(size, (i + 1) * batch_size)) for i in range(0, num_batches)] def slice_arrays(arrays, start=None, stop=None): """Slice an array or list of arrays. This takes an array-like, or a list of array-likes, and outputs: - arrays[start:stop] if `arrays` is an array-like - [x[start:stop] for x in arrays] if `arrays` is a list Can also work on list/array of indices: `slice_arrays(x, indices)` Arguments: arrays: Single array or list of arrays. start: can be an integer index (start index) or a list/array of indices stop: integer (stop index); should be None if `start` was a list. Returns: A slice of the array(s). Raises: ValueError: If the value of start is a list and stop is not None. 
""" if arrays is None: return [None] if isinstance(start, list) and stop is not None: raise ValueError('The stop argument has to be None if the value of start ' 'is a list.') elif isinstance(arrays, list): if hasattr(start, '__len__'): # hdf5 datasets only support list objects as indices if hasattr(start, 'shape'): start = start.tolist() return [None if x is None else x[start] for x in arrays] return [ None if x is None else None if not hasattr(x, '__getitem__') else x[start:stop] for x in arrays ] else: if hasattr(start, '__len__'): if hasattr(start, 'shape'): start = start.tolist() return arrays[start] if hasattr(start, '__getitem__'): return arrays[start:stop] return [None] def to_list(x): """Normalizes a list/tensor into a list. If a tensor is passed, we return a list of size 1 containing the tensor. Arguments: x: target object to be normalized. Returns: A list. """ if isinstance(x, list): return x return [x] def object_list_uid(object_list): """Creates a single string from object ids.""" object_list = nest.flatten(object_list) return ', '.join([str(abs(id(x))) for x in object_list]) def to_snake_case(name): intermediate = re.sub('(.)([A-Z][a-z0-9]+)', r'\1_\2', name) insecure = re.sub('([a-z])([A-Z])', r'\1_\2', intermediate).lower() # If the class is private the name starts with "_" which is not secure # for creating scopes. We prefix the name with "private" in this case. if insecure[0] != '_': return insecure return 'private' + insecure def is_all_none(structure): iterable = nest.flatten(structure) # We cannot use Python's `any` because the iterable may return Tensors. for element in iterable: if element is not None: return False return True def check_for_unexpected_keys(name, input_dict, expected_values): unknown = set(input_dict.keys()).difference(expected_values) if unknown: raise ValueError('Unknown entries in {} dictionary: {}. Only expected ' 'following keys: {}'.format(name, list(unknown), expected_values)) def validate_kwargs(kwargs, allowed_kwargs, error_message='Keyword argument not understood:'): """Checks that all keyword arguments are in the set of allowed keys.""" for kwarg in kwargs: if kwarg not in allowed_kwargs: raise TypeError(error_message, kwarg)<|fim▁end|>
<|file_name|>count_islands.py<|end_file_name|><|fim▁begin|>""" A 2d grid map of m rows and n columns is initially filled with water. We may perform an addLand operation which turns the water at position (row, col) into a land. Given a list of positions to operate, count the number of islands after each addLand operation. An island is surrounded by water and is formed by connecting adjacent lands horizontally or vertically. You may assume all four edges of the grid are all surrounded by water. Given m = 3, n = 3, positions = [[0,0], [0,1], [1,2], [2,1]]. Initially, the 2d grid grid is filled with water. (Assume 0 represents water and 1 represents land). 0 0 0 0 0 0 0 0 0 Operation #1: addLand(0, 0) turns the water at grid[0][0] into a land. 1 0 0 0 0 0 Number of islands = 1 0 0 0 Operation #2: addLand(0, 1) turns the water at grid[0][1] into a land. 1 1 0 0 0 0 Number of islands = 1 0 0 0 Operation #3: addLand(1, 2) turns the water at grid[1][2] into a land. 1 1 0 0 0 1 Number of islands = 2 0 0 0 Operation #4: addLand(2, 1) turns the water at grid[2][1] into a land. 1 1 0 0 0 1 Number of islands = 3<|fim▁hole|>class Solution(object): def num_islands2(self, m, n, positions): ans = [] islands = Union() for p in map(tuple, positions): islands.add(p) for dp in (0, 1), (0, -1), (1, 0), (-1, 0): q = (p[0] + dp[0], p[1] + dp[1]) if q in islands.id: islands.unite(p, q) ans += [islands.count] return ans class Union(object): def __init__(self): self.id = {} self.sz = {} self.count = 0 def add(self, p): self.id[p] = p self.sz[p] = 1 self.count += 1 def root(self, i): while i != self.id[i]: self.id[i] = self.id[self.id[i]] i = self.id[i] return i def unite(self, p, q): i, j = self.root(p), self.root(q) if i == j: return if self.sz[i] > self.sz[j]: i, j = j, i self.id[i] = j self.sz[j] += self.sz[i] self.count -= 1<|fim▁end|>
0 1 0 """
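A quick sanity check of the union-find solution above against the worked example in its own docstring, assuming `Solution` and `Union` as defined in the row:

```python
positions = [[0, 0], [0, 1], [1, 2], [2, 1]]
counts = Solution().num_islands2(3, 3, positions)
assert counts == [1, 1, 2, 3]  # matches operations #1 through #4 above
print(counts)
```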
<|file_name|>GithubFile.ts<|end_file_name|><|fim▁begin|>/** * Swaggy Jenkins * Jenkins API clients generated from Swagger / Open API specification * * The version of the OpenAPI document: 1.1.2-pre.0 * Contact: [email protected] * * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). * https://openapi-generator.tech * Do not edit the class manually. */ <|fim▁hole|> "_class"?: string; }<|fim▁end|>
import * as models from './models'; export interface GithubFile { "content"?: models.GithubContent;
<|file_name|>status-completed.js<|end_file_name|><|fim▁begin|>sap.ui.define(['sap/ui/webc/common/thirdparty/base/asset-registries/Icons'], function (Icons) { 'use strict'; const name = "status-completed"; const pathData = "M256 0q53 0 99.5 20T437 75t55 81.5 20 99.5-20 99.5-55 81.5-81.5 55-99.5 20-99.5-20T75 437t-55-81.5T0 256t20-99.5T75 75t81.5-55T256 0zM128 256q-14 0-23 9t-9 23q0 12 9 23l64 64q11 9 23 9 13 0 23-9l192-192q9-11 9-23 0-13-9.5-22.5T384 128q-12 0-23 9L192 307l-41-42q-10-9-23-9z";<|fim▁hole|> const collection = "SAP-icons-v5"; const packageName = "@ui5/webcomponents-icons"; Icons.registerIcon(name, { pathData, ltr, collection, packageName }); var pathDataV4 = { pathData }; return pathDataV4; });<|fim▁end|>
const ltr = false;
<|file_name|>Css2Properties.ts<|end_file_name|><|fim▁begin|>/// <reference path="Css1Properties.ts" /> module xlib.ui.element.elements { <|fim▁hole|> borderCollapse?: any; borderSpacing?: any; bottom?: any; captionSide?: any; clip?: any; content?: any; counterIncrement?: any; counterReset?: any; cursor?: any; direction?: any; emptyCells?: any; left?: any; maxHeight?: any; maxWidth?: any; minHeight?: any; minWidth?: any; orphans?: any; outline?: any; outlineColor?: any; outlineStyle?: any; outlineWidth?: any; overflow?: any; pageBreakAfter?: any; pageBreakBefore?: any; pageBreakInside?: any; position?: any; quotes?: any; right?: any; tableLayout?: any; top?: any; unicodeBidi?: any; visibility?: any; widows?: any; zIndex?: any; } }<|fim▁end|>
export interface Css2Properties extends Css1Properties {
<|file_name|>dom_html_form_element.rs<|end_file_name|><|fim▁begin|>// This file was generated by gir (https://github.com/gtk-rs/gir) // from gir-files (https://github.com/gtk-rs/gir-files.git) // DO NOT EDIT use crate::DOMElement; use crate::DOMEventTarget; use crate::DOMHTMLCollection; use crate::DOMHTMLElement; use crate::DOMNode; use crate::DOMObject; use glib::object::Cast; use glib::object::IsA; use glib::signal::connect_raw; use glib::signal::SignalHandlerId; use glib::translate::*; use std::boxed::Box as Box_; use std::fmt; use std::mem::transmute; glib::wrapper! { pub struct DOMHTMLFormElement(Object<ffi::WebKitDOMHTMLFormElement, ffi::WebKitDOMHTMLFormElementClass>) @extends DOMHTMLElement, DOMElement, DOMNode, DOMObject, @implements DOMEventTarget; match fn { type_ => || ffi::webkit_dom_html_form_element_get_type(), } } pub const NONE_DOMHTML_FORM_ELEMENT: Option<&DOMHTMLFormElement> = None; pub trait DOMHTMLFormElementExt: 'static { #[cfg_attr(feature = "v2_22", deprecated)] #[doc(alias = "webkit_dom_html_form_element_get_accept_charset")] fn accept_charset(&self) -> Option<glib::GString>; #[cfg_attr(feature = "v2_22", deprecated)] #[doc(alias = "webkit_dom_html_form_element_get_action")] fn action(&self) -> Option<glib::GString>; #[cfg_attr(feature = "v2_22", deprecated)] #[doc(alias = "webkit_dom_html_form_element_get_elements")] fn elements(&self) -> Option<DOMHTMLCollection>; #[cfg_attr(feature = "v2_22", deprecated)] #[doc(alias = "webkit_dom_html_form_element_get_encoding")] fn encoding(&self) -> Option<glib::GString>; #[cfg_attr(feature = "v2_22", deprecated)] #[doc(alias = "webkit_dom_html_form_element_get_enctype")] fn enctype(&self) -> Option<glib::GString>; #[cfg_attr(feature = "v2_22", deprecated)] #[doc(alias = "webkit_dom_html_form_element_get_length")] fn length(&self) -> libc::c_long; #[cfg_attr(feature = "v2_22", deprecated)] #[doc(alias = "webkit_dom_html_form_element_get_method")] fn method(&self) -> Option<glib::GString>; #[cfg_attr(feature = "v2_22", deprecated)] #[doc(alias = "webkit_dom_html_form_element_get_name")] fn name(&self) -> Option<glib::GString>; #[cfg_attr(feature = "v2_22", deprecated)] #[doc(alias = "webkit_dom_html_form_element_get_target")] fn target(&self) -> Option<glib::GString>; #[cfg_attr(feature = "v2_22", deprecated)] #[doc(alias = "webkit_dom_html_form_element_reset")] fn reset(&self); #[cfg_attr(feature = "v2_22", deprecated)] #[doc(alias = "webkit_dom_html_form_element_set_accept_charset")] fn set_accept_charset(&self, value: &str); #[cfg_attr(feature = "v2_22", deprecated)] #[doc(alias = "webkit_dom_html_form_element_set_action")] fn set_action(&self, value: &str); #[cfg_attr(feature = "v2_22", deprecated)] #[doc(alias = "webkit_dom_html_form_element_set_encoding")] fn set_encoding(&self, value: &str); #[cfg_attr(feature = "v2_22", deprecated)] #[doc(alias = "webkit_dom_html_form_element_set_enctype")] fn set_enctype(&self, value: &str); #[cfg_attr(feature = "v2_22", deprecated)] #[doc(alias = "webkit_dom_html_form_element_set_method")] fn set_method(&self, value: &str); #[cfg_attr(feature = "v2_22", deprecated)] #[doc(alias = "webkit_dom_html_form_element_set_name")] fn set_name(&self, value: &str); #[cfg_attr(feature = "v2_22", deprecated)] #[doc(alias = "webkit_dom_html_form_element_set_target")] fn set_target(&self, value: &str); #[cfg_attr(feature = "v2_22", deprecated)] #[doc(alias = "webkit_dom_html_form_element_submit")] fn submit(&self); fn connect_property_accept_charset_notify<F: Fn(&Self) + 'static>( &self, f: F, ) -> 
SignalHandlerId; fn connect_property_action_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_elements_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_encoding_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_enctype_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_length_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_method_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_name_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_target_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; } impl<O: IsA<DOMHTMLFormElement>> DOMHTMLFormElementExt for O { fn accept_charset(&self) -> Option<glib::GString> { unsafe { from_glib_full(ffi::webkit_dom_html_form_element_get_accept_charset( self.as_ref().to_glib_none().0, )) } } fn action(&self) -> Option<glib::GString> { unsafe { from_glib_full(ffi::webkit_dom_html_form_element_get_action( self.as_ref().to_glib_none().0, )) } } fn elements(&self) -> Option<DOMHTMLCollection> { unsafe { from_glib_full(ffi::webkit_dom_html_form_element_get_elements( self.as_ref().to_glib_none().0, )) } } fn encoding(&self) -> Option<glib::GString> { unsafe { from_glib_full(ffi::webkit_dom_html_form_element_get_encoding( self.as_ref().to_glib_none().0, )) } } fn enctype(&self) -> Option<glib::GString> { unsafe { from_glib_full(ffi::webkit_dom_html_form_element_get_enctype( self.as_ref().to_glib_none().0, )) } } fn length(&self) -> libc::c_long { unsafe { ffi::webkit_dom_html_form_element_get_length(self.as_ref().to_glib_none().0) } } fn method(&self) -> Option<glib::GString> { unsafe { from_glib_full(ffi::webkit_dom_html_form_element_get_method( self.as_ref().to_glib_none().0, )) } } fn name(&self) -> Option<glib::GString> { unsafe { from_glib_full(ffi::webkit_dom_html_form_element_get_name( self.as_ref().to_glib_none().0, )) } } fn target(&self) -> Option<glib::GString> { unsafe { from_glib_full(ffi::webkit_dom_html_form_element_get_target( self.as_ref().to_glib_none().0, )) } } fn reset(&self) { unsafe { ffi::webkit_dom_html_form_element_reset(self.as_ref().to_glib_none().0); } } fn set_accept_charset(&self, value: &str) { unsafe { ffi::webkit_dom_html_form_element_set_accept_charset( self.as_ref().to_glib_none().0, value.to_glib_none().0, ); } } fn set_action(&self, value: &str) { unsafe { ffi::webkit_dom_html_form_element_set_action( self.as_ref().to_glib_none().0, value.to_glib_none().0, ); } } fn set_encoding(&self, value: &str) { unsafe { ffi::webkit_dom_html_form_element_set_encoding( self.as_ref().to_glib_none().0, value.to_glib_none().0, ); } } fn set_enctype(&self, value: &str) { unsafe { ffi::webkit_dom_html_form_element_set_enctype( self.as_ref().to_glib_none().0, value.to_glib_none().0, ); } } fn set_method(&self, value: &str) { unsafe { ffi::webkit_dom_html_form_element_set_method( self.as_ref().to_glib_none().0, value.to_glib_none().0, ); } } fn set_name(&self, value: &str) { unsafe { ffi::webkit_dom_html_form_element_set_name( self.as_ref().to_glib_none().0, value.to_glib_none().0, ); } } fn set_target(&self, value: &str) { unsafe { ffi::webkit_dom_html_form_element_set_target( self.as_ref().to_glib_none().0, value.to_glib_none().0, ); } } fn submit(&self) { unsafe { ffi::webkit_dom_html_form_element_submit(self.as_ref().to_glib_none().0); } } fn connect_property_accept_charset_notify<F: Fn(&Self) + 
'static>( &self, f: F, ) -> SignalHandlerId { unsafe extern "C" fn notify_accept_charset_trampoline<P, F: Fn(&P) + 'static>( this: *mut ffi::WebKitDOMHTMLFormElement, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) where P: IsA<DOMHTMLFormElement>, { let f: &F = &*(f as *const F); f(&DOMHTMLFormElement::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::accept-charset\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_accept_charset_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } fn connect_property_action_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_action_trampoline<P, F: Fn(&P) + 'static>( this: *mut ffi::WebKitDOMHTMLFormElement, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) where P: IsA<DOMHTMLFormElement>, { let f: &F = &*(f as *const F); f(&DOMHTMLFormElement::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::action\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_action_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } fn connect_property_elements_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_elements_trampoline<P, F: Fn(&P) + 'static>( this: *mut ffi::WebKitDOMHTMLFormElement, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) where P: IsA<DOMHTMLFormElement>, { let f: &F = &*(f as *const F); f(&DOMHTMLFormElement::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::elements\0".as_ptr() as *const _,<|fim▁hole|> notify_elements_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } fn connect_property_encoding_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_encoding_trampoline<P, F: Fn(&P) + 'static>( this: *mut ffi::WebKitDOMHTMLFormElement, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) where P: IsA<DOMHTMLFormElement>, { let f: &F = &*(f as *const F); f(&DOMHTMLFormElement::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::encoding\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_encoding_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } fn connect_property_enctype_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_enctype_trampoline<P, F: Fn(&P) + 'static>( this: *mut ffi::WebKitDOMHTMLFormElement, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) where P: IsA<DOMHTMLFormElement>, { let f: &F = &*(f as *const F); f(&DOMHTMLFormElement::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::enctype\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_enctype_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } fn connect_property_length_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_length_trampoline<P, F: Fn(&P) + 'static>( this: *mut ffi::WebKitDOMHTMLFormElement, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) where P: IsA<DOMHTMLFormElement>, { let f: &F = &*(f as *const F); 
f(&DOMHTMLFormElement::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::length\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_length_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } fn connect_property_method_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_method_trampoline<P, F: Fn(&P) + 'static>( this: *mut ffi::WebKitDOMHTMLFormElement, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) where P: IsA<DOMHTMLFormElement>, { let f: &F = &*(f as *const F); f(&DOMHTMLFormElement::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::method\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_method_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } fn connect_property_name_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_name_trampoline<P, F: Fn(&P) + 'static>( this: *mut ffi::WebKitDOMHTMLFormElement, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) where P: IsA<DOMHTMLFormElement>, { let f: &F = &*(f as *const F); f(&DOMHTMLFormElement::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::name\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_name_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } fn connect_property_target_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_target_trampoline<P, F: Fn(&P) + 'static>( this: *mut ffi::WebKitDOMHTMLFormElement, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) where P: IsA<DOMHTMLFormElement>, { let f: &F = &*(f as *const F); f(&DOMHTMLFormElement::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::target\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_target_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } } impl fmt::Display for DOMHTMLFormElement { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str("DOMHTMLFormElement") } }<|fim▁end|>
Some(transmute::<_, unsafe extern "C" fn()>(
<|file_name|>exec_test.go<|end_file_name|><|fim▁begin|>package exec import ( "bytes" "os/exec" "runtime" "testing" <|fim▁hole|>) func TestRun(t *testing.T) { if runtime.GOOS == "windows" { t.Skip("Windows. Not running this test.") } if _, err := exec.LookPath("echo"); err != nil { t.Skipf("echo not found, skipping test: %s", err) } cmd := exec.Command("echo", "-n", "hello world") ui := new(ui.Mock) err := Run(ui, cmd) if err != nil { t.Fatalf("err: %s", err) } var output bytes.Buffer for _, v := range ui.RawBuf { output.WriteString(v) } if output.String() != "hello world" { t.Fatalf("bad: %s", output.String()) } }<|fim▁end|>
"github.com/hashicorp/otto/ui"