Dataset schema: prompt — large_string (lengths 70 to 991k); completion — large_string (lengths 0 to 1.02k).
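Each record below pairs a prompt — one source file wrapped in <|file_name|>, <|fim▁begin|>, <|fim▁hole|> and <|fim▁end|> markers — with a completion that fills the hole. The following is a minimal sketch of how such a pair can be stitched back into the full source file; the helper is an illustration of the marker layout seen in this dump, not part of the dataset's own tooling:

# Reassemble a fill-in-the-middle (FIM) record into full source text.
# Marker layout, as seen in the records below:
#   <|fim▁begin|> prefix <|fim▁hole|> suffix <|fim▁end|>  plus the completion.
FIM_BEGIN, FIM_HOLE, FIM_END = "<|fim▁begin|>", "<|fim▁hole|>", "<|fim▁end|>"

def reassemble(prompt: str, completion: str) -> str:
    """Return prefix + completion + suffix for one record."""
    if "<|end_file_name|>" in prompt:        # drop the file-name header
        prompt = prompt.split("<|end_file_name|>", 1)[1]
    body = prompt.split(FIM_BEGIN, 1)[1]     # text after the begin marker
    prefix, rest = body.split(FIM_HOLE, 1)   # split at the hole
    suffix = rest.split(FIM_END, 1)[0]       # keep only up to the end marker
    return prefix + completion + suffix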
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for easy-jsend // Project: https://github.com/DeadAlready/easy-jsend // Definitions by: Karl Düüna <https://github.com/DeadAlready/> // Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped declare namespace Express { interface MakePartialInput { model:any; opts: { limit: number; skip: number; }; search: Object; result: any; } <|fim▁hole|> limit?: number; offset: number; count: number; data: any; } export interface Response { success (data?: any, status?: number): void; fail (data: any, status?: number): void; error (err: any, status?: number): void; partial? (data: PartialInput, status?: number): void; makePartial? (data: MakePartialInput): void; } } declare module "easy-jsend" { export function init(conf?:{partial:boolean}): void; }<|fim▁end|>
interface PartialInput {
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! //! This package contains some implementations of machine learning algorithms. //! //!<|fim▁hole|>pub mod common; pub mod eval; pub mod linear;<|fim▁end|>
// #[allow(dead_code)]
<|file_name|>resources.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Resource object code # # Created by: The Resource Compiler for PyQt5 (Qt v5.12.1) # # WARNING! All changes made in this file will be lost! from PyQt5 import QtCore qt_resource_data = b"\ \x00\x00\x04\x0a\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x17\x00\x00\x00\x18\x08\x06\x00\x00\x00\x11\x7c\x66\x75\ \x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\ \x06\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\ \x00\x00\x09\x70\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\ \x00\x9a\x9c\x18\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xd9\x02\x15\ \x16\x11\x2c\x9d\x48\x83\xbb\x00\x00\x03\x8a\x49\x44\x41\x54\x48\ \xc7\xad\x95\x4b\x68\x5c\x55\x18\xc7\x7f\xe7\xdc\x7b\x67\xe6\xce\ \x4c\x66\x26\x49\xd3\x24\x26\xa6\xc6\xf8\x40\x21\xa5\x04\xb3\x28\ \xda\x98\x20\xa5\x0b\xad\x55\xa8\x2b\xc5\x50\x1f\xa0\x6e\x34\x2b\ \x45\x30\x14\x02\xba\x52\x69\x15\x17\x66\x63\x45\x97\x95\xa0\xad\ \x0b\xfb\xc0\x06\x25\xb6\x71\x61\x12\x41\x50\xdb\x2a\x21\xd1\xe2\ \x24\xf3\x9e\xc9\xcc\xbd\xe7\x1c\x17\x35\x43\x1e\x33\x21\xb6\xfd\ \x56\x87\xf3\x9d\xfb\xfb\x1e\xf7\xff\x9d\x23\x8c\x31\x43\x95\xf4\ \x85\x1e\x3f\x3b\x35\xac\xfd\xcc\x43\xdc\xa4\x49\x3b\xfe\x9d\x1d\ \xdb\x7b\x22\x90\x78\xf8\xb2\x28\xa7\xbe\x7d\xc1\x4b\x9d\x79\xdf\ \x18\x15\xe5\x16\x99\x10\x56\xde\x69\xdc\x3f\x22\xfd\xec\xd4\xf0\ \xad\x04\x03\x18\xa3\xa2\x7e\x76\x6a\x58\xde\x68\x2b\xb4\x36\xf8\ \xbe\xc6\x18\x53\xdb\xef\xe7\xfa\xec\xed\x67\x63\x10\x42\x00\xf0\ \xfb\xd5\x65\x2a\x15\x45\xc7\x6d\x0d\x00\xc4\xa2\xc1\xaa\x6f\x0d\ \x3e\x6c\xab\xc2\x1c\x56\xa4\x77\x4b\xb0\xf2\x35\x15\x5f\x21\x85\ \xe0\xc8\x6b\x5f\x92\x2d\x37\x33\x39\xf9\x03\x27\x8e\x1f\xa2\xf7\ \xbe\x9d\x04\x1c\x0b\x37\xe4\xac\xff\xa6\x30\x87\xbd\xba\x00\x6a\ \x06\x79\xe5\xf5\xaf\x89\xd9\x92\xc5\xcc\x0a\xd9\x7c\x19\xcf\xe9\ \xe2\xe4\xa9\x2f\x78\x7c\xff\x01\x72\x85\x0a\x2b\x65\x1f\xa5\x4c\ \xb5\xb2\x55\x16\x80\xbd\x31\xda\xda\x20\x1f\x7d\x3e\xcd\xc2\xfd\ \x59\xa6\x93\x39\x92\xd1\x22\xea\x9b\x16\xce\x9d\x3f\xce\xe0\x83\ \x03\x24\x82\x59\x3a\xdb\x7b\x88\xc7\x82\x68\x63\x58\xc9\xcc\x62\ \x8c\x21\x18\xb0\x6a\xc3\x37\x06\x49\x16\xff\x24\x6b\xa5\x49\xbb\ \x25\xbc\xa2\xa6\x21\xbb\x40\x7f\xdf\x00\x83\xbd\x01\x8e\x3c\xd5\ \x45\xd7\x8e\x6b\x9c\x9c\x98\x25\x1a\xb6\xe8\xbe\x3d\xc2\xdd\x77\ \x44\x48\xc4\x1c\x22\xe1\xeb\x58\x59\xaf\xcf\xd3\x33\x29\x2e\x34\ \x2d\x91\x93\x3e\xbe\x34\x78\x01\xc5\xe2\x61\xc5\xae\x72\x8e\x70\ \xc8\xc2\x0d\x5a\xbc\xf5\xee\x2f\x9c\xfa\x3e\x86\x69\x7a\x8e\xcf\ \x26\xe6\xf9\x63\xa1\x44\xa1\xa4\xd0\xda\x6c\x0d\x2f\x15\x7c\xb4\ \x67\x28\x59\x0a\xcf\xd6\x54\xe2\x06\x13\x87\x2b\x6f\x68\xa6\x27\ \xaf\x31\x32\x36\xc7\xb2\x7f\x17\xef\x7d\x7c\x8c\x33\x67\xcf\x12\ \x70\x24\x4a\x69\xd6\x6a\x46\xd6\xd3\x70\x72\xa9\x82\x67\x34\x45\ \xad\x28\xdb\x1a\x15\x34\x98\xff\x46\xed\xef\x37\x0d\x99\xbf\x4a\ \x3c\x30\x38\xc0\xc8\x4b\xaf\x92\x5a\x9c\xe2\xe0\x23\x6d\x74\xb4\ \xba\x84\x5d\x0b\x29\x45\x7d\xb8\x94\x82\x96\xb6\x10\xf3\xc5\x12\ \x2a\xef\x53\x11\x1a\x63\xad\x3f\x93\x19\x85\xf1\xb1\x77\x58\x5a\ \xf8\x99\x97\x9f\xe9\xa6\x75\x47\x90\xc6\xb8\x43\xd8\xb5\xb6\xce\ \xfc\xfa\xfd\x00\xfb\x3e\xf4\xc8\x05\x35\xba\x5e\xeb\x46\x21\xf9\ \xcf\x0a\xa9\x8c\x87\xe3\x48\xdc\x90\xb5\x6e\x98\x6a\xaa\x65\xf2\ \x52\x92\x43\x2f\x5e\xc2\x8c\x02\x1a\x10\xf5\x07\xac\xc3\x75\x70\ \x83\x92\x80\xb3\xf9\xd0\x26\xf8\x8f\xb3\x29\xc6\x3e\xb8\x8c\x19\ \x35\x75\x6b\x7b\x7e\x3c\xca\x45\x0c\x7e\x49\x31\xf4\x58\x3b\xf7\ \xf6\x34\x90\x88\x39\x04\x1c\x59\x1f\xfe\xdb\xd5\x3c\x5f\x9d\x4b\ 
\x32\xfd\x44\xb2\xba\xd7\xfa\xb6\x60\xcf\xde\x16\xdc\x90\x45\x4c\ \x4a\x2a\x9e\x62\xfe\x4e\xc5\xc8\xc1\x4e\xda\x76\x86\xe8\xe9\x0a\ \xe3\xd8\x92\x58\xd4\xc6\xb2\x44\x6d\x78\x2a\x53\xe1\xca\x7c\x99\ \x63\x5d\xbf\x56\x9d\xbd\x9f\x44\x18\x7a\xba\x95\x27\x0f\xb4\xd3\ \xdc\x18\xc0\xf3\x0d\x52\x40\xd8\xb5\xb0\xa4\x20\x14\xb2\x70\x6c\ \x81\x63\xcb\xaa\x42\xd6\xfd\xb7\xf4\xec\xa3\x06\xa0\x50\x52\xd8\ \x4e\x1b\x7e\x4a\xd3\x31\xf9\x29\xcf\xfe\xd4\x49\x7f\x5f\x13\xfb\ \xfa\x9b\x71\x43\x92\x58\xd4\x21\x18\x90\xac\xde\xb0\x42\x50\x13\ \x58\x33\xf3\x88\x6b\xa1\xfd\x65\x96\xf2\x79\xc6\x43\x7b\xd8\x75\ \x38\xcc\x3d\xdd\xd1\xaa\xcf\x71\xe4\xff\x7f\x91\x56\x33\xaf\xea\ \x37\xe7\xa1\x94\x21\x16\xb5\xd1\x06\x2c\x29\x36\xf5\x72\x9b\x96\ \x95\xc0\xc4\xda\x9d\x78\x83\x43\x53\x22\x80\x65\x09\x1c\xfb\x86\ \xc1\x00\xe7\x25\x70\x14\x48\x6f\x1e\x22\x51\xe3\x75\xd9\xb6\xa5\ \x81\xa3\x32\xb1\xfb\xf4\x0c\x30\xb8\xb1\x82\x9b\xb0\x09\x60\x30\ \xb1\xfb\xf4\xcc\xbf\xa0\xe9\x6e\xae\x5a\xdf\x4b\x81\x00\x00\x00\ \x00\x49\x45\x4e\x44\xae\x42\x60\x82\ " qt_resource_name = b"\ \x00\x07\ \x07\x3b\xe0\xb3\ \x00\x70\ \x00\x6c\x00\x75\x00\x67\x00\x69\x00\x6e\x00\x73\ \x00\x06\ \x07\x5c\x76\xa2\ \x00\x6f\ \x00\x65\x00\x71\x00\x5f\x00\x74\x00\x62\ \x00\x08\ \x0a\x61\x5a\xa7\ \x00\x69\ \x00\x63\x00\x6f\x00\x6e\x00\x2e\x00\x70\x00\x6e\x00\x67\ " qt_resource_struct_v1 = b"\ \x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\ \x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\ \x00\x00\x00\x14\x00\x02\x00\x00\x00\x01\x00\x00\x00\x03\ \x00\x00\x00\x26\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\<|fim▁hole|>\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\ \x00\x00\x00\x00\x00\x00\x00\x00\ \x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\ \x00\x00\x00\x00\x00\x00\x00\x00\ \x00\x00\x00\x14\x00\x02\x00\x00\x00\x01\x00\x00\x00\x03\ \x00\x00\x00\x00\x00\x00\x00\x00\ \x00\x00\x00\x26\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\ \x00\x00\x01\x69\x23\xc2\x96\x6e\ " qt_version = [int(v) for v in QtCore.qVersion().split('.')] if qt_version < [5, 8, 0]: rcc_version = 1 qt_resource_struct = qt_resource_struct_v1 else: rcc_version = 2 qt_resource_struct = qt_resource_struct_v2 def qInitResources(): QtCore.qRegisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data) def qCleanupResources(): QtCore.qUnregisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data) qInitResources()<|fim▁end|>
" qt_resource_struct_v2 = b"\
<|file_name|>factories.rs<|end_file_name|><|fim▁begin|>use std::collections::HashMap; use pueue_lib::network::message::*; use pueue_lib::settings::*; use pueue_lib::state::PUEUE_DEFAULT_GROUP; /// Create an AddMessage for a given command. pub fn add_message(shared: &Shared, command: &str) -> AddMessage { AddMessage { command: command.into(), path: shared.pueue_directory(), envs: HashMap::new(), start_immediately: false,<|fim▁hole|> label: None, print_task_id: false, } }<|fim▁end|>
stashed: false, group: PUEUE_DEFAULT_GROUP.into(), enqueue_at: None, dependencies: vec![],
<|file_name|>box-of-array-of-drop-2.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // run-pass #![allow(overflowing_literals)] // Test that we cleanup dynamic sized Box<[D]> properly when D has a // destructor. // ignore-emscripten no threads support use std::thread; use std::sync::atomic::{AtomicUsize, Ordering}; static LOG: AtomicUsize = AtomicUsize::new(0); struct D(u8);<|fim▁hole|> println!("Dropping {}", self.0); let old = LOG.load(Ordering::SeqCst); LOG.compare_and_swap(old, old << 4 | self.0 as usize, Ordering::SeqCst); } } fn main() { fn die() -> D { panic!("Oh no"); } let g = thread::spawn(|| { let _b1: Box<[D; 4]> = Box::new([D( 1), D( 2), D( 3), D( 4)]); let _b2: Box<[D; 4]> = Box::new([D( 5), D( 6), D( 7), D( 8)]); let _b3: Box<[D; 4]> = Box::new([D( 9), D(10), die(), D(12)]); let _b4: Box<[D; 4]> = Box::new([D(13), D(14), D(15), D(16)]); }); assert!(g.join().is_err()); // When the panic occurs, we will be in the midst of constructing // the input to `_b3`. Therefore, we drop the elements of the // partially filled array first, before we get around to dropping // the elements of `_b1` and _b2`. // Issue 23222: The order in which the elements actually get // dropped is a little funky. See similar notes in nested-vec-3; // in essence, I would not be surprised if we change the ordering // given in `expect` in the future. let expect = 0x__A_9__5_6_7_8__1_2_3_4; let actual = LOG.load(Ordering::SeqCst); assert!(actual == expect, "expect: 0x{:x} actual: 0x{:x}", expect, actual); }<|fim▁end|>
impl Drop for D { fn drop(&mut self) {
<|file_name|>vcgMeshStatsNode.cpp<|end_file_name|><|fim▁begin|><|fim▁hole|> */ // Local #include <vcgNodes/vcgMeshStats/vcgMeshStatsNode.h> #include <vcgNodes/vcgNodeTypeIds.h> // Utils #include <utilities/debugUtils.h> // Function Sets #include <maya/MFnMeshData.h> #include <maya/MFnTypedAttribute.h> #include <maya/MFnEnumAttribute.h> #include <maya/MFnNumericAttribute.h> #include <maya/MFnMatrixAttribute.h> #include <maya/MFnMatrixData.h> // General Includes #include <maya/MGlobal.h> #include <maya/MPlug.h> #include <maya/MDataBlock.h> #include <maya/MDataHandle.h> #include <maya/MIOStream.h> // Macros #define MCheckStatus(status, message) \ if( MStatus::kSuccess != status ) { \ cerr << message << "\n"; \ return status; \ } // Unique Node TypeId // See 'vcgNodeTypeIds.h', add a definition. MTypeId vcgMeshStatsNode::id(VCG_MESH_STATS_NODE_ID); // Use a unique ID. // Node attributes MObject vcgMeshStatsNode::inMesh; MObject vcgMeshStatsNode::outMesh; MObject vcgMeshStatsNode::aEnable; MObject vcgMeshStatsNode::aOutCentreOfMass; MObject vcgMeshStatsNode::aOutMass; vcgMeshStatsNode::vcgMeshStatsNode() {} vcgMeshStatsNode::~vcgMeshStatsNode() {} MStatus vcgMeshStatsNode::compute(const MPlug &plug, MDataBlock &data) // // Description: // This method computes the value of the given output plug based // on the values of the input attributes. // // Arguments: // plug - the plug to compute // data - object that provides access to the attributes for this node // { MStatus status = MS::kSuccess; MDataHandle stateData = data.outputValue(state, &status); MCheckStatus(status, "ERROR getting state"); INFO("vcgMeshStats plug: " << plug.name()); // Check for the HasNoEffect/PassThrough flag on the node. // // (stateData is an enumeration standard in all depend nodes) // // (0 = Normal) // (1 = HasNoEffect/PassThrough) // (2 = Blocking) // ... // if (stateData.asShort() == 1) { MDataHandle inputData = data.inputValue(inMesh, &status); MCheckStatus(status, "ERROR getting inMesh"); MDataHandle outputData = data.outputValue(outMesh, &status); MCheckStatus(status, "ERROR getting outMesh"); // Simply redirect the inMesh to the outMesh for the PassThrough effect outputData.set(inputData.asMesh()); } else { // Check which output attribute we have been asked to // compute. If this node doesn't know how to compute it, // we must return MS::kUnknownParameter if (plug == outMesh || plug == aOutCentreOfMass || plug == aOutMass) { MDataHandle inputData = data.inputValue(inMesh, &status); MCheckStatus(status, "ERROR getting inMesh"); MDataHandle outputData = data.outputValue(outMesh, &status); MCheckStatus(status, "ERROR getting outMesh"); // Copy the inMesh to the outMesh, so you can // perform operations directly on outMesh outputData.set(inputData.asMesh()); // Return if the node is not enabled. 
MDataHandle enableData = data.inputValue(aEnable, &status); MCheckStatus(status, "ERROR getting aEnable"); if (!enableData.asBool()) { return MS::kSuccess; } // Get Mesh object MObject mesh = outputData.asMesh(); // Set the mesh object and component List on the factory fFactory.setMesh(mesh); // Now, perform the vcgMeshStats status = fFactory.doIt(); // Centre Of Mass Output MVector centreOfMass(fFactory.getCentreOfMass()); INFO("compute centre of mass X:" << centreOfMass[0]); INFO("compute centre of mass Y:" << centreOfMass[1]); INFO("compute centre of mass Z:" << centreOfMass[2]); MDataHandle centreOfMassData = data.outputValue(aOutCentreOfMass, &status); MCheckStatus(status, "ERROR getting aOutCentreOfMass"); centreOfMassData.setMVector(centreOfMass); // Mass Output float mass = fFactory.getMass(); INFO("compute mass:" << mass); MDataHandle massData = data.outputValue(aOutMass, &status); MCheckStatus(status, "ERROR getting aOutMass"); massData.setFloat(mass); // Mark the output mesh as clean outputData.setClean(); centreOfMassData.setClean(); massData.setClean(); } // else if // { // MDataHandle inputData = data.inputValue(inMesh, &status); // MCheckStatus(status, "ERROR getting inMesh"); // // // Return if the node is not enabled. // MDataHandle enableData = data.inputValue(aEnable, &status); // MCheckStatus(status, "ERROR getting aEnable"); // if (!enableData.asBool()) // { // return MS::kSuccess; // } // // // Get Mesh object // MObject mesh = inputData.asMesh(); // // // Set the mesh object and component List on the factory // fFactory.setMesh(mesh); // // // Now, perform the vcgMeshStats // status = fFactory.doIt(); // // // Centre Of Mass Output // MVector centreOfMass(fFactory.getCentreOfMass()); // MDataHandle centreOfMassData = data.outputValue(aOutCentreOfMass, &status); // MCheckStatus(status, "ERROR getting aOutCentreOfMass"); // centreOfMassData.setMVector(centreOfMass); // // // Mass Output // float mass = fFactory.getMass(); // MDataHandle massData = data.outputValue(aOutMass, &status); // MCheckStatus(status, "ERROR getting aOutMass"); // massData.setFloat(mass); // // // Mark the output mesh as clean // // } else { status = MS::kUnknownParameter; } } return status; } void *vcgMeshStatsNode::creator() // // Description: // this method exists to give Maya a way to create new objects // of this type. // // Return Value: // a new object of this type // { return new vcgMeshStatsNode(); } MStatus vcgMeshStatsNode::initialize() // // Description: // This method is called to create and initialize all of the attributes // and attribute dependencies for this node type. This is only called // once when the node type is registered with Maya. 
// // Return Values: // MS::kSuccess // MS::kFailure // { MStatus status; MFnTypedAttribute attrFn; MFnNumericAttribute numFn; aEnable = numFn.create("enable", "enable", MFnNumericData::kBoolean, true, &status); CHECK_MSTATUS(status); status = numFn.setDefault(true); status = numFn.setStorable(true); status = numFn.setKeyable(true); status = numFn.setChannelBox(true); status = numFn.setHidden(false); status = addAttribute(aEnable); CHECK_MSTATUS(status); // Centre of Mass aOutCentreOfMass = numFn.createPoint("outCentreOfMass", "outCentreOfMass", &status); // MFnNumericData::k3Float, 0.0, &status); CHECK_MSTATUS(status); // numFn.setDefault(0.0f, 0.0f, 0.0f); // numFn.setKeyable(false); numFn.setStorable(false); numFn.setWritable(false); status = addAttribute(aOutCentreOfMass); CHECK_MSTATUS(status); // Mass aOutMass = numFn.create("outMass", "outMass", MFnNumericData::kFloat, 0.0, &status); CHECK_MSTATUS(status); numFn.setDefault(0.0f); numFn.setKeyable(false); numFn.setStorable(false); numFn.setWritable(false); status = addAttribute(aOutMass); CHECK_MSTATUS(status); // Input Mesh inMesh = attrFn.create("inMesh", "im", MFnMeshData::kMesh); attrFn.setStorable(true); // To be stored during file-save status = addAttribute(inMesh); CHECK_MSTATUS(status); // Output Mesh // Attribute is read-only because it is an output attribute outMesh = attrFn.create("outMesh", "om", MFnMeshData::kMesh); attrFn.setStorable(false); attrFn.setWritable(false); status = addAttribute(outMesh); CHECK_MSTATUS(status); // Attribute affects status = attributeAffects(inMesh, outMesh); status = attributeAffects(aEnable, outMesh); status = attributeAffects(inMesh, aOutCentreOfMass); status = attributeAffects(aEnable, aOutCentreOfMass); status = attributeAffects(inMesh, aOutMass); status = attributeAffects(aEnable, aOutMass); CHECK_MSTATUS(status); return MS::kSuccess; }<|fim▁end|>
/* *
<|file_name|>taxi_pipeline_kubernetes_test.py<|end_file_name|><|fim▁begin|># Copyright 2020 Google LLC. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tests for tfx.orchestration.experimental.kubernetes.examples.taxi_pipeline_kubernetes.""" import os import tensorflow as tf from tfx.orchestration.experimental.kubernetes.examples import taxi_pipeline_kubernetes class TaxiPipelineKubernetesTest(tf.test.TestCase): def setUp(self): super().setUp() self._test_dir = os.path.join( os.environ.get('TEST_UNDECLARED_OUTPUTS_DIR', self.get_temp_dir()), self._testMethodName) def testTaxiPipelineCheckDagConstruction(self):<|fim▁hole|> logical_pipeline = taxi_pipeline_kubernetes.create_pipeline( pipeline_name='Test', pipeline_root=self._test_dir, data_root=self._test_dir, module_file=self._test_dir, serving_model_dir=self._test_dir, beam_pipeline_args=[]) self.assertEqual(9, len(logical_pipeline.components)) if __name__ == '__main__': tf.test.main()<|fim▁end|>
<|file_name|>test_topi_bitserial_conv2d.py<|end_file_name|><|fim▁begin|>import numpy as np import tvm import topi import topi.testing<|fim▁hole|>def generate_quantized_np(shape, bits, out_dtype): min_val = 0 max_val = 1 << bits return np.random.randint(min_val, max_val, size=shape).astype(out_dtype) def verify_bitserial_conv2d_nchw(batch, in_size, in_channel, num_filter, kernel, stride, padding, activation_bits, weight_bits, dorefa): in_height = in_width = in_size input_type = 'uint32' out_dtype = 'int32' with tvm.target.create('llvm'): A = tvm.placeholder((batch, in_channel, in_height, in_width), dtype=input_type, name='A') W = tvm.placeholder((num_filter, in_channel, kernel, kernel), dtype=input_type, name='W') B = topi.nn.bitserial_conv2d(A, W, stride, padding, activation_bits, weight_bits, out_dtype=out_dtype, layout="NCHW", dorefa=dorefa) s = topi.generic.schedule_bitserial_conv2d_nchw([B]) a_shape = get_const_tuple(A.shape) w_shape = get_const_tuple(W.shape) @memoize("topi.tests.test_topi_bitseral_conv2d_nchw") def get_ref_data(): a_np = generate_quantized_np(get_const_tuple(a_shape), activation_bits, input_type) w_np = generate_quantized_np(get_const_tuple(w_shape), weight_bits, input_type) if dorefa: w_ = np.copy(w_np).astype(out_dtype) for x in np.nditer(w_, op_flags=['readwrite']): x[...] = 1 if x == 1 else -1 b_np = topi.testing.conv2d_nchw_python(a_np.astype(out_dtype), w_, stride, padding) else: b_np = topi.testing.conv2d_nchw_python(a_np, w_np, stride, padding) return a_np, w_np, b_np a_np, w_np, b_np = get_ref_data() ctx = tvm.cpu(0) a = tvm.nd.array(a_np, ctx) w = tvm.nd.array(w_np, ctx) b = tvm.nd.array(np.zeros(get_const_tuple(B.shape), dtype=B.dtype), ctx) func = tvm.build(s, [A, W, B], "llvm") func(a, w, b) np.testing.assert_allclose(b.asnumpy(), b_np, rtol=1e-5) def verify_bitserial_conv2d_nhwc(batch, in_size, in_channel, num_filter, kernel, stride, padding, activation_bits, weight_bits, dorefa): in_height = in_width = in_size input_type='uint32' out_dtype='int32' with tvm.target.create('llvm'): A = tvm.placeholder((batch, in_height, in_width, in_channel), dtype=input_type, name='A') W = tvm.placeholder((kernel, kernel, in_channel, num_filter), dtype=input_type, name='W') B = topi.nn.bitserial_conv2d(A, W, stride, padding, activation_bits, weight_bits, out_dtype=out_dtype, layout="NHWC", dorefa=dorefa) s = topi.generic.schedule_bitserial_conv2d_nhwc([B]) a_shape = get_const_tuple(A.shape) w_shape = get_const_tuple(W.shape) @memoize("topi.tests.test_topi_bitseral_conv2d_nhwc") def get_ref_data(): a_np = generate_quantized_np(get_const_tuple(a_shape), activation_bits, input_type) w_np = generate_quantized_np(get_const_tuple(w_shape), weight_bits, input_type) if dorefa: w_ = np.copy(w_np).astype(out_dtype) for x in np.nditer(w_, op_flags=['readwrite']): x[...] 
= 1 if x == 1 else -1 b_np = topi.testing.conv2d_nhwc_python(a_np, w_, stride, padding).astype(out_dtype) else: b_np = topi.testing.conv2d_nhwc_python(a_np, w_np, stride, padding).astype(out_dtype) return a_np, w_np, b_np a_np, w_np, b_np = get_ref_data() ctx = tvm.cpu(0) a = tvm.nd.array(a_np, ctx) w = tvm.nd.array(w_np, ctx) b = tvm.nd.array(np.zeros(get_const_tuple(B.shape), dtype=B.dtype), ctx) func = tvm.build(s, [A, W, B], 'llvm') func(a, w, b) np.testing.assert_allclose(b.asnumpy(), b_np, rtol=1e-5) def test_bitserial_conv2d(): in_size = 56 ic, oc = 64, 64 k = 3 stride = 1 pad = 1 verify_bitserial_conv2d_nchw(1, in_size, ic, oc, k, stride, pad, 1, 1, True) verify_bitserial_conv2d_nchw(1, in_size, ic, oc, k, stride, pad, 2, 1, True) verify_bitserial_conv2d_nchw(1, in_size, ic, oc, k, stride, pad, 1, 1, False) verify_bitserial_conv2d_nchw(1, in_size, ic, oc, k, stride, pad, 2, 1, False) verify_bitserial_conv2d_nchw(1, in_size, ic, oc, k, stride, pad, 2, 2, False) verify_bitserial_conv2d_nhwc(1, in_size, ic, oc, k, stride, pad, 1, 1, True) verify_bitserial_conv2d_nhwc(1, in_size, ic, oc, k, stride, pad, 2, 1, True) verify_bitserial_conv2d_nhwc(1, in_size, ic, oc, k, stride, pad, 1, 1, False) verify_bitserial_conv2d_nhwc(1, in_size, ic, oc, k, stride, pad, 2, 1, False) verify_bitserial_conv2d_nhwc(1, in_size, ic, oc, k, stride, pad, 2, 2, False) if __name__ == "__main__": test_bitserial_conv2d()<|fim▁end|>
from topi.util import get_const_tuple from tvm.contrib.pickle_memoize import memoize
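Both verify helpers above use the same DoReFa-style convention when dorefa is set: 1-bit weights drawn from {0, 1} are re-read as {-1, +1} before the reference convolution is computed. A small NumPy illustration of just that mapping (the shapes here are arbitrary):

import numpy as np

# 1-bit quantized weights, as generate_quantized_np() would produce them
w = np.random.randint(0, 1 << 1, size=(3, 3, 16, 32)).astype('uint32')

# the get_ref_data() reinterpretation: 1 stays +1, 0 becomes -1
w_signed = np.where(w == 1, 1, -1).astype('int32')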
<|file_name|>fichier_libcurl.cpp<|end_file_name|><|fim▁begin|>/*********************************************************************/ // dar - disk archive - a backup/restoration program // Copyright (C) 2002-2022 Denis Corbin // // This program is free software; you can redistribute it and/or // modify it under the terms of the GNU General Public License // as published by the Free Software Foundation; either version 2 // of the License, or (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program; if not, write to the Free Software // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. // // to contact the author, see the AUTHOR file /*********************************************************************/ #include "../my_config.h" extern "C" { #if HAVE_UNISTD_H #include <unistd.h> #endif } // end extern "C" #include "tools.hpp" #include "erreurs.hpp" #include "fichier_libcurl.hpp" using namespace std; namespace libdar { #if defined ( LIBCURL_AVAILABLE ) && defined ( LIBTHREADAR_AVAILABLE ) fichier_libcurl::fichier_libcurl(const shared_ptr<user_interaction> & dialog, const std::string & chemin, mycurl_protocol proto, const shared_ptr<mycurl_easyhandle_node> & handle, gf_mode m, U_I waiting, bool force_permission, U_I permission, bool erase): fichier_global(dialog, m), end_data_mode(false), sub_is_dying(false), ehandle(handle), metadatamode(false), current_offset(0), has_maxpos(false), maxpos(0), append_write(!erase), meta_inbuf(0), wait_delay(waiting), interthread(10, tampon_size), synchronize(2), x_proto(proto) { try { if(!ehandle) throw SRC_BUG; // setting x_ref_handle to carry all options that will always be present for this object ehandle->setopt(CURLOPT_URL, chemin); switch(get_mode()) { case gf_read_only: ehandle->setopt(CURLOPT_WRITEDATA, (void *)this); break; case gf_write_only: ehandle->setopt(CURLOPT_READDATA, (void *)this); ehandle->setopt(CURLOPT_UPLOAD, 1L); break; case gf_read_write: throw Efeature("read-write mode for fichier libcurl"); default: throw SRC_BUG; } switch_to_metadata(true); if(append_write && m != gf_read_only) current_offset = get_size(); } catch(...) { detruit(); throw; } } void fichier_libcurl::change_permission(U_I perm) { struct mycurl_slist headers; string order = tools_printf("site CHMOD %o", perm); switch_to_metadata(true); try { headers.append(order); ehandle->setopt(CURLOPT_QUOTE, headers); ehandle->setopt(CURLOPT_NOBODY, (long)1); try { ehandle->apply(get_pointer(), wait_delay); } catch(...) 
{ ehandle->setopt_default(CURLOPT_QUOTE); ehandle->setopt_default(CURLOPT_NOBODY); throw; } ehandle->setopt_default(CURLOPT_QUOTE); ehandle->setopt_default(CURLOPT_NOBODY); } catch(Egeneric & e) { e.prepend_message("Error while changing file permission on remote repository"); throw; } } infinint fichier_libcurl::get_size() const { double filesize; fichier_libcurl *me = const_cast<fichier_libcurl *>(this); if(me == nullptr) throw SRC_BUG; if(!has_maxpos || get_mode() != gf_read_only) { try { me->switch_to_metadata(true); me->ehandle->setopt(CURLOPT_NOBODY, (long)1); try { me->ehandle->apply(get_pointer(), wait_delay); me->ehandle->getinfo(CURLINFO_CONTENT_LENGTH_DOWNLOAD, &filesize); if(filesize == -1) // file does not exist (or filesize is not known) filesize = 0; me->maxpos = tools_double2infinint(filesize); me->has_maxpos = true; } catch(...) { me->ehandle->setopt_default(CURLOPT_NOBODY); throw; } me->ehandle->setopt_default(CURLOPT_NOBODY); } catch(Egeneric & e) { e.prepend_message("Error while reading file size on a remote repository"); throw; } } return maxpos; } bool fichier_libcurl::skippable(skippability direction, const infinint & amount) { if(get_mode() == gf_read_only) { switch(direction) { case skip_backward: return amount <= current_offset; case skip_forward: if(!has_maxpos) (void)get_size(); if(!has_maxpos) throw SRC_BUG; return current_offset + amount < maxpos; default: throw SRC_BUG; } } else return false; } bool fichier_libcurl::skip(const infinint & pos) { if(pos == current_offset) return true; switch(get_mode()) { case gf_read_only: switch_to_metadata(true); // necessary to stop current subthread and change easy_handle offset current_offset = pos; flush_read(); break; case gf_write_only: throw Erange("fichier_libcurl::skip", string(gettext("libcurl does not allow skipping in write mode"))); case gf_read_write: throw SRC_BUG; default: throw SRC_BUG; } return true; } bool fichier_libcurl::skip_to_eof() { (void)get_size(); if(!has_maxpos) throw SRC_BUG; // get_size() should either throw an exception or set maxpos if(get_mode() == gf_write_only) return true; else return skip(maxpos); } bool fichier_libcurl::skip_relative(S_I x) { if(x >= 0) { infinint tmp(x); tmp += current_offset; return skip(tmp); } else { infinint tmp(-x); if(tmp > current_offset) { skip(0); return false; } else { tmp = current_offset - tmp; return skip(tmp); } } } void fichier_libcurl::inherited_read_ahead(const infinint & amount) { relaunch_thread(amount); } void fichier_libcurl::inherited_truncate(const infinint & pos) { if(pos != get_position()) throw Erange("fichier_libcurl::inherited_truncate", string(gettext("libcurl does not allow truncating at a given position while uploading files"))); } void fichier_libcurl::inherited_sync_write() { // nothing to do because there is no data in transit // except in interthread but it cannot be flushed faster // than the normal multi-thread process does } void fichier_libcurl::inherited_flush_read() { switch_to_metadata(true); interthread.reset(); } void fichier_libcurl::inherited_terminate() { switch(get_mode()) { case gf_write_only: switch_to_metadata(true); break; case gf_read_only: switch_to_metadata(true); break; case gf_read_write: throw SRC_BUG; default: throw SRC_BUG; } } U_I fichier_libcurl::fichier_global_inherited_write(const char *a, U_I size) { U_I wrote = 0; bool full = false; char *ptr; unsigned int ptr_size; switch_to_metadata(false); while(wrote < size && !full) { try { if(!is_running() || sub_is_dying) { join(); throw SRC_BUG; // inherited_run()
should throw an exception // as this is not a normal condition: // we have not yet finished writing // data and child thread has already ended } } catch(Edata & e) { // remote disk is full full = true; } if(!full) { U_I toadd = size - wrote; interthread.get_block_to_feed(ptr, ptr_size); if(toadd <= ptr_size) { memcpy(ptr, a + wrote, toadd); interthread.feed(ptr, toadd); wrote = size; } else { memcpy(ptr, a + wrote, ptr_size); interthread.feed(ptr, ptr_size); wrote += ptr_size; } } } current_offset += wrote; if(current_offset > 0) append_write = true; // we can now ignore the request to erase data // and we now need to swap in write append mode return wrote; } bool fichier_libcurl::fichier_global_inherited_read(char *a, U_I size, U_I & read, std::string & message) { char *ptr; unsigned int ptr_size; U_I room; U_I delta; bool maybe_eof = false; set_subthread(size); read = 0; do { delta = 0; while(read + delta < size && (!sub_is_dying || interthread.is_not_empty())) { interthread.fetch(ptr, ptr_size); room = size - read - delta; if(room >= ptr_size) { memcpy(a + read + delta, ptr, ptr_size); interthread.fetch_recycle(ptr); delta += ptr_size; } else { memcpy(a + read + delta, ptr, room); delta += room; ptr_size -= room; memmove(ptr, ptr + room, ptr_size); interthread.fetch_push_back(ptr, ptr_size); } } current_offset += delta; read += delta; if(read < size // we requested more data than what we got so far && (!has_maxpos // we don't know where EOF is || current_offset < maxpos) // or we have not yet reached EOF && !maybe_eof) // avoid looping endlessly { maybe_eof = (delta == 0); U_I remaining = size - read; // if interthread is empty and thread has not been launched at least once // we can only now switch to data mode because current_offset is now correct. // This will (re-)launch the thread that should fill interthread pipe with data set_subthread(remaining); size = read + remaining; } } while(read < size && (is_running() || interthread.is_not_empty())); return true; } void fichier_libcurl::inherited_run() { try { // parent thread is still suspended shared_ptr<user_interaction> thread_ui = get_pointer(); infinint local_network_block = network_block; // set before unlocking parent thread try { if(!thread_ui) throw Ememory("fichier_libcurl::inherited_run"); subthread_cur_offset = current_offset; } catch(...) { initialize_subthread(); throw; } // after next call, the parent thread will be running initialize_subthread(); if(local_network_block.is_zero()) // network_block may be non null only in read-only mode { do { ehandle->apply(thread_ui, wait_delay, end_data_mode); } while(!end_data_mode || still_data_to_write()); } else // reading by block to avoid having to interrupt libcurl { do { subthread_net_offset = 0; // keeps track of the number of bytes sent to main thread by callback set_range(subthread_cur_offset, local_network_block); try { ehandle->apply(thread_ui, wait_delay); subthread_cur_offset += subthread_net_offset; if(local_network_block < subthread_net_offset) throw SRC_BUG; // we acquired more data from libcurl than expected! local_network_block -= subthread_net_offset; } catch(...) { unset_range(); throw; } unset_range(); } while(!subthread_net_offset.is_zero() // we just grabbed some data in this ending cycle (not reached eof) && !end_data_mode // the current thread has not been asked to stop && !local_network_block.is_zero()); // we still have not gathered all the requested data } } catch(...)
{ finalize_subthread(); throw; } finalize_subthread(); } void fichier_libcurl::initialize_subthread() { sub_is_dying = false; synchronize.wait(); // release calling thread as we, as child thread, do now exist } void fichier_libcurl::finalize_subthread() { sub_is_dying = true; if(!end_data_mode) // natural death, main thread has not required our death { char *ptr; unsigned int ptr_size; switch(get_mode()) { case gf_write_only: // making room in the pile to toggle main thread if // it was suspended waiting for a block to feed interthread.fetch(ptr, ptr_size); interthread.fetch_recycle(ptr); break; case gf_read_only: // sending a zero length block to toggle main thread // if it was suspended waiting for a block to fetch interthread.get_block_to_feed(ptr, ptr_size); interthread.feed(ptr, 0); // means eof to main thread break; case gf_read_write: throw SRC_BUG; default: throw SRC_BUG; } } } void fichier_libcurl::set_range(const infinint & begin, const infinint & range_size) { infinint end_range = begin + range_size - 1; string range = tools_printf("%i-%i", &begin, &end_range); // setting the block size if necessary ehandle->setopt(CURLOPT_RANGE, range); } void fichier_libcurl::unset_range() { ehandle->setopt_default(CURLOPT_RANGE); } void fichier_libcurl::switch_to_metadata(bool mode) { if(mode == metadatamode) return; if(!mode) // data mode {<|fim▁hole|> infinint resume; curl_off_t cur_pos = 0; long do_append; switch(get_mode()) { case gf_read_only: ehandle->setopt(CURLOPT_WRITEFUNCTION, (void*)write_data_callback); if(network_block.is_zero()) { // setting the offset of the next byte to read / write resume = current_offset; resume.unstack(cur_pos); if(!resume.is_zero()) throw Erange("fichier_libcurl::switch_to_metadata", gettext("Integer too large for libcurl, cannot skip at the requested offset in the remote repository")); ehandle->setopt(CURLOPT_RESUME_FROM_LARGE, cur_pos); } // else (network_block != 0) the subthread will make use of range // this parameter is set back to its default in stop_thread() break; case gf_write_only: ehandle->setopt(CURLOPT_READFUNCTION, (void*)read_data_callback); // setting the offset of the next byte to read / write do_append = (append_write ? 1 : 0); ehandle->setopt(CURLOPT_APPEND, do_append); // should also set the CURLOPT_INFILESIZE_LARGE option but file size is not known at this time break; case gf_read_write: throw SRC_BUG; default: throw SRC_BUG; } run_thread(); } else // metadata mode { stop_thread(); meta_inbuf = 0; // we don't care existing metadata remaining in transfer switch(get_mode()) { case gf_read_only: ehandle->setopt(CURLOPT_WRITEFUNCTION, (void*)write_meta_callback); break; case gf_write_only: ehandle->setopt(CURLOPT_READFUNCTION, (void*)read_meta_callback); break; case gf_read_write: throw SRC_BUG; default: throw SRC_BUG; } } metadatamode = mode; } void fichier_libcurl::detruit() { try { terminate(); } catch(...) { // ignore all errors } } void fichier_libcurl::run_thread() { if(is_running()) throw SRC_BUG; if(interthread.is_not_empty()) { char *ptr; unsigned int ptr_size; bool bug = false; // the interthread may keep // a single empty block pending // to be fetched. 
interthread.fetch(ptr, ptr_size); if(ptr_size != 0) bug = true; interthread.fetch_recycle(ptr); if(bug) throw SRC_BUG; // now interthread should be empty if(interthread.is_not_empty()) bug = true; if(bug) throw SRC_BUG; // interthread should have been purged when // previous thread had ended } end_data_mode = false; run(); synchronize.wait(); // waiting for child thread to be ready } void fichier_libcurl::stop_thread() { if(is_running()) { char *ptr = nullptr; unsigned int ptr_size; end_data_mode = true; switch(get_mode()) { case gf_write_only: interthread.get_block_to_feed(ptr, ptr_size); interthread.feed(ptr, 0); // trigger the thread if it was waiting for data from interthread break; case gf_read_only: if(interthread.is_full()) { interthread.fetch(ptr, ptr_size); interthread.fetch_recycle(ptr); // trigger the thread if it was waiting for a free block to fill } break; case gf_read_write: throw SRC_BUG; default: throw SRC_BUG; } } join(); ehandle->setopt_default(CURLOPT_RESUME_FROM_LARGE); } void fichier_libcurl::relaunch_thread(const infinint & block_size) { if(metadatamode) { if(x_proto == proto_ftp) network_block = 0; else network_block = block_size; switch_to_metadata(false); } else { if(sub_is_dying) { stop_thread(); if(x_proto == proto_ftp) network_block = 0; else network_block = block_size; run_thread(); } // else thread is still running so // we cannot change the network_block size } } size_t fichier_libcurl::write_data_callback(char *buffer, size_t size, size_t nmemb, void *userp) { size_t remain = size * nmemb; size_t lu = 0; fichier_libcurl *me = (fichier_libcurl *)(userp); char *ptr; unsigned int ptr_size; if(me == nullptr) throw SRC_BUG; while(!me->end_data_mode && remain > 0) { me->interthread.get_block_to_feed(ptr, ptr_size); if(remain <= ptr_size) { memcpy(ptr, buffer + lu, remain); me->interthread.feed(ptr, remain); lu += remain; remain = 0; } else { memcpy(ptr, buffer + lu, ptr_size); me->interthread.feed(ptr, ptr_size); remain -= ptr_size; lu += ptr_size; } } if(me->network_block > 0) me->subthread_net_offset += lu; if(me->end_data_mode) { if(me->network_block == 0) { if(remain > 0) // not all data could be sent to main thread lu = 0; // to force easy_perform() that called us, to return } else { if(remain > 0) throw SRC_BUG; // main thread should not ask us to stop // until we have provided all the requested data } } return lu; } size_t fichier_libcurl::read_data_callback(char *bufptr, size_t size, size_t nitems, void *userp) { size_t ret; size_t room = size * nitems; fichier_libcurl *me = (fichier_libcurl *)(userp); char *ptr; unsigned int ptr_size; if(me == nullptr) throw SRC_BUG; me->interthread.fetch(ptr, ptr_size); if(ptr_size <= room) { memcpy(bufptr, ptr, ptr_size); me->interthread.fetch_recycle(ptr); ret = ptr_size; } else { memcpy(bufptr, ptr, room); ptr_size -= room; memmove(ptr, ptr + room, ptr_size); me->interthread.fetch_push_back(ptr, ptr_size); ret = room; } return ret; } size_t fichier_libcurl::write_meta_callback(char *buffer, size_t size, size_t nmemb, void *userp) { return size * nmemb; } size_t fichier_libcurl::read_meta_callback(char *bufptr, size_t size, size_t nitems, void *userp) { return 0; } void fichier_libcurl::set_subthread(U_I & needed_bytes) { if(interthread.is_empty()) { // cannot switch to data mode if some data are // in transit because current_offset would be // wrongly positioned in the request made to libcurl if(metadatamode) { if(x_proto == proto_ftp) network_block = 0; // because reading by block leads the control session to // be reset
when ftp is used, leading to a huge number // of connections an FTP server might see as a DoS attempt else { if(has_maxpos && maxpos <= current_offset + needed_bytes) { infinint tmp = maxpos - current_offset; // this sets needed_bytes to the value of tmp: needed_bytes = 0; tmp.unstack(needed_bytes); if(!tmp.is_zero()) throw SRC_BUG; network_block = 0; } else network_block = needed_bytes; } switch_to_metadata(false); } else { if(sub_is_dying) relaunch_thread(needed_bytes); } } bool fichier_libcurl::still_data_to_write() { if(get_mode() == gf_write_only) { if(interthread.is_empty()) return false; else { char *ptr; unsigned int size; interthread.fetch(ptr, size); if(size == 0) { interthread.fetch_recycle(ptr); return false; } else { interthread.fetch_push_back(ptr, size); return true; } } } else return false; } #endif } // end of namespace<|fim▁end|>
<|file_name|>response.py<|end_file_name|><|fim▁begin|>""" VerseBot for Reddit By Matthieu Grieger Continued By Team VerseBot response.py Copyright (c) 2015 Matthieu Grieger (MIT License) """ MAXIMUM_MESSAGE_LENGTH = 4000<|fim▁hole|> """ Class that holds the properties and methods of a comment response. """ def __init__(self, message, parser, link=None): """ Initializes a Response object. """ self.verse_list = list() self.message = message self.parser = parser self.response = "" if link is not None: self.link = link else: self.link = '' def add_verse(self, verse): """ Adds a verse to the verse list. :param verse: Verse to add to the list of verses """ self.verse_list.append(verse) def is_duplicate_verse(self, verse): """ Checks the incoming verse against the verse list to make sure it is not a duplicate. :param verse: Verse to check duplicates for """ for v in self.verse_list: if (v.book == verse.book and v.chapter == verse.chapter and v.verse == verse.verse and v.translation == verse.translation): return True return False def construct_message(self): """ Constructs a message response. """ for verse in self.verse_list: verse.get_contents() if verse.contents is not None: if verse.verse is not None: self.response += ("[**%s %d:%s | %s**](%s)\n\n>" % (verse.book, verse.chapter, verse.verse, verse.translation_title, verse.permalink)) else: self.response += ("[**%s %d | %s**](%s)\n\n>" % (verse.book, verse.chapter, verse.translation_title, verse.permalink)) self.response += verse.contents self.response += "\n\n" if self.response == "": return None else: if self.exceeds_max_length(): self.response = self.generate_overflow_response() # self.response += self.get_comment_footer() return self.response def exceeds_max_length(self): """ Returns true if the current response exceeds the maximum comment length, returns false otherwise. """ return len(self.response) > MAXIMUM_MESSAGE_LENGTH def generate_overflow_response(self): """ Constructs and generates an overflow comment whenever the comment exceeds the character limit set by MAXIMUM_MESSAGE_LENGTH. Instead of posting the contents of the verse(s) in the comment, it links to webpages that contain the contents of the verse(s). """ comment = ("The contents of the verse(s) you quoted exceed the %d " "character limit. Instead, here are links to the " "verse(s)!\n\n" % MAXIMUM_MESSAGE_LENGTH) for verse in self.verse_list: if verse.translation == "JPS": overflow_link = verse.permalink else: if verse.verse is not None: overflow_link = ("https://www.biblegateway.com/passage/" "?search=%s+%s:%s&version=%s" % (verse.book, verse.chapter, verse.verse, verse.translation)) else: overflow_link = verse.permalink if verse.verse is not None: comment += ("- [%s %d:%s (%s)](%s)\n\n" % (verse.book, verse.chapter, verse.verse, verse.translation, overflow_link)) else: comment += ("- [%s %d (%s)](%s)\n\n" % (verse.book, verse.chapter, verse.translation, overflow_link)) return comment ''' def get_comment_footer(self): """ Returns the footer for the comment. 
""" return ("\n***\n[^Code](https://github.com/Team-VerseBot/versebot) ^|" " ^/r/VerseBot ^| [^Contact ^Devs](https://github.com/" "Team-VerseBot/versebot/issues) ^|" " [^Usage](https://github.com/Team-VerseBot/versebot/blob/" "master/README.md) ^|" " [^Changelog](https://github.com/Team-VerseBot/versebot/blob/" "master/CHANGELOG.md) ^|" " [^Stats](http://adamgrieger.com/versebot/) ^|" " [^Set ^a ^Default ^Translation](http://adamgrieger.com/" "versebot#defaults) \n\n" "^All ^texts ^provided ^by [^BibleGateway]" "(http://biblegateway.com) ^and [^Bible ^Hub]" "(http://biblehub.com)^. \n\n" " ^Mistake? ^%(user)s ^can [^edit](/message/compose/" "?to=%(bot)s&subject=edit+request&message={%(link)s} " "Please+enter+your+revised+verse+quotations+below+in+the+usual" "+bracketed+syntax.)" " ^or [^delete](/message/compose/?to=%(bot)s&subject=delete" "+request&message={%(link)s} " "This+action+cannot+be+reversed!) ^this ^comment." % {"user": self.message.author, "bot": REDDIT_USERNAME, "link": self.link}) '''<|fim▁end|>
class Response:
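A short sketch of how the Response class above is driven; the stub verse is an assumption standing in for VerseBot's real parser output (whose get_contents() would fetch text from BibleGateway or Bible Hub):

# Minimal stand-in carrying the attributes construct_message() reads.
class StubVerse:
    book, chapter, verse = "John", 3, "16"
    translation, translation_title = "ESV", "English Standard Version"
    permalink = "https://example.org/john-3-16"
    contents = "For God so loved the world..."
    def get_contents(self):
        pass  # contents already set on this stub

r = Response(message=None, parser=None)
v = StubVerse()
if not r.is_duplicate_verse(v):
    r.add_verse(v)
print(r.construct_message())  # None if no verse produced contents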
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>""" GraphLab Create offers several data structures for data analysis. Concise descriptions of the data structures and their methods are contained in the API documentation, along with a small number of simple examples. For more detailed descriptions and examples, please see the `User Guide <https://dato.com/learn/userguide/>`_, `API Translator <https://dato.com/learn/translator/>`_, `How-Tos <https://dato.com/learn/how-to/>`_, and data science `Gallery <https://dato.com/learn/gallery/>`_. """ ''' Copyright (C) 2015 Dato, Inc. All rights reserved. This software may be modified and distributed under the terms of the BSD license. See the LICENSE file for details.<|fim▁hole|>__all__ = ['sframe', 'sarray', 'sgraph', 'sketch', 'image'] from . import image from . import sframe from . import sarray from . import sgraph from . import sketch<|fim▁end|>
'''
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Gedit Better Defaults plugin # Copyright (C) 2017 Fabio Zendhi Nagao # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import gedit import gtk import re ui_str = """ <ui> <menubar name="MenuBar"> <menu name="EditMenu" action="Edit"> <placeholder name="EditOps_4"> <menuitem action="DuplicateLine" name="Duplicate line"/> </placeholder> </menu> </menubar> </ui> """ class BetterDefaultsWindowHelper: def __init__(self, plugin, window): self._window = window self._plugin = plugin self.install_ui() for view in self._window.get_views(): self.activate_view(view) for doc in self._window.get_documents(): self.activate_doc(doc) self._tab_added_id = self._window.connect("tab_added", self.on_tab_added) # self._key_press_id = self._window.connect("key-press-event", self.on_key_press_event) def deactivate(self): # self._window.disconnect(self._key_press_id) self._window.disconnect(self._tab_added_id) for doc in self._window.get_documents(): self.deactivate_doc(doc) for view in self._window.get_views(): self.deactivate_view(view) self.uninstall_ui() self._window = None self._plugin = None def update_ui(self): pass # # TODO: Use key press and button press events instead of update_ui # doc = self._window.get_active_document() # if doc: # bounds = doc.get_selection_bounds() # if bounds:<|fim▁hole|># content = doc.get_text(*bounds).decode("utf-8") # highlightable = re.compile(r"[\S\{\}\[\]\(\)]+", flags=re.UNICODE) # if highlightable.search(content): # doc.set_search_text(content, gedit.SEARCH_CASE_SENSITIVE) # else: # doc.set_search_text("", gedit.SEARCH_CASE_SENSITIVE) # else: # doc.set_search_text("", gedit.SEARCH_CASE_SENSITIVE) def install_ui(self): manager = self._window.get_ui_manager() self._action_group = gtk.ActionGroup("BetterDefaultsPluginActions") self._action_group.add_actions([ ( "DuplicateLine", None, _("Duplicate line"), "<Ctrl><Shift>d", _("Duplicate Line"), self.duplicate_line ) ]) manager.insert_action_group(self._action_group, -1) self._ui_id = manager.add_ui_from_string(ui_str) def uninstall_ui(self): manager = self._window.get_ui_manager() manager.remove_ui(self._ui_id) manager.remove_action_group(self._action_group) manager.ensure_update() def activate_view(self, view): view.set_smart_home_end(True) view.set_data("vscrolling_helper", (0.0, 0.0)) size_allocate_id = view.connect("size-allocate", self.on_size_allocate) view.set_data("on_size_allocate_id", size_allocate_id) va = view.get_vadjustment() value_change_id = va.connect("value_changed", self.on_value_changed) view.set_data("on_value_changed_id", value_change_id) def deactivate_view(self, view): va = view.get_vadjustment() va.disconnect( view.get_data("on_value_changed_id") ) view.disconnect( view.get_data("on_size_allocate_id") ) view.set_smart_home_end(False) def activate_doc(self, doc): save_id = doc.connect("save", self.on_document_save) doc.set_data("on_save_id", save_id) def 
deactivate_doc(self, doc): doc.disconnect( doc.get_data("on_save_id") ) def on_tab_added(self, w, t): self.activate_view(t.get_view()) self.activate_doc(t.get_document()) def on_document_save(self, doc): piter = doc.get_end_iter() if piter.starts_line(): while piter.backward_char(): if not piter.ends_line(): piter.forward_to_line_end() break doc.delete(piter, doc.get_end_iter()) def on_size_allocate(self, view, allocation): va = view.get_vadjustment() vsz = va.get_upper() + ( va.get_page_size() / 2 ) if va.get_upper() > va.get_page_size(): va.set_upper(vsz) if va.get_value() < view.get_data("vscrolling_helper")[1]: va.set_value(view.get_data("vscrolling_helper")[1]) view.set_data("vscrolling_helper", (vsz, va.get_value())) def on_value_changed(self, adjustment): view = self._window.get_active_view() va = view.get_vadjustment() if( va.get_upper() == view.get_data("vscrolling_helper")[0] ): view.set_data( "vscrolling_helper", ( view.get_data("vscrolling_helper")[0], va.get_value() ) ) def duplicate_line(self, action): doc = self._window.get_active_document() doc.begin_user_action() liter = doc.get_iter_at_mark(doc.get_insert()) liter.set_line_offset(0) riter = doc.get_iter_at_mark(doc.get_insert()) f = riter.forward_line() line = doc.get_slice(liter, riter, True) if f: doc.insert(riter, line) else: doc.insert(riter, '\n' + line) doc.end_user_action() def enclose_selected(self, l, r): doc = self._window.get_active_document() (a, b) = doc.get_selection_bounds() doc.insert(b, r) (a, b) = doc.get_selection_bounds() doc.insert(a, l) def on_key_press_event(self, window, event): doc = self._window.get_active_document() bounds = doc.get_selection_bounds() if bounds: c = event.keyval if c == 123: self.enclose_selected('{', '}') elif c == 91: self.enclose_selected('[', ']') elif c == 40: self.enclose_selected('(', ')') elif c == 60: self.enclose_selected('<', '>') elif c == 65111: self.enclose_selected('"', '"') elif c == 65105: self.enclose_selected("'", "'") if c in [123, 91, 40, 60, 65111, 65105]: return True class BetterDefaultsPlugin(gedit.Plugin): WINDOW_DATA_KEY = "BetterDefaultsPluginWindowData" def __init__(self): gedit.Plugin.__init__(self) def activate(self, window): helper = BetterDefaultsWindowHelper(self, window) window.set_data(self.WINDOW_DATA_KEY, helper) def deactivate(self, window): window.get_data(self.WINDOW_DATA_KEY).deactivate() window.set_data(self.WINDOW_DATA_KEY, None) def update_ui(self, window): window.get_data(self.WINDOW_DATA_KEY).update_ui()<|fim▁end|>
<|file_name|>score_encoding_export.py<|end_file_name|><|fim▁begin|>############################################################################## # # OSIS stands for Open Student Information System. It's an application # designed to manage the core business of higher education institutions, # such as universities, faculties, institutes and professional schools. # The core business involves the administration of students, teachers, # courses, programs and so on. # # Copyright (C) 2015-2018 Université catholique de Louvain (http://www.uclouvain.be) # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of<|fim▁hole|># GNU General Public License for more details. # # A copy of this license - GNU General Public License - is available # at the root of the source code of this program. If not, # see http://www.gnu.org/licenses/. # ############################################################################## from django.http import HttpResponse from openpyxl import Workbook from openpyxl.writer.excel import save_virtual_workbook from openpyxl.styles import Color, Style, PatternFill, Font, colors from django.utils.translation import ugettext_lazy as _ from django.utils import timezone from base import models as mdl from base.models.enums import exam_enrollment_justification_type HEADER = ['academic_year', 'session_title', 'learning_unit', 'program', 'registration_number', 'lastname', 'firstname', 'email', 'numbered_score', 'justification', 'end_date'] JUSTIFICATION_ALIASES = { exam_enrollment_justification_type.ABSENCE_JUSTIFIED : "M", exam_enrollment_justification_type.ABSENCE_UNJUSTIFIED : "S", exam_enrollment_justification_type.CHEATING : "T", } def export_xls(exam_enrollments): workbook = Workbook() worksheet = workbook.active worksheet.append([str(exam_enrollments[0].learning_unit_enrollment.learning_unit_year)]) worksheet.append([str('Session: %s' % exam_enrollments[0].session_exam.number_session)]) worksheet.append([str('')]) __display_creation_date_with_message_about_state(worksheet, row_number=4) __display_warning_about_students_deliberated(worksheet, row_number=5) worksheet.append([str('')]) __display_legends(worksheet) worksheet.append([str('')]) __columns_resizing(worksheet) header_translate_list = [str(_(elem)) for elem in HEADER] worksheet.append(header_translate_list) row_number = 11 for exam_enroll in exam_enrollments: student = exam_enroll.learning_unit_enrollment.student offer = exam_enroll.learning_unit_enrollment.offer person = mdl.person.find_by_id(student.person.id) end_date = __get_session_exam_deadline(exam_enroll) score = None if exam_enroll.score_final is not None: if exam_enroll.session_exam.learning_unit_year.decimal_scores: score = "{0:.2f}".format(exam_enroll.score_final) else: score = "{0:.0f}".format(exam_enroll.score_final) justification = JUSTIFICATION_ALIASES.get(exam_enroll.justification_final, "") worksheet.append([str(exam_enroll.learning_unit_enrollment.learning_unit_year.academic_year), str(exam_enroll.session_exam.number_session), exam_enroll.session_exam.learning_unit_year.acronym, offer.acronym, student.registration_id, person.last_name, person.first_name, person.email, score, str(justification), end_date]) row_number += 1 
__coloring_non_editable(worksheet, row_number, score, exam_enroll.justification_final) lst_exam_enrollments = list(exam_enrollments) number_session = lst_exam_enrollments[0].session_exam.number_session learn_unit_acronym = lst_exam_enrollments[0].session_exam.learning_unit_year.acronym academic_year = lst_exam_enrollments[0].learning_unit_enrollment.learning_unit_year.academic_year filename = "session_%s_%s_%s.xlsx" % (str(academic_year.year), str(number_session), learn_unit_acronym) response = HttpResponse(save_virtual_workbook(workbook), content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet') response['Content-Disposition'] = 'attachment; filename=%s' % filename return response def __columns_resizing(ws): """ Definition of the columns sizes """ col_academic_year = ws.column_dimensions['A'] col_academic_year.width = 18 col_academic_year = ws.column_dimensions['C'] col_academic_year.width = 18 col_academic_year = ws.column_dimensions['E'] col_academic_year.width = 18 col_last_name = ws.column_dimensions['F'] col_last_name.width = 25 col_first_name = ws.column_dimensions['G'] col_first_name.width = 25 col_email = ws.column_dimensions['H'] col_email.width = 30 col_note = ws.column_dimensions['I'] col_note.width = 15 col_note = ws.column_dimensions['J'] col_note.width = 15 col_note = ws.column_dimensions['K'] col_note.width = 15 def __coloring_non_editable(ws, row_number, score, justification): """ Coloring of the non-editable columns """ pattern_fill_grey = PatternFill(patternType='solid', fgColor=Color('C1C1C1')) style_no_modification = Style(fill=pattern_fill_grey) column_number = 1 while column_number < 12: if column_number < 9 or column_number > 10: ws.cell(row=row_number, column=column_number).style = style_no_modification else: if not(score is None and justification is None): ws.cell(row=row_number, column=9).style = style_no_modification ws.cell(row=row_number, column=10).style = style_no_modification column_number += 1 def __display_creation_date_with_message_about_state(ws, row_number): date_format = str(_('date_format')) printing_date = timezone.now() printing_date = printing_date.strftime(date_format) ws.cell(row=row_number, column=1).value = str('%s' % (_('warn_user_data_can_change') % printing_date)) ws.cell(row=row_number, column=1).font = Font(color=colors.RED) def __display_warning_about_students_deliberated(ws, row_number): ws.cell(row=row_number, column=1).value = str(_('students_deliberated_are_not_shown')) ws.cell(row=row_number, column=1).font = Font(color=colors.RED) def __display_legends(ws): ws.append([ str(_('justification')), str(_('justification_values_accepted') % mdl.exam_enrollment.justification_label_authorized()) ]) ws.append([ str(''), str(_('justification_other_values') % justification_other_values()) ]) ws.append([ str(_('numbered_score')), str(_('score_legend') % "0 - 20") ]) def justification_other_values(): return "%s, %s" % (_('unjustified_absence_export_legend'), _('justified_absence_export_legend')) def __get_session_exam_deadline(exam_enroll): date_format = str(_('date_format')) deadline = None session_exam_deadline = mdl.exam_enrollment.get_session_exam_deadline(exam_enroll) if session_exam_deadline: deadline = session_exam_deadline.deadline_tutor_computed if session_exam_deadline.deadline_tutor_computed else\ session_exam_deadline.deadline return deadline.strftime(date_format) if deadline else "-"<|fim▁end|>
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
<|file_name|>events.js<|end_file_name|><|fim▁begin|>'use strict';

/* globals app, ajaxify, define, socket, translator */

define('forum/topic/events', ['forum/topic/browsing', 'forum/topic/postTools', 'forum/topic/threadTools'], function(browsing, postTools, threadTools) {

	var Events = {};

	var events = {
		'event:update_users_in_room': browsing.onUpdateUsersInRoom,
		'user.isOnline': browsing.onUserOnline,

		'event:voted': updatePostVotesAndUserReputation,
		'event:favourited': updateFavouriteCount,

		'event:topic_deleted': toggleTopicDeleteState,
		'event:topic_restored': toggleTopicDeleteState,

		'event:topic_locked': toggleTopicLockedState,
		'event:topic_unlocked': toggleTopicLockedState,

		'event:topic_pinned': toggleTopicPinnedState,
		'event:topic_unpinned': toggleTopicPinnedState,

		'event:topic_moved': onTopicMoved,

		'event:post_edited': onPostEdited,
		'event:post_deleted': togglePostDeleteState,
		'event:post_restored': togglePostDeleteState,

		'posts.favourite': togglePostFavourite,
		'posts.unfavourite': togglePostFavourite,

		'posts.upvote': togglePostVote,
		'posts.downvote': togglePostVote,
		'posts.unvote': togglePostVote,

		'event:topic.toggleReply': toggleReply
	};

	Events.init = function() {
		for (var eventName in events) {
			if (events.hasOwnProperty(eventName)) {
				socket.on(eventName, events[eventName]);
			}
		}
	};

	Events.removeListeners = function() {
		for (var eventName in events) {
			if (events.hasOwnProperty(eventName)) {
				socket.removeListener(eventName, events[eventName]);
			}
		}
	};

	function updatePostVotesAndUserReputation(data) {
		var votes = $('li[data-pid="' + data.post.pid + '"] .votes'),
			reputationElements = $('.reputation[data-uid="' + data.post.uid + '"]');

		votes.html(data.post.votes).attr('data-votes', data.post.votes);
		reputationElements.html(data.user.reputation).attr('data-reputation', data.user.reputation);
	}

	function updateFavouriteCount(data) {
		$('li[data-pid="' + data.post.pid + '"] .favouriteCount').html(data.post.reputation).attr('data-favourites', data.post.reputation);
	}

	function toggleTopicDeleteState(data) {
		threadTools.setLockedState(data);
		threadTools.setDeleteState(data);
	}

	function toggleTopicLockedState(data) {
		threadTools.setLockedState(data);
		app.alertSuccess(data.isLocked ? '[[topic:topic_lock_success]]' : '[[topic:topic_unlock_success]]');
	}

	function toggleTopicPinnedState(data) {
		threadTools.setPinnedState(data);
		app.alertSuccess(data.isPinned ?
'[[topic:topic_pin_success]]' : '[[topic:topic_unpin_success]]'); } function onTopicMoved(data) { if (data && data.tid > 0) { ajaxify.go('topic/' + data.tid); } } function onPostEdited(data) { var editedPostEl = $('#content_' + data.pid), editedPostTitle = $('#topic_title_' + data.pid); if (editedPostTitle.length) { editedPostTitle.fadeOut(250, function() { editedPostTitle.html(data.title); editedPostTitle.fadeIn(250); }); } editedPostEl.fadeOut(250, function() { editedPostEl.html(data.content); editedPostEl.find('img').addClass('img-responsive'); editedPostEl.fadeIn(250); }); } function togglePostDeleteState(data) { var postEl = $('#post-container li[data-pid="' + data.pid + '"]'); if (postEl.length) { postEl.toggleClass('deleted'); var isDeleted = postEl.hasClass('deleted'); postTools.toggle(data.pid, isDeleted); if (!app.isAdmin && parseInt(data.uid, 10) !== parseInt(app.uid, 10)) { if (isDeleted) { translator.translate('[[topic:post_is_deleted]]', function(translated) { postEl.find('.post-content').html(translated); }); } else { postEl.find('.post-content').html(data.content); } }<|fim▁hole|> } } function togglePostFavourite(data) { var favBtn = $('li[data-pid="' + data.post.pid + '"] .favourite'); if (favBtn.length) { favBtn.addClass('btn-warning') .attr('data-favourited', data.isFavourited); var icon = favBtn.find('i'); var className = icon.attr('class'); if (data.isFavourited ? className.indexOf('-o') !== -1 : className.indexOf('-o') === -1) { icon.attr('class', data.isFavourited ? className.replace('-o', '') : className + '-o'); } } } function togglePostVote(data) { var post = $('li[data-pid="' + data.post.pid + '"]'); post.find('.upvote').toggleClass('btn-primary upvoted', data.upvote); post.find('.downvote').toggleClass('btn-primary downvoted', data.downvote); } function toggleReply(data) { $('.thread_active_users [data-uid="' + data.uid + '"]').toggleClass('replying', data.isReplying); } return Events; });<|fim▁end|>
postTools.updatePostCount();
<|file_name|>mu_minorticks.py<|end_file_name|><|fim▁begin|>from kapteyn import maputils from matplotlib import pyplot as plt<|fim▁hole|>fitsobj = maputils.FITSimage("m101.fits") fig = plt.figure() fig.subplots_adjust(left=0.18, bottom=0.10, right=0.90, top=0.90, wspace=0.95, hspace=0.20) for i in range(4): f = fig.add_subplot(2,2,i+1) mplim = fitsobj.Annotatedimage(f) if i == 0: majorgrat = mplim.Graticule() majorgrat.setp_gratline(visible=False) elif i == 1: majorgrat = mplim.Graticule(offsetx=True, unitsx='ARCMIN') majorgrat.setp_gratline(visible=False) elif i == 2: majorgrat = mplim.Graticule(skyout='galactic', unitsx='ARCMIN') majorgrat.setp_gratline(color='b') else: majorgrat = mplim.Graticule(skyout='galactic', offsetx=True, unitsx='ARCMIN') majorgrat.setp_gratline(color='b') majorgrat.setp_tickmark(markersize=10) majorgrat.setp_ticklabel(fontsize=6) majorgrat.setp_plotaxis(plotaxis=[0,1], fontsize=10) minorgrat = mplim.Minortickmarks(majorgrat, 3, 5, color="#aa44dd", markersize=3, markeredgewidth=2) maputils.showall() plt.show()<|fim▁end|>
<|file_name|>computed_margin_test.rs<|end_file_name|><|fim▁begin|>#[macro_use] extern crate yoga; use yoga::{Direction, Node}; use yoga::prelude::*; #[test] fn test_computed_layout_margin() { let mut root = Node::new(); style!(root, Width(100 pt), Height(100 pt), MarginStart(10 %) ); root.calculate_layout(100.0, 100.0, Direction::LTR); assert_eq!(10.0, root.get_layout_margin_left()); assert_eq!(0.0, root.get_layout_margin_right()); root.calculate_layout(100.0, 100.0, Direction::RTL); assert_eq!(0.0, root.get_layout_margin_left()); assert_eq!(10.0, root.get_layout_margin_right());<|fim▁hole|>}<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # <|fim▁hole|><|fim▁end|>
from .config import Config
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>/* * Copyright (c) Meta Platforms, Inc. and affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ use crate::definitions::{Argument, Directive, *}; use crate::errors::SchemaError; use crate::graphql_schema::Schema; use common::{Diagnostic, DiagnosticsResult, Location, SourceLocationKey, WithLocation}; use graphql_syntax::*; use intern::string_key::{Intern, StringKey}; use std::collections::{BTreeMap, HashMap}; fn todo_add_location<T>(error: SchemaError) -> DiagnosticsResult<T> { Err(vec![Diagnostic::error(error, Location::generated())]) } #[derive(Debug)] pub struct InMemorySchema { query_type: Option<ObjectID>, mutation_type: Option<ObjectID>, subscription_type: Option<ObjectID>, type_map: TypeMap, clientid_field: FieldID, strongid_field: FieldID, typename_field: FieldID, fetch_token_field: FieldID, is_fulfilled_field: FieldID, clientid_field_name: StringKey, strongid_field_name: StringKey, typename_field_name: StringKey, fetch_token_field_name: StringKey, is_fulfilled_field_name: StringKey, string_type: Option<Type>, id_type: Option<Type>, unchecked_argument_type_sentinel: Option<TypeReference>, directives: HashMap<StringKey, Directive>, enums: Vec<Enum>, fields: Vec<Field>, input_objects: Vec<InputObject>, interfaces: Vec<Interface>, objects: Vec<Object>, scalars: Vec<Scalar>, unions: Vec<Union>, } impl Schema for InMemorySchema { fn query_type(&self) -> Option<Type> { self.query_type.map(Type::Object) } fn mutation_type(&self) -> Option<Type> { self.mutation_type.map(Type::Object) } fn subscription_type(&self) -> Option<Type> { self.subscription_type.map(Type::Object) } fn clientid_field(&self) -> FieldID { self.clientid_field } fn strongid_field(&self) -> FieldID { self.strongid_field } fn typename_field(&self) -> FieldID { self.typename_field } fn fetch_token_field(&self) -> FieldID { self.fetch_token_field } fn is_fulfilled_field(&self) -> FieldID { self.is_fulfilled_field } fn get_type(&self, type_name: StringKey) -> Option<Type> { self.type_map.get(&type_name).copied() } fn get_directive(&self, name: StringKey) -> Option<&Directive> { self.directives.get(&name) } fn input_object(&self, id: InputObjectID) -> &InputObject { &self.input_objects[id.as_usize()] } fn enum_(&self, id: EnumID) -> &Enum { &self.enums[id.as_usize()] } fn scalar(&self, id: ScalarID) -> &Scalar { &self.scalars[id.as_usize()] } fn field(&self, id: FieldID) -> &Field { &self.fields[id.as_usize()] } fn object(&self, id: ObjectID) -> &Object { &self.objects[id.as_usize()] } fn union(&self, id: UnionID) -> &Union { &self.unions[id.as_usize()] } fn interface(&self, id: InterfaceID) -> &Interface { &self.interfaces[id.as_usize()] } fn get_type_name(&self, type_: Type) -> StringKey { match type_ { Type::Enum(id) => self.enums[id.as_usize()].name, Type::InputObject(id) => self.input_objects[id.as_usize()].name, Type::Interface(id) => self.interfaces[id.as_usize()].name, Type::Object(id) => self.objects[id.as_usize()].name.item, Type::Scalar(id) => self.scalars[id.as_usize()].name, Type::Union(id) => self.unions[id.as_usize()].name, } } fn is_extension_type(&self, type_: Type) -> bool { match type_ { Type::Enum(id) => self.enums[id.as_usize()].is_extension, Type::Interface(id) => self.interfaces[id.as_usize()].is_extension, Type::Object(id) => self.objects[id.as_usize()].is_extension, Type::Scalar(id) => self.scalars[id.as_usize()].is_extension, Type::Union(id) => 
self.unions[id.as_usize()].is_extension, Type::InputObject(_) => false, } } fn is_string(&self, type_: Type) -> bool { type_ == self.string_type.unwrap() } fn is_id(&self, type_: Type) -> bool { type_ == self.id_type.unwrap() } fn named_field(&self, parent_type: Type, name: StringKey) -> Option<FieldID> { // Special case for __typename and __id fields, which should not be in the list of type fields // but should be fine to select. let can_have_typename = matches!( parent_type, Type::Object(_) | Type::Interface(_) | Type::Union(_) ); if can_have_typename { if name == self.typename_field_name { return Some(self.typename_field); } // TODO(inanc): Also check if the parent type is fetchable? if name == self.fetch_token_field_name { return Some(self.fetch_token_field); } if name == self.clientid_field_name { return Some(self.clientid_field); } if name == self.strongid_field_name { return Some(self.strongid_field); } } let fields = match parent_type { Type::Object(id) => { let object = &self.objects[id.as_usize()]; &object.fields } Type::Interface(id) => { let interface = &self.interfaces[id.as_usize()]; &interface.fields } // Unions don't have any fields, but can have selections like __typename // or a field with @fixme_fat_interface Type::Union(_) => return None, _ => panic!( "Cannot get field {} on type '{:?}', this type does not have fields", name, self.get_type_name(parent_type) ), }; fields .iter() .find(|field_id| { let field = &self.fields[field_id.as_usize()]; field.name.item == name }) .cloned() } /// A value that represents a type of unchecked arguments where we don't /// have a type to instantiate the argument. /// /// TODO: we probably want to replace this with a proper `Unknown` type. fn unchecked_argument_type_sentinel(&self) -> &TypeReference { self.unchecked_argument_type_sentinel.as_ref().unwrap() } fn snapshot_print(&self) -> String { let Self { query_type, mutation_type, subscription_type, directives, clientid_field: _clientid_field, strongid_field: _strongid_field, typename_field: _typename_field, fetch_token_field: _fetch_token_field, is_fulfilled_field: _is_fulfilled_field, clientid_field_name: _clientid_field_name, strongid_field_name: _strongid_field_name, typename_field_name: _typename_field_name, fetch_token_field_name: _fetch_token_field_name, is_fulfilled_field_name: _is_fulfilled_field_name, string_type: _string_type,<|fim▁hole|> enums, fields, input_objects, interfaces, objects, scalars, unions, } = self; let ordered_type_map: BTreeMap<_, _> = type_map.iter().collect(); let mut ordered_directives = directives.values().collect::<Vec<&Directive>>(); ordered_directives.sort_by_key(|dir| dir.name.lookup()); format!( r#"Schema {{ query_type: {:#?} mutation_type: {:#?} subscription_type: {:#?} directives: {:#?} type_map: {:#?} enums: {:#?} fields: {:#?} input_objects: {:#?} interfaces: {:#?} objects: {:#?} scalars: {:#?} unions: {:#?} }}"#, query_type, mutation_type, subscription_type, ordered_directives, ordered_type_map, enums, fields, input_objects, interfaces, objects, scalars, unions, ) } fn input_objects<'a>(&'a self) -> Box<dyn Iterator<Item = &'a InputObject> + 'a> { Box::new(self.input_objects.iter()) } fn enums<'a>(&'a self) -> Box<dyn Iterator<Item = &'a Enum> + 'a> { Box::new(self.enums.iter()) } fn scalars<'a>(&'a self) -> Box<dyn Iterator<Item = &'a Scalar> + 'a> { Box::new(self.scalars.iter()) } fn fields<'a>(&'a self) -> Box<dyn Iterator<Item = &'a Field> + 'a> { Box::new(self.fields.iter()) } fn objects<'a>(&'a self) -> Box<dyn Iterator<Item = &'a Object> 
+ 'a> { Box::new(self.objects.iter()) } fn unions<'a>(&'a self) -> Box<dyn Iterator<Item = &'a Union> + 'a> { Box::new(self.unions.iter()) } fn interfaces<'a>(&'a self) -> Box<dyn Iterator<Item = &'a Interface> + 'a> { Box::new(self.interfaces.iter()) } } impl InMemorySchema { pub fn get_directive_mut(&mut self, name: StringKey) -> Option<&mut Directive> { self.directives.get_mut(&name) } pub fn get_type_map(&self) -> impl Iterator<Item = (&StringKey, &Type)> { self.type_map.iter() } pub fn get_directives(&self) -> impl Iterator<Item = &Directive> { self.directives.values() } /// Returns all directives applicable for a given location(Query, Field, etc). pub fn directives_for_location(&self, location: DirectiveLocation) -> Vec<&Directive> { self.directives .values() .filter(|directive| directive.locations.contains(&location)) .collect() } pub fn get_fields(&self) -> impl Iterator<Item = &Field> { self.fields.iter() } pub fn get_interfaces(&self) -> impl Iterator<Item = &Interface> { self.interfaces.iter() } pub fn get_enums(&self) -> impl Iterator<Item = &Enum> { self.enums.iter() } pub fn get_objects(&self) -> impl Iterator<Item = &Object> { self.objects.iter() } pub fn has_directive(&self, directive_name: StringKey) -> bool { self.directives.contains_key(&directive_name) } pub fn has_type(&self, type_name: StringKey) -> bool { self.type_map.contains_key(&type_name) } pub fn add_directive(&mut self, directive: Directive) -> DiagnosticsResult<()> { if self.directives.contains_key(&directive.name) { return todo_add_location(SchemaError::DuplicateDirectiveDefinition(directive.name)); } self.directives.insert(directive.name, directive); Ok(()) } pub fn add_field(&mut self, field: Field) -> DiagnosticsResult<FieldID> { Ok(self.build_field(field)) } pub fn add_enum(&mut self, enum_: Enum) -> DiagnosticsResult<EnumID> { if self.type_map.contains_key(&enum_.name) { return todo_add_location(SchemaError::DuplicateType(enum_.name)); } let index: u32 = self.enums.len().try_into().unwrap(); let name = enum_.name; self.enums.push(enum_); self.type_map.insert(name, Type::Enum(EnumID(index))); Ok(EnumID(index)) } pub fn add_input_object( &mut self, input_object: InputObject, ) -> DiagnosticsResult<InputObjectID> { if self.type_map.contains_key(&input_object.name) { return todo_add_location(SchemaError::DuplicateType(input_object.name)); } let index: u32 = self.input_objects.len().try_into().unwrap(); let name = input_object.name; self.input_objects.push(input_object); self.type_map .insert(name, Type::InputObject(InputObjectID(index))); Ok(InputObjectID(index)) } pub fn add_interface(&mut self, interface: Interface) -> DiagnosticsResult<InterfaceID> { if self.type_map.contains_key(&interface.name) { return todo_add_location(SchemaError::DuplicateType(interface.name)); } let index: u32 = self.interfaces.len().try_into().unwrap(); let name = interface.name; self.interfaces.push(interface); self.type_map .insert(name, Type::Interface(InterfaceID(index))); Ok(InterfaceID(index)) } pub fn add_object(&mut self, object: Object) -> DiagnosticsResult<ObjectID> { if self.type_map.contains_key(&object.name.item) { return Err(vec![Diagnostic::error( SchemaError::DuplicateType(object.name.item), object.name.location, )]); } let index: u32 = self.objects.len().try_into().unwrap(); let name = object.name; self.objects.push(object); self.type_map .insert(name.item, Type::Object(ObjectID(index))); Ok(ObjectID(index)) } pub fn add_scalar(&mut self, scalar: Scalar) -> DiagnosticsResult<ScalarID> { if 
self.type_map.contains_key(&scalar.name) { return todo_add_location(SchemaError::DuplicateType(scalar.name)); } let index: u32 = self.scalars.len().try_into().unwrap(); let name = scalar.name; self.scalars.push(scalar); self.type_map.insert(name, Type::Scalar(ScalarID(index))); Ok(ScalarID(index)) } pub fn add_union(&mut self, union: Union) -> DiagnosticsResult<UnionID> { if self.type_map.contains_key(&union.name) { return todo_add_location(SchemaError::DuplicateType(union.name)); } let index: u32 = self.unions.len().try_into().unwrap(); let name = union.name; self.unions.push(union); self.type_map.insert(name, Type::Union(UnionID(index))); Ok(UnionID(index)) } pub fn add_field_to_interface( &mut self, interface_id: InterfaceID, field_id: FieldID, ) -> DiagnosticsResult<InterfaceID> { let interface = self.interfaces.get_mut(interface_id.as_usize()).unwrap(); interface.fields.push(field_id); Ok(interface_id) } pub fn add_field_to_object( &mut self, obj_id: ObjectID, field_id: FieldID, ) -> DiagnosticsResult<ObjectID> { let object = self.objects.get_mut(obj_id.as_usize()).unwrap(); object.fields.push(field_id); Ok(obj_id) } pub fn add_interface_to_object( &mut self, obj_id: ObjectID, interface_id: InterfaceID, ) -> DiagnosticsResult<ObjectID> { let object = self.objects.get_mut(obj_id.as_usize()).unwrap(); object.interfaces.push(interface_id); Ok(obj_id) } pub fn add_parent_interface_to_interface( &mut self, interface_id: InterfaceID, parent_interface_id: InterfaceID, ) -> DiagnosticsResult<InterfaceID> { let interface = self.interfaces.get_mut(interface_id.as_usize()).unwrap(); interface.interfaces.push(parent_interface_id); Ok(interface_id) } pub fn add_implementing_object_to_interface( &mut self, interface_id: InterfaceID, object_id: ObjectID, ) -> DiagnosticsResult<InterfaceID> { let interface = self.interfaces.get_mut(interface_id.as_usize()).unwrap(); interface.implementing_objects.push(object_id); Ok(interface_id) } pub fn add_member_to_union( &mut self, union_id: UnionID, object_id: ObjectID, ) -> DiagnosticsResult<UnionID> { let union = self.unions.get_mut(union_id.as_usize()).unwrap(); union.members.push(object_id); Ok(union_id) } /// Sets argument definitions for a given input object. /// Any existing argument definitions will be erased. pub fn set_input_object_args( &mut self, input_object_id: InputObjectID, fields: ArgumentDefinitions, ) -> DiagnosticsResult<InputObjectID> { let input_object = self .input_objects .get_mut(input_object_id.as_usize()) .unwrap(); input_object.fields = fields; Ok(input_object_id) } /// Sets argument definitions for a given field. /// Any existing argument definitions on the field will be erased. pub fn set_field_args( &mut self, field_id: FieldID, args: ArgumentDefinitions, ) -> DiagnosticsResult<FieldID> { let field = self.fields.get_mut(field_id.as_usize()).unwrap(); field.arguments = args; Ok(field_id) } /// Replaces the definition of interface type, but keeps the same id. /// Existing references to the old type now reference the replacement. pub fn replace_interface( &mut self, id: InterfaceID, interface: Interface, ) -> DiagnosticsResult<()> { if id.as_usize() >= self.interfaces.len() { return todo_add_location(SchemaError::UnknownTypeID( id.as_usize(), String::from("Interface"), )); } self.type_map .remove(&self.get_type_name(Type::Interface(id))); self.type_map.insert(interface.name, Type::Interface(id)); self.interfaces[id.as_usize()] = interface; Ok(()) } /// Replaces the definition of object type, but keeps the same id. 
/// Existing references to the old type now reference the replacement. pub fn replace_object(&mut self, id: ObjectID, object: Object) -> DiagnosticsResult<()> { if id.as_usize() >= self.objects.len() { return todo_add_location(SchemaError::UnknownTypeID( id.as_usize(), String::from("Object"), )); } self.type_map.remove(&self.get_type_name(Type::Object(id))); self.type_map.insert(object.name.item, Type::Object(id)); self.objects[id.as_usize()] = object; Ok(()) } /// Replaces the definition of enum type, but keeps the same id. /// Existing references to the old type now reference the replacement. pub fn replace_enum(&mut self, id: EnumID, enum_: Enum) -> DiagnosticsResult<()> { if id.as_usize() >= self.enums.len() { return todo_add_location(SchemaError::UnknownTypeID( id.as_usize(), String::from("Enum"), )); } self.type_map.remove(&self.get_type_name(Type::Enum(id))); self.type_map.insert(enum_.name, Type::Enum(id)); self.enums[id.as_usize()] = enum_; Ok(()) } /// Replaces the definition of input object type, but keeps the same id. /// Existing references to the old type now reference the replacement. pub fn replace_input_object( &mut self, id: InputObjectID, input_object: InputObject, ) -> DiagnosticsResult<()> { if id.as_usize() >= self.enums.len() { return todo_add_location(SchemaError::UnknownTypeID( id.as_usize(), String::from("Input Object"), )); } self.type_map .remove(&self.get_type_name(Type::InputObject(id))); self.type_map .insert(input_object.name, Type::InputObject(id)); self.input_objects[id.as_usize()] = input_object; Ok(()) } /// Replaces the definition of union type, but keeps the same id. /// Existing references to the old type now reference the replacement. pub fn replace_union(&mut self, id: UnionID, union: Union) -> DiagnosticsResult<()> { if id.as_usize() >= self.enums.len() { return todo_add_location(SchemaError::UnknownTypeID( id.as_usize(), String::from("Union"), )); } self.type_map.remove(&self.get_type_name(Type::Union(id))); self.type_map.insert(union.name, Type::Union(id)); self.unions[id.as_usize()] = union; Ok(()) } /// Replaces the definition of field, but keeps the same id. /// Existing references to the old field now reference the replacement. pub fn replace_field(&mut self, id: FieldID, field: Field) -> DiagnosticsResult<()> { let id = id.as_usize(); if id >= self.fields.len() { return Err(vec![Diagnostic::error( SchemaError::UnknownTypeID(id, String::from("Field")), field.name.location, )]); } self.fields[id] = field; Ok(()) } /// Creates an uninitialized, invalid schema which can then be added to using the add_* /// methods. Note that we still bake in some assumptions about the clientid and typename /// fields, but in practice this is not an issue. 
pub fn create_uninitialized() -> InMemorySchema { InMemorySchema { query_type: None, mutation_type: None, subscription_type: None, type_map: HashMap::new(), clientid_field: FieldID(0), strongid_field: FieldID(0), typename_field: FieldID(0), fetch_token_field: FieldID(0), is_fulfilled_field: FieldID(0), clientid_field_name: "__id".intern(), strongid_field_name: "strong_id__".intern(), typename_field_name: "__typename".intern(), fetch_token_field_name: "__token".intern(), is_fulfilled_field_name: "is_fulfilled__".intern(), string_type: None, id_type: None, unchecked_argument_type_sentinel: None, directives: HashMap::new(), enums: Vec::new(), fields: Vec::new(), input_objects: Vec::new(), interfaces: Vec::new(), objects: Vec::new(), scalars: Vec::new(), unions: Vec::new(), } } pub fn build( schema_documents: &[SchemaDocument], client_schema_documents: &[SchemaDocument], ) -> DiagnosticsResult<Self> { let schema_definitions: Vec<&TypeSystemDefinition> = schema_documents .iter() .flat_map(|document| &document.definitions) .collect(); let client_definitions: Vec<&TypeSystemDefinition> = client_schema_documents .iter() .flat_map(|document| &document.definitions) .collect(); // Step 1: build the type_map from type names to type keys let mut type_map = HashMap::with_capacity(schema_definitions.len() + client_definitions.len()); let mut next_object_id = 0; let mut next_interface_id = 0; let mut next_union_id = 0; let mut next_input_object_id = 0; let mut next_enum_id = 0; let mut next_scalar_id = 0; let mut field_count = 0; let mut directive_count = 0; for definition in schema_definitions.iter().chain(&client_definitions) { match definition { TypeSystemDefinition::SchemaDefinition { .. } => {} TypeSystemDefinition::DirectiveDefinition { .. } => { directive_count += 1; } TypeSystemDefinition::ObjectTypeDefinition(ObjectTypeDefinition { name, fields, .. }) => { type_map.insert(name.value, Type::Object(ObjectID(next_object_id))); field_count += len_of_option_list(fields); next_object_id += 1; } TypeSystemDefinition::InterfaceTypeDefinition(InterfaceTypeDefinition { name, fields, .. }) => { type_map.insert(name.value, Type::Interface(InterfaceID(next_interface_id))); field_count += len_of_option_list(fields); next_interface_id += 1; } TypeSystemDefinition::UnionTypeDefinition(UnionTypeDefinition { name, .. }) => { type_map.insert(name.value, Type::Union(UnionID(next_union_id))); next_union_id += 1; } TypeSystemDefinition::InputObjectTypeDefinition(InputObjectTypeDefinition { name, .. }) => { type_map.insert( name.value, Type::InputObject(InputObjectID(next_input_object_id)), ); next_input_object_id += 1; } TypeSystemDefinition::EnumTypeDefinition(EnumTypeDefinition { name, .. }) => { type_map.insert(name.value, Type::Enum(EnumID(next_enum_id))); next_enum_id += 1; } TypeSystemDefinition::ScalarTypeDefinition(ScalarTypeDefinition { name, .. }) => { type_map.insert(name.value, Type::Scalar(ScalarID(next_scalar_id))); next_scalar_id += 1; } TypeSystemDefinition::ObjectTypeExtension { .. } => {} TypeSystemDefinition::InterfaceTypeExtension { .. } => {} TypeSystemDefinition::SchemaExtension { .. } => todo!("SchemaExtension"), TypeSystemDefinition::EnumTypeExtension { .. } => todo!("EnumTypeExtension"), TypeSystemDefinition::UnionTypeExtension { .. } => todo!("UnionTypeExtension"), TypeSystemDefinition::InputObjectTypeExtension { .. } => { todo!("InputObjectTypeExtension") } TypeSystemDefinition::ScalarTypeExtension { .. 
} => todo!("ScalarTypeExtension"), } } // Step 2: define operation types, directives, and types let string_type = *type_map .get(&"String".intern()) .expect("Missing String type"); let id_type = *type_map.get(&"ID".intern()).expect("Missing ID type"); let unchecked_argument_type_sentinel = Some(TypeReference::Named( *type_map .get(&"Boolean".intern()) .expect("Missing Boolean type"), )); let mut schema = InMemorySchema { query_type: None, mutation_type: None, subscription_type: None, type_map, clientid_field: FieldID(0), // dummy value, overwritten later strongid_field: FieldID(0), // dummy value, overwritten later typename_field: FieldID(0), // dummy value, overwritten later fetch_token_field: FieldID(0), // dummy value, overwritten later is_fulfilled_field: FieldID(0), // dummy value, overwritten later clientid_field_name: "__id".intern(), strongid_field_name: "strong_id__".intern(), typename_field_name: "__typename".intern(), fetch_token_field_name: "__token".intern(), is_fulfilled_field_name: "is_fulfilled__".intern(), string_type: Some(string_type), id_type: Some(id_type), unchecked_argument_type_sentinel, directives: HashMap::with_capacity(directive_count), enums: Vec::with_capacity(next_enum_id.try_into().unwrap()), fields: Vec::with_capacity(field_count), input_objects: Vec::with_capacity(next_input_object_id.try_into().unwrap()), interfaces: Vec::with_capacity(next_interface_id.try_into().unwrap()), objects: Vec::with_capacity(next_object_id.try_into().unwrap()), scalars: Vec::with_capacity(next_scalar_id.try_into().unwrap()), unions: Vec::with_capacity(next_union_id.try_into().unwrap()), }; for document in schema_documents { for definition in &document.definitions { schema.add_definition(definition, &document.location.source_location(), false)?; } } for document in client_schema_documents { for definition in &document.definitions { schema.add_definition(definition, &document.location.source_location(), true)?; } } for document in schema_documents.iter().chain(client_schema_documents) { for definition in &document.definitions { if let TypeSystemDefinition::ObjectTypeDefinition(ObjectTypeDefinition { name, interfaces, .. }) = definition { let object_id = match schema.type_map.get(&name.value) { Some(Type::Object(id)) => id, _ => unreachable!("Must be an Object type"), }; for interface in interfaces { let type_ = schema.type_map.get(&interface.value).unwrap(); match type_ { Type::Interface(id) => { let interface = schema.interfaces.get_mut(id.as_usize()).unwrap(); interface.implementing_objects.push(*object_id) } _ => unreachable!("Must be an interface"), } } } } } schema.load_defaults(); Ok(schema) } fn load_defaults(&mut self) { self.load_default_root_types(); self.load_default_typename_field(); self.load_default_fetch_token_field(); self.load_default_clientid_field(); self.load_default_strongid_field(); self.load_default_is_fulfilled_field(); } // In case the schema doesn't define a query, mutation or subscription // type, but there is a Query, Mutation, or Subscription object type // defined, default to those. // This is not standard GraphQL behavior, and we might want to remove // this at some point. 
fn load_default_root_types(&mut self) { if self.query_type.is_none() { if let Some(Type::Object(id)) = self.type_map.get(&"Query".intern()) { self.query_type = Some(*id); } } if self.mutation_type.is_none() { if let Some(Type::Object(id)) = self.type_map.get(&"Mutation".intern()) { self.mutation_type = Some(*id); } } if self.subscription_type.is_none() { if let Some(Type::Object(id)) = self.type_map.get(&"Subscription".intern()) { self.subscription_type = Some(*id); } } } fn load_default_typename_field(&mut self) { let string_type = *self .type_map .get(&"String".intern()) .expect("Missing String type"); let typename_field_id = self.fields.len(); self.typename_field = FieldID(typename_field_id.try_into().unwrap()); self.fields.push(Field { name: WithLocation::generated(self.typename_field_name), is_extension: false, arguments: ArgumentDefinitions::new(Default::default()), type_: TypeReference::NonNull(Box::new(TypeReference::Named(string_type))), directives: Vec::new(), parent_type: None, description: None, }); } fn load_default_fetch_token_field(&mut self) { let id_type = *self.type_map.get(&"ID".intern()).expect("Missing ID type"); let fetch_token_field_id = self.fields.len(); self.fetch_token_field = FieldID(fetch_token_field_id.try_into().unwrap()); self.fields.push(Field { name: WithLocation::generated(self.fetch_token_field_name), is_extension: false, arguments: ArgumentDefinitions::new(Default::default()), type_: TypeReference::NonNull(Box::new(TypeReference::Named(id_type))), directives: Vec::new(), parent_type: None, description: None, }); } fn load_default_clientid_field(&mut self) { let id_type = *self.type_map.get(&"ID".intern()).expect("Missing ID type"); let clientid_field_id = self.fields.len(); self.clientid_field = FieldID(clientid_field_id.try_into().unwrap()); self.fields.push(Field { name: WithLocation::generated(self.clientid_field_name), is_extension: true, arguments: ArgumentDefinitions::new(Default::default()), type_: TypeReference::NonNull(Box::new(TypeReference::Named(id_type))), directives: Vec::new(), parent_type: None, description: None, }); } fn load_default_strongid_field(&mut self) { let id_type = *self.type_map.get(&"ID".intern()).expect("Missing ID type"); let strongid_field_id = self.fields.len(); self.strongid_field = FieldID(strongid_field_id.try_into().unwrap()); self.fields.push(Field { name: WithLocation::generated(self.strongid_field_name), is_extension: true, arguments: ArgumentDefinitions::new(Default::default()), type_: TypeReference::Named(id_type), directives: Vec::new(), parent_type: None, description: None, }); } fn load_default_is_fulfilled_field(&mut self) { let string_type = *self .type_map .get(&"String".intern()) .expect("Missing String type"); let is_fulfilled_field_id = self.fields.len(); self.is_fulfilled_field = FieldID(is_fulfilled_field_id.try_into().unwrap()); self.fields.push(Field { name: WithLocation::generated(self.is_fulfilled_field_name), is_extension: true, arguments: ArgumentDefinitions::new(vec![Argument { name: "name".intern(), type_: TypeReference::NonNull(Box::new(TypeReference::Named(string_type))), default_value: None, description: None, }]), type_: TypeReference::NonNull(Box::new(TypeReference::Named(string_type))), directives: Vec::new(), parent_type: None, description: None, }); } /// Add additional object extensions to the schema after its initial /// creation. 
pub fn add_object_type_extension( &mut self, object_extension: ObjectTypeExtension, location_key: SourceLocationKey, is_extension: bool, ) -> DiagnosticsResult<()> { self.add_definition( &TypeSystemDefinition::ObjectTypeExtension(object_extension), &location_key, is_extension, ) } /// Add additional interface extensions to the schema after its initial /// creation. pub fn add_interface_type_extension( &mut self, interface_extension: InterfaceTypeExtension, location_key: SourceLocationKey, is_extension: bool, ) -> DiagnosticsResult<()> { self.add_definition( &TypeSystemDefinition::InterfaceTypeExtension(interface_extension), &location_key, is_extension, ) } fn add_definition( &mut self, definition: &TypeSystemDefinition, location_key: &SourceLocationKey, is_extension: bool, ) -> DiagnosticsResult<()> { match definition { TypeSystemDefinition::SchemaDefinition(SchemaDefinition { operation_types, directives: _directives, }) => { for OperationTypeDefinition { operation, type_ } in &operation_types.items { let operation_id = self.build_object_id(type_.value)?; match operation { OperationType::Query => { if let Some(prev_query_type) = self.query_type { return Err(vec![Diagnostic::error( SchemaError::DuplicateOperationDefinition( *operation, type_.value, expect_object_type_name(&self.type_map, prev_query_type), ), Location::new(*location_key, type_.span), )]); } else { self.query_type = Some(operation_id); } } OperationType::Mutation => { if let Some(prev_mutation_type) = self.mutation_type { return Err(vec![Diagnostic::error( SchemaError::DuplicateOperationDefinition( *operation, type_.value, expect_object_type_name(&self.type_map, prev_mutation_type), ), Location::new(*location_key, type_.span), )]); } else { self.mutation_type = Some(operation_id); } } OperationType::Subscription => { if let Some(prev_subscription_type) = self.subscription_type { return Err(vec![Diagnostic::error( SchemaError::DuplicateOperationDefinition( *operation, type_.value, expect_object_type_name( &self.type_map, prev_subscription_type, ), ), Location::new(*location_key, type_.span), )]); } else { self.subscription_type = Some(operation_id); } } } } } TypeSystemDefinition::DirectiveDefinition(DirectiveDefinition { name, arguments, repeatable, locations, description, }) => { if self.directives.contains_key(&name.value) { let str_name = name.value.lookup(); if str_name != "skip" && str_name != "include" { // TODO(T63941319) @skip and @include directives are duplicated in our schema return Err(vec![Diagnostic::error( SchemaError::DuplicateDirectiveDefinition(name.value), Location::new(*location_key, name.span), )]); } } let arguments = self.build_arguments(arguments)?; self.directives.insert( name.value, Directive { name: name.value, arguments, locations: locations.clone(), repeatable: *repeatable, is_extension, description: description.as_ref().map(|node| node.value), }, ); } TypeSystemDefinition::ObjectTypeDefinition(ObjectTypeDefinition { name, interfaces, fields, directives, }) => { let parent_id = Type::Object(ObjectID(self.objects.len() as u32)); let fields = if is_extension { self.build_extend_fields( fields, &mut HashMap::with_capacity(len_of_option_list(fields)), *location_key, Some(parent_id), )? } else { self.build_fields(fields, *location_key, Some(parent_id))? 
}; let interfaces = interfaces .iter() .map(|name| self.build_interface_id(name, location_key)) .collect::<DiagnosticsResult<Vec<_>>>()?; let directives = self.build_directive_values(directives); self.objects.push(Object { name: WithLocation::new(Location::new(*location_key, name.span), name.value), fields, is_extension, interfaces, directives, description: None, }); } TypeSystemDefinition::InterfaceTypeDefinition(InterfaceTypeDefinition { name, interfaces, directives, fields, }) => { let parent_id = Type::Interface(InterfaceID(self.interfaces.len() as u32)); let fields = if is_extension { self.build_extend_fields( fields, &mut HashMap::with_capacity(len_of_option_list(fields)), *location_key, Some(parent_id), )? } else { self.build_fields(fields, *location_key, Some(parent_id))? }; let interfaces = interfaces .iter() .map(|name| self.build_interface_id(name, location_key)) .collect::<DiagnosticsResult<Vec<_>>>()?; let directives = self.build_directive_values(directives); self.interfaces.push(Interface { name: name.value, implementing_objects: vec![], is_extension, fields, directives, interfaces, description: None, }); } TypeSystemDefinition::UnionTypeDefinition(UnionTypeDefinition { name, directives, members, }) => { let members = members .iter() .map(|name| self.build_object_id(name.value)) .collect::<DiagnosticsResult<Vec<_>>>()?; let directives = self.build_directive_values(directives); self.unions.push(Union { name: name.value, is_extension, members, directives, description: None, }); } TypeSystemDefinition::InputObjectTypeDefinition(InputObjectTypeDefinition { name, fields, directives, }) => { let fields = self.build_arguments(fields)?; let directives = self.build_directive_values(directives); self.input_objects.push(InputObject { name: name.value, fields, directives, description: None, }); } TypeSystemDefinition::EnumTypeDefinition(EnumTypeDefinition { name, directives, values, }) => { let directives = self.build_directive_values(directives); let values = if let Some(values) = values { values .items .iter() .map(|enum_def| EnumValue { value: enum_def.name.value, directives: self.build_directive_values(&enum_def.directives), }) .collect() } else { Vec::new() }; self.enums.push(Enum { name: name.value, is_extension, values, directives, description: None, }); } TypeSystemDefinition::ScalarTypeDefinition(ScalarTypeDefinition { name, directives, }) => { let directives = self.build_directive_values(directives); self.scalars.push(Scalar { name: name.value, is_extension, directives, description: None, }) } TypeSystemDefinition::ObjectTypeExtension(ObjectTypeExtension { name, interfaces, fields, directives, }) => match self.type_map.get(&name.value).cloned() { Some(Type::Object(id)) => { let index = id.as_usize(); let obj = self.objects.get(index).ok_or_else(|| { vec![Diagnostic::error( SchemaError::ExtendUndefinedType(name.value), Location::new(*location_key, name.span), )] })?; let field_ids = &obj.fields; let mut existing_fields = HashMap::with_capacity(field_ids.len() + len_of_option_list(fields)); for field_id in field_ids { let field_name = self.fields[field_id.as_usize()].name; existing_fields.insert(field_name.item, field_name.location); } let client_fields = self.build_extend_fields( fields, &mut existing_fields, *location_key, Some(Type::Object(id)), )?; self.objects[index].fields.extend(client_fields); let built_interfaces = interfaces .iter() .map(|name| self.build_interface_id(name, location_key)) .collect::<DiagnosticsResult<Vec<_>>>()?; extend_without_duplicates( &mut 
self.objects[index].interfaces, built_interfaces, ); let built_directives = self.build_directive_values(directives); extend_without_duplicates( &mut self.objects[index].directives, built_directives, ); } _ => { return Err(vec![Diagnostic::error( SchemaError::ExtendUndefinedType(name.value), Location::new(*location_key, name.span), )]); } }, TypeSystemDefinition::InterfaceTypeExtension(InterfaceTypeExtension { name, fields, directives, .. }) => match self.type_map.get(&name.value).cloned() { Some(Type::Interface(id)) => { let index = id.as_usize(); let interface = self.interfaces.get(index).ok_or_else(|| { vec![Diagnostic::error( SchemaError::ExtendUndefinedType(name.value), Location::new(*location_key, name.span), )] })?; let field_ids = &interface.fields; let mut existing_fields = HashMap::with_capacity(field_ids.len() + len_of_option_list(fields)); for field_id in field_ids { let field_name = self.fields[field_id.as_usize()].name; existing_fields.insert(field_name.item, field_name.location); } let client_fields = self.build_extend_fields( fields, &mut existing_fields, *location_key, Some(Type::Interface(id)), )?; self.interfaces[index].fields.extend(client_fields); let built_directives = self.build_directive_values(directives); extend_without_duplicates( &mut self.interfaces[index].directives, built_directives, ); } _ => { return Err(vec![Diagnostic::error( SchemaError::ExtendUndefinedType(name.value), Location::new(*location_key, name.span), )]); } }, TypeSystemDefinition::SchemaExtension { .. } => todo!("SchemaExtension"), TypeSystemDefinition::EnumTypeExtension { .. } => todo!("EnumTypeExtension"), TypeSystemDefinition::UnionTypeExtension { .. } => todo!("UnionTypeExtension"), TypeSystemDefinition::InputObjectTypeExtension { .. } => { todo!("InputObjectTypeExtension") } TypeSystemDefinition::ScalarTypeExtension { .. 
} => todo!("ScalarTypeExtension"), } Ok(()) } fn build_object_id(&mut self, name: StringKey) -> DiagnosticsResult<ObjectID> { match self.type_map.get(&name) { Some(Type::Object(id)) => Ok(*id), Some(non_object_type) => { todo_add_location(SchemaError::ExpectedObjectReference(name, *non_object_type)) } None => todo_add_location(SchemaError::UndefinedType(name)), } } fn build_interface_id( &mut self, name: &Identifier, location_key: &SourceLocationKey, ) -> DiagnosticsResult<InterfaceID> { match self.type_map.get(&name.value) { Some(Type::Interface(id)) => Ok(*id), Some(non_interface_type) => Err(vec![Diagnostic::error( SchemaError::ExpectedInterfaceReference(name.value, *non_interface_type), Location::new(*location_key, name.span), )]), None => Err(vec![Diagnostic::error( SchemaError::UndefinedType(name.value), Location::new(*location_key, name.span), )]), } } fn build_field(&mut self, field: Field) -> FieldID { let field_index = self.fields.len().try_into().unwrap(); self.fields.push(field); FieldID(field_index) } fn build_fields( &mut self, field_defs: &Option<List<FieldDefinition>>, field_location_key: SourceLocationKey, parent_type: Option<Type>, ) -> DiagnosticsResult<Vec<FieldID>> { if let Some(field_defs) = field_defs { field_defs .items .iter() .map(|field_def| { let arguments = self.build_arguments(&field_def.arguments)?; let type_ = self.build_type_reference(&field_def.type_)?; let directives = self.build_directive_values(&field_def.directives); let description = field_def.description.as_ref().map(|desc| desc.value); Ok(self.build_field(Field { name: WithLocation::new( Location::new(field_location_key, field_def.name.span), field_def.name.value, ), is_extension: false, arguments, type_, directives, parent_type, description, })) }) .collect() } else { Ok(Vec::new()) } } fn build_extend_fields( &mut self, field_defs: &Option<List<FieldDefinition>>, existing_fields: &mut HashMap<StringKey, Location>, source_location_key: SourceLocationKey, parent_type: Option<Type>, ) -> DiagnosticsResult<Vec<FieldID>> { if let Some(field_defs) = field_defs { let mut field_ids: Vec<FieldID> = Vec::with_capacity(field_defs.items.len()); for field_def in &field_defs.items { let field_name = field_def.name.value; let field_location = Location::new(source_location_key, field_def.name.span); if let Some(prev_location) = existing_fields.insert(field_name, field_location) { return Err(vec![ Diagnostic::error(SchemaError::DuplicateField(field_name), field_location) .annotate("previously defined here", prev_location), ]); } let arguments = self.build_arguments(&field_def.arguments)?; let directives = self.build_directive_values(&field_def.directives); let type_ = self.build_type_reference(&field_def.type_)?; let description = field_def.description.as_ref().map(|desc| desc.value); field_ids.push(self.build_field(Field { name: WithLocation::new(field_location, field_name), is_extension: true, arguments, type_, directives, parent_type, description, })); } Ok(field_ids) } else { Ok(Vec::new()) } } fn build_arguments( &mut self, arg_defs: &Option<List<InputValueDefinition>>, ) -> DiagnosticsResult<ArgumentDefinitions> { if let Some(arg_defs) = arg_defs { let arg_defs: DiagnosticsResult<Vec<Argument>> = arg_defs .items .iter() .map(|arg_def| { Ok(Argument { name: arg_def.name.value, type_: self.build_input_object_reference(&arg_def.type_)?, default_value: arg_def.default_value.clone(), description: None, }) }) .collect(); Ok(ArgumentDefinitions(arg_defs?)) } else { Ok(ArgumentDefinitions(Vec::new())) } } fn 
build_input_object_reference( &mut self, ast_type: &TypeAnnotation, ) -> DiagnosticsResult<TypeReference> { Ok(match ast_type { TypeAnnotation::Named(named_type) => { let type_ = self.type_map.get(&named_type.name.value).ok_or_else(|| { vec![Diagnostic::error( SchemaError::UndefinedType(named_type.name.value), Location::new(SourceLocationKey::generated(), named_type.name.span), )] })?; if !(type_.is_enum() || type_.is_scalar() || type_.is_input_object()) { return Err(vec![Diagnostic::error( SchemaError::ExpectedInputType(named_type.name.value), Location::new(SourceLocationKey::generated(), named_type.name.span), )]); } TypeReference::Named(*type_) } TypeAnnotation::NonNull(of_type) => { TypeReference::NonNull(Box::new(self.build_input_object_reference(&of_type.type_)?)) } TypeAnnotation::List(of_type) => { TypeReference::List(Box::new(self.build_input_object_reference(&of_type.type_)?)) } }) } fn build_type_reference( &mut self, ast_type: &TypeAnnotation, ) -> DiagnosticsResult<TypeReference> { Ok(match ast_type { TypeAnnotation::Named(named_type) => { TypeReference::Named(*self.type_map.get(&named_type.name.value).ok_or_else(|| { vec![Diagnostic::error( SchemaError::UndefinedType(named_type.name.value), Location::generated(), )] })?) } TypeAnnotation::NonNull(of_type) => { TypeReference::NonNull(Box::new(self.build_type_reference(&of_type.type_)?)) } TypeAnnotation::List(of_type) => { TypeReference::List(Box::new(self.build_type_reference(&of_type.type_)?)) } }) } fn build_directive_values(&mut self, directives: &[ConstantDirective]) -> Vec<DirectiveValue> { directives .iter() .map(|directive| { let arguments = if let Some(arguments) = &directive.arguments { arguments .items .iter() .map(|argument| ArgumentValue { name: argument.name.value, value: argument.value.clone(), }) .collect() } else { Vec::new() }; DirectiveValue { name: directive.name.value, arguments, } }) .collect() } } /// Extends the `target` with `extensions` ignoring items that are already in /// `target`. fn extend_without_duplicates<T: PartialEq>( target: &mut Vec<T>, extensions: impl IntoIterator<Item = T>, ) { for extension in extensions { if !target.contains(&extension) { target.push(extension); } } } fn len_of_option_list<T>(option_list: &Option<List<T>>) -> usize { option_list.as_ref().map_or(0, |list| list.items.len()) } fn expect_object_type_name(type_map: &TypeMap, object_id: ObjectID) -> StringKey { *type_map .iter() .find(|(_, type_)| match type_ { Type::Object(id_) => id_ == &object_id, _ => false, }) .expect("Missing object in type_map") .0 } #[cfg(test)] mod tests { use super::*; #[test] fn test_extend_without_duplicates() { let mut target = vec![10, 11]; extend_without_duplicates(&mut target, vec![1, 10, 100]); assert_eq!(target, vec![10, 11, 1, 100]); } }<|fim▁end|>
id_type: _id_type, unchecked_argument_type_sentinel: _unchecked_argument_type_sentinel, type_map,
<|file_name|>message.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3 # This file is part of Epoptes, http://epoptes.org # Copyright 2012-2018 the Epoptes team, see AUTHORS. # SPDX-License-Identifier: GPL-3.0-or-later """ Display a simple window with a message. """ import os import sys from _common import gettext as _ from gi.repository import Gtk class MessageWindow(Gtk.Window): """Display a simple window with a message.""" def __init__(self, text, title="Epoptes", markup=True, icon_name="dialog-information"): super().__init__(title=title, icon_name=icon_name) self.set_position(Gtk.WindowPosition.CENTER) grid = Gtk.Grid(column_spacing=10, row_spacing=10, margin=10) self.add(grid)<|fim▁hole|> grid.add(image) # Always load the plain text first in case the markup parsing fails label = Gtk.Label( label=text, selectable=True, hexpand=True, vexpand=True, halign=Gtk.Align.START, valign=Gtk.Align.START) if markup: label.set_markup(text) grid.add(label) button = Gtk.Button.new_from_stock(Gtk.STOCK_CLOSE) button.set_hexpand(False) button.set_halign(Gtk.Align.END) button.connect("clicked", Gtk.main_quit) grid.attach(button, 1, 1, 2, 1) self.set_focus_child(button) accelgroup = Gtk.AccelGroup() key, modifier = Gtk.accelerator_parse('Escape') accelgroup.connect( key, modifier, Gtk.AccelFlags.VISIBLE, Gtk.main_quit) self.add_accel_group(accelgroup) def main(): """Run the module from the command line.""" if len(sys.argv) <= 1 or len(sys.argv) > 5: print(_("Usage: {} text [title] [markup] [icon_name]").format( os.path.basename(__file__)), file=sys.stderr) exit(1) text = sys.argv[1] if len(sys.argv) > 2 and sys.argv[2]: title = sys.argv[2] else: title = "Epoptes" if len(sys.argv) > 3 and sys.argv[3]: markup = sys.argv[3].lower() == "true" else: markup = True if len(sys.argv) > 4: icon_name = sys.argv[4] else: icon_name = "dialog-information" window = MessageWindow(text, title, markup, icon_name) window.connect("destroy", Gtk.main_quit) window.show_all() Gtk.main() if __name__ == '__main__': main()<|fim▁end|>
image = Gtk.Image.new_from_icon_name(icon_name, Gtk.IconSize.DIALOG)
<|file_name|>camera_try.py<|end_file_name|><|fim▁begin|># quick demo of some python image filters
# using raspberry pi camera<|fim▁hole|>import Tkinter as tk
from picamera import PiCamera
from time import sleep
from PIL import Image, ImageFilter, ImageChops, ImageTk

imagefile = "image.jpg"
w = 320
h = 240
lastfilter = "none"
camera = PiCamera()

def takephoto():
    camera.capture(imagefile)
    image1 = Image.open(imagefile)
    return image1

def photoloop():
    count = 0
    while (count < 9):
        sleep(0.5)
        newphoto()
        if lastfilter != "none":
            # re-apply the most recent filter to the fresh frame
            dofilter(image1, lastfilter)
        count = count + 1

def newphoto():
    global image1
    image1 = takephoto()
    tkimage1 = ImageTk.PhotoImage(image1)
    panel1.configure(image=tkimage1)
    panel1.image = tkimage1

def invert():
    global image1
    image1 = ImageChops.invert(image1)
    tkimage1 = ImageTk.PhotoImage(image1)
    panel1.configure(image=tkimage1)
    panel1.image = tkimage1

def grayscale():
    global image1
    r, g, b = image1.split()
    image1 = Image.merge("RGB", (g, g, g))
    tkimage1 = ImageTk.PhotoImage(image1)
    panel1.configure(image=tkimage1)
    panel1.image = tkimage1

def dofilter(theimage, thefilter):
    # remember the filter so photoloop() can re-apply it
    global image1, lastfilter
    lastfilter = thefilter
    image1 = image1.filter(thefilter)
    tkimage1 = ImageTk.PhotoImage(image1)
    panel1.configure(image=tkimage1)
    panel1.image = tkimage1

# Setup a window
root = tk.Tk()
root.title('Image')
image1 = takephoto()
tkimage1 = ImageTk.PhotoImage(image1)
w = tkimage1.width()
h = tkimage1.height()
root.geometry("%dx%d+%d+%d" % (w, h, 0, 0))
# root has no image argument, so use a label as a panel
panel1 = tk.Label(root, image=tkimage1)
panel1.pack(side='top', fill='both', expand='yes')
# save the panel's image from 'garbage collection'
panel1.image = tkimage1

# Add some buttons
buttonrow = tk.Frame(root)
buttonrow.place(y=0, x=0)
button = tk.Button(buttonrow, text='CAMERA', command=lambda: newphoto())
button.pack(side='left')
button = tk.Button(buttonrow, text='LOOP', command=lambda: photoloop())
button.pack(side='left')
button = tk.Button(buttonrow, text='INVERT', command=lambda: invert())
button.pack(side='left')
button = tk.Button(buttonrow, text='GRAY', command=lambda: grayscale())
button.pack(side='left')

# add some filter buttons
button = tk.Button(buttonrow, text='BLUR', command=lambda: dofilter(image1, ImageFilter.BLUR))
button.pack(side='left')
button = tk.Button(buttonrow, text='CONTOUR', command=lambda: dofilter(image1, ImageFilter.CONTOUR))
button.pack(side='left')
button = tk.Button(buttonrow, text='FIND_EDGES', command=lambda: dofilter(image1, ImageFilter.FIND_EDGES))
button.pack(side='left')
button = tk.Button(buttonrow, text='EMBOSS', command=lambda: dofilter(image1, ImageFilter.EMBOSS))
button.pack(side='left')
button = tk.Button(buttonrow, text='EDGE_ENHANCE', command=lambda: dofilter(image1, ImageFilter.EDGE_ENHANCE))
button.pack(side='left')
button = tk.Button(buttonrow, text='CLOSE', command=lambda: root.destroy())
button.pack(side='left')

root.mainloop()<|fim▁end|>
<|file_name|>mrml_utils.cpp<|end_file_name|><|fim▁begin|>/* This file is part of the KDE project
   Copyright (C) 2002 Carsten Pfeiffer <[email protected]>

   This program is free software; you can redistribute it and/or
   modify it under the terms of the GNU General Public License
   as published by the Free Software Foundation, version 2.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; see the file COPYING.  If not, write to
   the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
   Boston, MA 02110-1301, USA.
*/

#include <dcopclient.h>
#include <kapplication.h>
#include <kprocess.h>
#include <kstaticdeleter.h>

#include "watcher_stub.h"

#include "mrml_utils.h"

// after 100 seconds of no use, terminate the mrmld
#define TIMEOUT 100
// how many times to restart the mrmld in case of failure
#define NUM_RESTARTS 5

using namespace KMrml;

KStaticDeleter<Util> utils_sd;

Util *Util::s_self = 0L;

Util::Util()
{
    // we need our own dcopclient, when used in kio_mrml
    if ( !DCOPClient::mainClient() )
    {
        DCOPClient::setMainClient( new DCOPClient() );
        if ( !DCOPClient::mainClient()->attach() )
            qWarning( "kio_mrml: Can't attach to DCOP Server.");
    }
}

Util::~Util()
{
    if ( this == s_self )
        s_self = 0L;
}

Util *Util::self()
{
    if ( !s_self )
        s_self = utils_sd.setObject( new Util() );

    return s_self;
}

bool Util::requiresLocalServerFor( const KURL& url )
{
    return url.host().isEmpty() || url.host() == "localhost";
}

bool Util::startLocalServer( const Config& config )
{
    if ( config.serverStartedIndividually() )
        return true;

    DCOPClient *client = DCOPClient::mainClient();
    // ### check if it's already running (add dcop method to Watcher)
    Watcher_stub watcher( client, "kded", "daemonwatcher");
    return ( watcher.requireDaemon( client->appId(), "mrmld",
                                    config.mrmldCommandline(),
                                    TIMEOUT, NUM_RESTARTS )
             && watcher.ok() );
}

void Util::unrequireLocalServer()
{<|fim▁hole|>
    watcher.unrequireDaemon( client->appId(), "mrmld" );
}<|fim▁end|>
DCOPClient *client = DCOPClient::mainClient(); Watcher_stub watcher( client, "kded", "daemonwatcher");
<|file_name|>EditPresentationSourceEvent.java<|end_file_name|><|fim▁begin|>/* * EditPresentationSourceEvent.java * * Copyright (C) 2020 by RStudio, PBC * * Unless you have received this program directly from RStudio pursuant * to the terms of a commercial license agreement with RStudio, then * this program is licensed to you under the terms of version 3 of the * GNU Affero General Public License. This program is distributed WITHOUT * ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT, * MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the * AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details. *<|fim▁hole|> */ package org.rstudio.studio.client.workbench.views.source.events; import org.rstudio.core.client.files.FileSystemItem; import com.google.gwt.event.shared.EventHandler; import com.google.gwt.event.shared.GwtEvent; public class EditPresentationSourceEvent extends GwtEvent<EditPresentationSourceEvent.Handler> { public interface Handler extends EventHandler { void onEditPresentationSource(EditPresentationSourceEvent e); } public EditPresentationSourceEvent(FileSystemItem sourceFile, int slideIndex) { sourceFile_ = sourceFile; slideIndex_ = slideIndex; } public FileSystemItem getSourceFile() { return sourceFile_; } public int getSlideIndex() { return slideIndex_; } @Override public Type<Handler> getAssociatedType() { return TYPE; } @Override protected void dispatch(Handler handler) { handler.onEditPresentationSource(this); } private final FileSystemItem sourceFile_; private final int slideIndex_; public static final Type<Handler> TYPE = new Type<>(); }<|fim▁end|>
<|file_name|>types.ts<|end_file_name|><|fim▁begin|>import { MikroOrmModuleSyncOptions } from '@mikro-orm/nestjs';

/**
 * connection options that must not be used in the module options for the in-memory db module, as it overrides the connection
 */
type ConnectionOptions = 'type' | 'driver' | 'clientUrl' | 'dbName' | 'user' | 'password';<|fim▁hole|>

export type MongoDatabaseModuleOptions = Omit<MikroOrmModuleSyncOptions, ConnectionOptions>;<|fim▁end|>
<|file_name|>setup.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from setuptools import find_packages, setup from eventlet import __version__ from os import path setup( name='eventlet', version=__version__, description='Highly concurrent networking library', author='Linden Lab', author_email='[email protected]', url='http://eventlet.net', packages=find_packages(exclude=['benchmarks', 'tests', 'tests.*']), install_requires=( 'greenlet >= 0.3', ), zip_safe=False, long_description=open( path.join( path.dirname(__file__), 'README.rst' ) ).read(), test_suite='nose.collector', classifiers=[ "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Operating System :: MacOS :: MacOS X", "Operating System :: POSIX", "Operating System :: Microsoft :: Windows", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Topic :: Internet", "Topic :: Software Development :: Libraries :: Python Modules", "Intended Audience :: Developers", "Development Status :: 4 - Beta", ] )<|fim▁end|>
#!/usr/bin/env python
<|file_name|>build_tfidf.py<|end_file_name|><|fim▁begin|>import pandas as pd import numpy as np import re from gensim import corpora, models, similarities from gensim.parsing.preprocessing import STOPWORDS def split(text): ''' Split the input text into words/tokens; ignoring stopwords and empty strings ''' delimiters = ".", ",", ";", ":", "-", "(", ")", " ", "\t" regexPattern = '|'.join(map(re.escape, delimiters)) return [word for word in re.split(regexPattern, text.lower()) if word not in STOPWORDS and word != ""] def main(): # Load data df_train = pd.read_csv('data/train.csv', encoding="ISO-8859-1")<|fim▁hole|> df_desc = pd.read_csv('data/product_descriptions.csv', encoding="ISO-8859-1") df_attr = pd.read_csv('data/attributes_combined.csv', encoding="ISO-8859-1") # split the texts titles = [split(line) for line in df_train["product_title"]] descs = [split(line) for line in df_desc["product_description"]] attrs = [[str(line)] if isinstance(line, float) else split(line) for line in df_attr["attr_value"]] queries = [split(line) for line in df_train["search_term"]] texts = np.concatenate((titles, descs, attrs, queries)) # remove infrequent words from collections import defaultdict frequency = defaultdict(int) for text in texts: for token in text: frequency[token] += 1 texts = [[token for token in text if frequency[token] > 2] for text in texts] # build dictionary dictionary = corpora.Dictionary(texts) dictionary.save('homedepot.dict') print dictionary # actually build a bag-of-words corpus corpus = [dictionary.doc2bow(text) for text in texts] corpora.MmCorpus.serialize('homedepot.mm', corpus) # build Tf-idf model tfidf = models.TfidfModel(corpus) tfidf.save('homedepot.tfidf') if __name__ == "__main__": main()<|fim▁end|>
<|file_name|>TranslationCache.rs<|end_file_name|><|fim▁begin|>extern crate libc; use std::mem; use std::marker; use std::ops::{Index, IndexMut}; extern{ fn memset(s: *mut libc::c_void, c: libc::uint32_t, n: libc::size_t) -> *mut libc::c_void; } pub struct TranslationCache { pub page : *mut u8 } unsafe impl Send for TranslationCache {} unsafe impl Sync for TranslationCache {} const PAGE_SIZE: usize = 4096; impl Index<usize> for TranslationCache { type Output = u8; fn index(&self, _index: usize) -> &u8 { unsafe {&*self.page.offset(_index as isize) } } } impl IndexMut<usize> for TranslationCache {<|fim▁hole|>} impl TranslationCache { pub fn new(num_pages: usize) -> TranslationCache { let page : *mut u8; unsafe { let cache_size = num_pages * PAGE_SIZE; let mut _page : *mut libc::c_void = mem::uninitialized(); libc::posix_memalign(&mut _page, PAGE_SIZE, cache_size); libc::mprotect(_page, cache_size, libc::PROT_EXEC | libc::PROT_READ | libc::PROT_WRITE); memset(_page, 0xC3, cache_size); page = mem::transmute(_page); } TranslationCache { page: page } } }<|fim▁end|>
fn index_mut(&mut self, _index: usize) -> &mut u8 { unsafe {&mut *self.page.offset(_index as isize) } }
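The Rust row above reserves a page-aligned buffer, marks it readable, writable, and executable, and pre-fills it with 0xC3 (the x86 RET opcode). A hypothetical Python analogue using the standard mmap module — a sketch only, assuming a POSIX host whose security policy still permits writable-and-executable mappings:

import mmap

PAGE_SIZE = mmap.PAGESIZE

def new_cache(num_pages):
    size = num_pages * PAGE_SIZE
    # Anonymous mapping with read/write/execute protection (POSIX only).
    buf = mmap.mmap(-1, size,
                    prot=mmap.PROT_READ | mmap.PROT_WRITE | mmap.PROT_EXEC)
    buf.write(b"\xc3" * size)  # fill with RET, as the Rust memset does
    buf.seek(0)
    return buf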
<|file_name|>civ_battleroyal_leader_civ_adder.py<|end_file_name|><|fim▁begin|>import sys


def get_civ_leader(civ_leader_file):
    """Reads in a file with civs mapped to leaders and adds them to a dict.

    """
    return {leader.strip('\n'): country for line in civ_leader_file for
            (country, leader) in [line.split('\t')]}


def get_all_names(civ_leader):
    """Reads in all leader names in the civ_leader dict to filter narrator
    text.

    """
    return [k for keys in civ_leader for k in keys.split()]


def find_best_leader_match(input_lines):
    """Return the best leader match for the input.

    Finds the best-matched leader name for an input list of words
    (containing at least one leader name). Useful when narrators use
    shortened leader names and several leaders share part of their name
    (Khan, for example). Returns None when no unique match exists."""
    best_match = 0
    matched_key = None
    for leader in civ_leader.keys():
        matches = 0
        for split_name in leader.split():
            for split_input in input_lines:
                if(split_input == split_name):
                    matches += 1
        if(matches > best_match):
            matched_key = leader
            best_match = matches
        elif(matches == best_match and matches != 0):
            matched_key = None
    if(matched_key is not None):
        return civ_leader[matched_key]


def insert_civ_names(input_lines, all_names):
    """Inserts civ names in parentheses.

    Reads in a text file from narrators, searches for leader names and
    adds the civ in parentheses after each one."""
    out = []
    for line in input_lines:
        new_line = []
        split_line = line.split(' ')
        start_word_num = 0
        word_num = 0
        while word_num < len(split_line):
            word = split_line[word_num]
            if(word[-1] in '.,:;?!+-='):
                punct = word[-1]
                word = word[:-1]
            else:
                punct = ''
            w = 0
            leader = []
            if(word in all_names and word != 'I'):
                while(word in all_names):
                    leader.append(word)
                    w += 1<|fim▁hole|>
                    word = split_line[word_num + w]
                civ = find_best_leader_match(leader)
                # find_best_leader_match returns None (never False) when
                # there is no unique match.
                if civ is not None:
                    new_line.extend(
                        (' '.join(split_line[start_word_num:word_num]),
                         ' {} ({}){} '.format(' '.join(leader), civ, punct)))
                    start_word_num = word_num + len(leader)
                    word_num = word_num + len(leader)
                else:
                    word_num += 1
            else:
                word_num += 1
        new_line.append(' '.join(split_line[start_word_num:]))
        out.append(''.join(new_line))
    return(''.join(out))


def print_help():
    print (
        'For Civilization Battle Royal Mk.II community at '
        'reddit/r/civbattleroyale - Flair up!\n'
        'This python script takes in a plain text file as the only argument.\n'
        'It adds civilization names in brackets to leader names (from the '
        'civBR_civ_leader.tsv).\n'
        'Outputs a new text-file with a suffix: "_with_civs".\n'
        'Made by vincentrose88')


if __name__ == '__main__':
    if len(sys.argv) == 1:
        print_help()
        exit(0)
    elif(sys.argv[1] in ('-h', '--help', None)):
        print_help()
        exit(0)
    elif(sys.argv[1] == '-t' or sys.argv[1] == '--test'):
        input_file = 'data/test_data.txt'
    else:
        input_file = str(sys.argv[1])

    input_lines = open(input_file, 'r').readlines()
    civ_leader_file = open('civBR_civ_leader.tsv', 'r')

    civ_leader = get_civ_leader(civ_leader_file)
    all_names = get_all_names(civ_leader)

    output = insert_civ_names(input_lines, all_names)

    updated_file = open(input_file + '_with_civs', 'w')
    updated_file.write(output)<|fim▁end|>
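A self-contained sketch of the matching rule the script above implements: score every known leader name by word overlap with the input, and treat a tie between non-zero scores as no match, which is what the matched_key = None branch encodes:

def best_match(words, names):
    scores = {n: sum(w in n.split() for w in words) for n in names}
    top = max(scores.values(), default=0)
    winners = [n for n, s in scores.items() if s == top and s > 0]
    return winners[0] if len(winners) == 1 else None

# best_match(["Genghis", "Khan"], {"Genghis Khan", "Kublai Khan"})
#   -> "Genghis Khan"
# best_match(["Khan"], {"Genghis Khan", "Kublai Khan"})
#   -> None (ambiguous between the two Khans)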
<|file_name|>DataStore.java<|end_file_name|><|fim▁begin|>package bpmn; public class DataStore extends Artifact { public DataStore() { super(); } <|fim▁hole|> public String toString() { return "BPMN data store"; } }<|fim▁end|>
public DataStore(int xPos, int yPos, String text) { super(); setText(text); }
<|file_name|>orders.js<|end_file_name|><|fim▁begin|>'use strict' const {Product, Review, User, Category, Tag, Order, Address, LineItem} = require('APP/db') const Promise = require('bluebird') const {assertAdminOrSelfForOrder, mustBeLoggedIn} = require('APP/server/auth.filters.js') module.exports = require('express').Router() // fetch all of the orders for admin order list view, make sure the user requesting this is an admin .param('id', (req, res, next, id) => { Order.findById(id, { include: [{model: User, include: [{model: Address}]}, {model: LineItem}]}) .then(order => { // if no order send 404 req.order = order next() })<|fim▁hole|> Order.findAll({include: [{model: User}]}) .then(orders => res.json(orders)) .catch(next) }) // get a single order to display on the admin single order view and user single order view .get('/:id', (req, res, next) => { // res.json(order) // if allowed Order.findById(req.params.id, { include: [{model: LineItem, include: {model: Product}}, {model: User, include: [{model: Address}]}]}) .then(order => res.json(order)) .catch(next) }) // update the status of the order .put('/:id', assertAdminOrSelfForOrder, (req, res, next) => { req.order.update(req.body) .then(updatedOrder => res.json(updatedOrder)) // Order.update(req.body, {where: {id: req.params.id}}) // .then(([count, order]) => res.json(order)) .catch(next) })<|fim▁end|>
.catch(next) }) .get('/', (req, res, next) => {
<|file_name|>toolset_requirements.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # Copyright 2014 Steven Watanabe # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) # Test the handling of toolset.add-requirements import BoostBuild t = BoostBuild.Tester(pass_toolset=0, ignore_toolset_requirements=False) t.write('jamroot.jam', '''<|fim▁hole|>rule test-rule ( properties * ) { return <define>TEST_INDIRECT_CONDITIONAL ; } toolset.add-requirements <define>TEST_MACRO <conditional>@test-rule <link>shared:<define>TEST_CONDITIONAL ; rule check-requirements ( target : sources * : properties * ) { local macros = TEST_MACRO TEST_CONDITIONAL TEST_INDIRECT_CONDITIONAL ; for local m in $(macros) { if ! <define>$(m) in $(properties) { errors.error $(m) not defined ; } } } make test : : @check-requirements ; ''') t.run_build_system() t.cleanup()<|fim▁end|>
import toolset ; import errors ;
<|file_name|>container_sorter.go<|end_file_name|><|fim▁begin|>package hijackhelpers import ( "strings" "github.com/concourse/atc" ) type ContainerSorter []atc.Container func (sorter ContainerSorter) Len() int { return len(sorter) } func (sorter ContainerSorter) Swap(i, j int) { sorter[i], sorter[j] = sorter[j], sorter[i] } func (sorter ContainerSorter) Less(i, j int) bool { switch { case sorter[i].BuildID < sorter[j].BuildID: return true case sorter[i].BuildID > sorter[j].BuildID: return false case strings.Compare(sorter[i].ResourceName, sorter[j].ResourceName) == -1:<|fim▁hole|> return true case strings.Compare(sorter[i].StepName, sorter[j].StepName) == 1: return false case strings.Compare(sorter[i].Type, sorter[j].Type) == -1: return true default: return false } }<|fim▁end|>
return true case strings.Compare(sorter[i].ResourceName, sorter[j].ResourceName) == 1: return false case strings.Compare(sorter[i].StepName, sorter[j].StepName) == -1:
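The chained comparisons in the Go Less method above define a plain ascending multi-key order. The same ordering in a short Python sketch, with field names assumed to mirror the Go struct:

def sort_containers(containers):
    # Ascending by BuildID, then ResourceName, StepName and Type --
    # exactly the order the cascaded cases implement.
    return sorted(containers,
                  key=lambda c: (c["BuildID"], c["ResourceName"],
                                 c["StepName"], c["Type"]))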
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|> if django.VERSION >= (3, 2): # The declaration is only needed for older Django versions pass else: default_app_config = ( "wagtail.contrib.simple_translation.apps.SimpleTranslationAppConfig" )<|fim▁end|>
import django
<|file_name|>strategy_test.go<|end_file_name|><|fim▁begin|>/* Copyright 2016 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package poddisruptionbudget import ( "testing" "k8s.io/kubernetes/pkg/api" "k8s.io/kubernetes/pkg/api/unversioned" "k8s.io/kubernetes/pkg/apis/policy" "k8s.io/kubernetes/pkg/util/intstr" )<|fim▁hole|> t.Errorf("PodDisruptionBudget must be namespace scoped") } if Strategy.AllowCreateOnUpdate() { t.Errorf("PodDisruptionBudget should not allow create on update") } validSelector := map[string]string{"a": "b"} pdb := &policy.PodDisruptionBudget{ ObjectMeta: api.ObjectMeta{Name: "abc", Namespace: api.NamespaceDefault}, Spec: policy.PodDisruptionBudgetSpec{ MinAvailable: intstr.FromInt(3), Selector: &unversioned.LabelSelector{MatchLabels: validSelector}, }, } Strategy.PrepareForCreate(ctx, pdb) errs := Strategy.Validate(ctx, pdb) if len(errs) != 0 { t.Errorf("Unexpected error validating %v", errs) } newPdb := &policy.PodDisruptionBudget{ ObjectMeta: api.ObjectMeta{Name: pdb.Name, Namespace: pdb.Namespace}, Spec: pdb.Spec, Status: policy.PodDisruptionBudgetStatus{ PodDisruptionsAllowed: 1, CurrentHealthy: 3, DesiredHealthy: 3, ExpectedPods: 3, }, } // Nothing in Spec changes: OK Strategy.PrepareForUpdate(ctx, newPdb, pdb) errs = Strategy.ValidateUpdate(ctx, newPdb, pdb) if len(errs) != 0 { t.Errorf("Unexpected error updating PodDisruptionBudget.") } // Changing the selector? No. newPdb.Spec.Selector = &unversioned.LabelSelector{MatchLabels: map[string]string{"a": "bar"}} Strategy.PrepareForUpdate(ctx, newPdb, pdb) errs = Strategy.ValidateUpdate(ctx, newPdb, pdb) if len(errs) == 0 { t.Errorf("Expected a validation error since updates are disallowed on poddisruptionbudgets.") } newPdb.Spec.Selector = pdb.Spec.Selector // Changing MinAvailable? Also no. 
newPdb.Spec.MinAvailable = intstr.FromString("28%") Strategy.PrepareForUpdate(ctx, newPdb, pdb) errs = Strategy.ValidateUpdate(ctx, newPdb, pdb) if len(errs) == 0 { t.Errorf("Expected a validation error since updates are disallowed on poddisruptionbudgets.") } } func TestPodDisruptionBudgetStatusStrategy(t *testing.T) { ctx := api.NewDefaultContext() if !StatusStrategy.NamespaceScoped() { t.Errorf("PodDisruptionBudgetStatus must be namespace scoped") } if StatusStrategy.AllowCreateOnUpdate() { t.Errorf("PodDisruptionBudgetStatus should not allow create on update") } validSelector := map[string]string{"a": "b"} oldPdb := &policy.PodDisruptionBudget{ ObjectMeta: api.ObjectMeta{Name: "abc", Namespace: api.NamespaceDefault, ResourceVersion: "10"}, Spec: policy.PodDisruptionBudgetSpec{ Selector: &unversioned.LabelSelector{MatchLabels: validSelector}, MinAvailable: intstr.FromInt(3), }, Status: policy.PodDisruptionBudgetStatus{ PodDisruptionsAllowed: 1, CurrentHealthy: 3, DesiredHealthy: 3, ExpectedPods: 3, }, } newPdb := &policy.PodDisruptionBudget{ ObjectMeta: api.ObjectMeta{Name: "abc", Namespace: api.NamespaceDefault, ResourceVersion: "9"}, Spec: policy.PodDisruptionBudgetSpec{ Selector: &unversioned.LabelSelector{MatchLabels: validSelector}, MinAvailable: intstr.FromInt(2), }, Status: policy.PodDisruptionBudgetStatus{ PodDisruptionsAllowed: 0, CurrentHealthy: 2, DesiredHealthy: 3, ExpectedPods: 3, }, } StatusStrategy.PrepareForUpdate(ctx, newPdb, oldPdb) if newPdb.Status.CurrentHealthy != 2 { t.Errorf("PodDisruptionBudget status updates should allow change of CurrentHealthy: %v", newPdb.Status.CurrentHealthy) } if newPdb.Spec.MinAvailable.IntValue() != 3 { t.Errorf("PodDisruptionBudget status updates should not clobber spec: %v", newPdb.Spec) } errs := StatusStrategy.ValidateUpdate(ctx, newPdb, oldPdb) if len(errs) != 0 { t.Errorf("Unexpected error %v", errs) } }<|fim▁end|>
func TestPodDisruptionBudgetStrategy(t *testing.T) { ctx := api.NewDefaultContext() if !Strategy.NamespaceScoped() {
<|file_name|>base.py<|end_file_name|><|fim▁begin|># Authors: # Jason Gerard DeRose <[email protected]> # # Copyright (C) 2008 Red Hat # see file 'COPYING' for use and warranty information # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """ Foundational classes and functions. """ import re from constants import NAME_REGEX, NAME_ERROR from constants import TYPE_ERROR, SET_ERROR, DEL_ERROR, OVERRIDE_ERROR class ReadOnly(object): """ Base class for classes that can be locked into a read-only state. Be forewarned that Python does not offer true read-only attributes for user-defined classes. Do *not* rely upon the read-only-ness of this class for security purposes! The point of this class is not to make it impossible to set or to delete attributes after an instance is locked, but to make it impossible to do so *accidentally*. Rather than constantly reminding our programmers of things like, for example, "Don't set any attributes on this ``FooBar`` instance because doing so wont be thread-safe", this class offers a real way to enforce read-only attribute usage. For example, before a `ReadOnly` instance is locked, you can set and delete its attributes as normal: >>> class Person(ReadOnly): ... pass ... >>> p = Person() >>> p.name = 'John Doe' >>> p.phone = '123-456-7890' >>> del p.phone But after an instance is locked, you cannot set its attributes: >>> p.__islocked__() # Is this instance locked? False >>> p.__lock__() # This will lock the instance >>> p.__islocked__() True >>> p.department = 'Engineering' Traceback (most recent call last): ... AttributeError: locked: cannot set Person.department to 'Engineering' Nor can you deleted its attributes: >>> del p.name Traceback (most recent call last): ... AttributeError: locked: cannot delete Person.name However, as noted at the start, there are still obscure ways in which attributes can be set or deleted on a locked `ReadOnly` instance. For example: >>> object.__setattr__(p, 'department', 'Engineering') >>> p.department 'Engineering' >>> object.__delattr__(p, 'name') >>> hasattr(p, 'name') False But again, the point is that a programmer would never employ the above techniques *accidentally*. Lastly, this example aside, you should use the `lock()` function rather than the `ReadOnly.__lock__()` method. And likewise, you should use the `islocked()` function rather than the `ReadOnly.__islocked__()` method. For example: >>> readonly = ReadOnly() >>> islocked(readonly) False >>> lock(readonly) is readonly # lock() returns the instance True >>> islocked(readonly) True """ __locked = False def __lock__(self): """ Put this instance into a read-only state. After the instance has been locked, attempting to set or delete an attribute will raise an AttributeError. """ assert self.__locked is False, '__lock__() can only be called once' self.__locked = True def __islocked__(self): """ Return True if instance is locked, otherwise False. 
""" return self.__locked def __setattr__(self, name, value): """ If unlocked, set attribute named ``name`` to ``value``. If this instance is locked, an AttributeError will be raised. :param name: Name of attribute to set. :param value: Value to assign to attribute. """ if self.__locked: raise AttributeError( SET_ERROR % (self.__class__.__name__, name, value) ) return object.__setattr__(self, name, value) def __delattr__(self, name): """ If unlocked, delete attribute named ``name``. If this instance is locked, an AttributeError will be raised. :param name: Name of attribute to delete. """ if self.__locked: raise AttributeError( DEL_ERROR % (self.__class__.__name__, name) ) return object.__delattr__(self, name) def lock(instance): """ Lock an instance of the `ReadOnly` class or similar. This function can be used to lock instances of any class that implements the same locking API as the `ReadOnly` class. For example, this function can lock instances of the `config.Env` class. So that this function can be easily used within an assignment, ``instance`` is returned after it is locked. For example: >>> readonly = ReadOnly() >>> readonly is lock(readonly) True >>> readonly.attr = 'This wont work' Traceback (most recent call last): ... AttributeError: locked: cannot set ReadOnly.attr to 'This wont work' Also see the `islocked()` function. :param instance: The instance of `ReadOnly` (or similar) to lock. """ assert instance.__islocked__() is False, 'already locked: %r' % instance instance.__lock__() assert instance.__islocked__() is True, 'failed to lock: %r' % instance return instance def islocked(instance): """ Return ``True`` if ``instance`` is locked. This function can be used on an instance of the `ReadOnly` class or an instance of any other class implemented the same locking API. For example: >>> readonly = ReadOnly() >>> islocked(readonly) False >>> readonly.__lock__() >>> islocked(readonly) True Also see the `lock()` function. :param instance: The instance of `ReadOnly` (or similar) to interrogate. """ assert ( hasattr(instance, '__lock__') and callable(instance.__lock__) ), 'no __lock__() method: %r' % instance return instance.__islocked__() def check_name(name): """ Verify that ``name`` is suitable for a `NameSpace` member name. In short, ``name`` must be a valid lower-case Python identifier that neither starts nor ends with an underscore. Otherwise an exception is raised. This function will raise a ``ValueError`` if ``name`` does not match the `constants.NAME_REGEX` regular expression. For example: >>> check_name('MyName') Traceback (most recent call last): ... ValueError: name must match '^[a-z][_a-z0-9]*[a-z0-9]$|^[a-z]$'; got 'MyName' Also, this function will raise a ``TypeError`` if ``name`` is not an ``str`` instance. For example: >>> check_name(u'my_name') Traceback (most recent call last): ... TypeError: name: need a <type 'str'>; got u'my_name' (a <type 'unicode'>) So that `check_name()` can be easily used within an assignment, ``name`` is returned unchanged if it passes the check. For example: >>> n = check_name('my_name') >>> n 'my_name' :param name: Identifier to test. """ if type(name) is not str: raise TypeError( TYPE_ERROR % ('name', str, name, type(name)) ) if re.match(NAME_REGEX, name) is None: raise ValueError( NAME_ERROR % (NAME_REGEX, name) ) return name class NameSpace(ReadOnly): """ A read-only name-space with handy container behaviours. A `NameSpace` instance is an ordered, immutable mapping object whose values can also be accessed as attributes. 
A `NameSpace` instance is constructed from an iterable providing its *members*, which are simply arbitrary objects with a ``name`` attribute whose value: 1. Is unique among the members 2. Passes the `check_name()` function Beyond that, no restrictions are placed on the members: they can be classes or instances, and of any type. The members can be accessed as attributes on the `NameSpace` instance or through a dictionary interface. For example, say we create a `NameSpace` instance from a list containing a single member, like this: >>> class my_member(object): ... name = 'my_name' ... >>> namespace = NameSpace([my_member]) >>> namespace NameSpace(<1 member>, sort=True) We can then access ``my_member`` both as an attribute and as a dictionary item: >>> my_member is namespace.my_name # As an attribute True >>> my_member is namespace['my_name'] # As dictionary item True For a more detailed example, say we create a `NameSpace` instance from a generator like this: >>> class Member(object): ... def __init__(self, i): ... self.i = i ... self.name = 'member%d' % i ... def __repr__(self): ... return 'Member(%d)' % self.i ... >>> ns = NameSpace(Member(i) for i in xrange(3)) >>> ns NameSpace(<3 members>, sort=True) As above, the members can be accessed as attributes and as dictionary items: >>> ns.member0 is ns['member0'] True >>> ns.member1 is ns['member1'] True >>> ns.member2 is ns['member2'] True Members can also be accessed by index and by slice. For example: >>> ns[0] Member(0) >>> ns[-1] Member(2) >>> ns[1:] (Member(1), Member(2)) (Note that slicing a `NameSpace` returns a ``tuple``.) `NameSpace` instances provide standard container emulation for membership testing, counting, and iteration. For example: >>> 'member3' in ns # Is there a member named 'member3'? False >>> 'member2' in ns # But there is a member named 'member2' True >>> len(ns) # The number of members 3 >>> list(ns) # Iterate through the member names ['member0', 'member1', 'member2'] Although not a standard container feature, the `NameSpace.__call__()` method provides a convenient (and efficient) way to iterate through the *members* (as opposed to the member names). Think of it like an ordered version of the ``dict.itervalues()`` method. For example: >>> list(ns[name] for name in ns) # One way to do it [Member(0), Member(1), Member(2)] >>> list(ns()) # A more efficient, simpler way to do it [Member(0), Member(1), Member(2)] Another convenience method is `NameSpace.__todict__()`, which will return a copy of the ``dict`` mapping the member names to the members. For example: >>> ns.__todict__() {'member1': Member(1), 'member0': Member(0), 'member2': Member(2)} As `NameSpace.__init__()` locks the instance, `NameSpace` instances are read-only from the get-go. An ``AttributeError`` is raised if you try to set *any* attribute on a `NameSpace` instance. For example: >>> ns.member3 = Member(3) # Lets add that missing 'member3' Traceback (most recent call last): ... AttributeError: locked: cannot set NameSpace.member3 to Member(3) (For information on the locking protocol, see the `ReadOnly` class, of which `NameSpace` is a subclass.) By default the members will be sorted alphabetically by the member name. For example: >>> sorted_ns = NameSpace([Member(7), Member(3), Member(5)]) >>> sorted_ns NameSpace(<3 members>, sort=True) >>> list(sorted_ns) ['member3', 'member5', 'member7'] >>> sorted_ns[0] Member(3) But if the instance is created with the ``sort=False`` keyword argument, the original order of the members is preserved. 
For example: >>> unsorted_ns = NameSpace([Member(7), Member(3), Member(5)], sort=False) >>> unsorted_ns NameSpace(<3 members>, sort=False) >>> list(unsorted_ns) ['member7', 'member3', 'member5'] >>> unsorted_ns[0] Member(7) The `NameSpace` class is used in many places throughout freeIPA. For a few examples, see the `plugable.API` and the `frontend.Command` classes. """ def __init__(self, members, sort=True, name_attr='name'): """ :param members: An iterable providing the members. :param sort: Whether to sort the members by member name. """ if type(sort) is not bool: raise TypeError( TYPE_ERROR % ('sort', bool, sort, type(sort)) ) self.__sort = sort if sort: self.__members = tuple( sorted(members, key=lambda m: getattr(m, name_attr)) ) else: self.__members = tuple(members) self.__names = tuple(getattr(m, name_attr) for m in self.__members) self.__map = dict() for member in self.__members: name = check_name(getattr(member, name_attr)) if name in self.__map: raise AttributeError(OVERRIDE_ERROR % (self.__class__.__name__, name, self.__map[name], member) ) assert not hasattr(self, name), 'Ouch! Has attribute %r' % name self.__map[name] = member setattr(self, name, member) lock(self) def __len__(self): """ Return the number of members. """ return len(self.__members) def __iter__(self): """ Iterate through the member names. If this instance was created with ``sort=False``, the names will be in the same order as the members were passed to the constructor; otherwise<|fim▁hole|> """ for name in self.__names: yield name def __call__(self): """ Iterate through the members. If this instance was created with ``sort=False``, the members will be in the same order as they were passed to the constructor; otherwise the members will be in alphabetical order by name (which is the default). This method is like an ordered version of ``dict.itervalues()``. """ for member in self.__members: yield member def __contains__(self, name): """ Return ``True`` if namespace has a member named ``name``. """ return name in self.__map def __getitem__(self, key): """ Return a member by name or index, or return a slice of members. :param key: The name or index of a member, or a slice object. """ if isinstance(key, basestring): return self.__map[key] if type(key) in (int, slice): return self.__members[key] raise TypeError( TYPE_ERROR % ('key', (str, int, slice), key, type(key)) ) def __repr__(self): """ Return a pseudo-valid expression that could create this instance. """ cnt = len(self) if cnt == 1: m = 'member' else: m = 'members' return '%s(<%d %s>, sort=%r)' % ( self.__class__.__name__, cnt, m, self.__sort, ) def __todict__(self): """ Return a copy of the private dict mapping member name to member. """ return dict(self.__map)<|fim▁end|>
the names will be in alphabetical order (which is the default). This method is like an ordered version of ``dict.iterkeys()``.
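The base.py row above documents its read-only idiom at length; stripped to its core, the pattern is a __setattr__/__delattr__ guard keyed on a boolean flag. A minimal sketch:

class Lockable(object):
    _locked = False

    def lock(self):
        # Bypass our own guard when flipping the flag.
        object.__setattr__(self, '_locked', True)

    def __setattr__(self, name, value):
        if self._locked:
            raise AttributeError('locked: cannot set %s' % name)
        object.__setattr__(self, name, value)

    def __delattr__(self, name):
        if self._locked:
            raise AttributeError('locked: cannot delete %s' % name)
        object.__delattr__(self, name)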
<|file_name|>button.js<|end_file_name|><|fim▁begin|>(function(Button, set) { set += ": "; test(set + "Test btn class added", function() { var $elements; $elements = new Button($("<a>").add("<button>").add("<input>")); ok($elements.hasClass("btn"), "Buttons should be of the btn class"); }); test(set + "Test button creation, no arguments", function() { var button = new Button(); ok(button.hasClass("btn"), "Button should be of the btn class"); ok(button.is("button"), "Button should be a <button> element"); equal(button.attr("type"), "button", "Type attribute should be 'button'"); }); test(set + "Test default buttons", function() { var button = new Button(); button.primary(); equal(button.attr("class"), "btn " + button.options.PRIMARY, "Primary style button should be of the btn and '" + button.options.PRIMARY + "' classes only"); button.info(); equal(button.attr("class"), "btn " + button.options.INFO, "Info style button should be of the btn and '" + button.options.INFO + "' classes only"); button.success(); equal(button.attr("class"), "btn " + button.options.SUCCESS, "Success style button should be of the btn and '" + button.options.SUCCESS + "' classes only"); button.warning(); equal(button.attr("class"), "btn " + button.options.WARNING, "Warning style button should be of the btn and '" + button.options.WARNING + "' classes only"); button.danger(); equal(button.attr("class"), "btn " + button.options.DANGER, "Danger style button should be of the btn and '" + button.options.DANGER + "' classes only"); button.link(); equal(button.attr("class"), "btn " + button.options.LINK, "Link style button should be of the btn and '" + button.options.LINK + "' classes only"); button.defaultStyle(); equal(button.attr("class"), "btn", "Default button should be of the btn class only"); }); test(set+ "Test button sizes", function() { var button = new Button(); button.large(); equal(button.attr("class"), "btn " + button.sizes.LARGE, "Large button should be of the btn and '" + button.sizes.LARGE + "' classes only"); button.defaultSize(); equal(button.attr("class"), "btn", "Default button should be of the btn class only"); button.small(); equal(button.attr("class"), "btn " + button.sizes.SMALL, "Small button should be of the btn and '" + button.sizes.SMALL + "' classes only"); button.extraSmall(); equal(button.attr("class"), "btn " + button.sizes.EXTRASMALL, "Extra small button should be of the btn and '" + button.sizes.EXTRASMALL + "' classes only"); }); test(set + "Test block-level button", function () { var button = new Button(); button.block(); equal(button.attr("class"), "btn " + button.BLOCK, "Block level button should be of the btn and '" + button.BLOCK + "' classes only");<|fim▁hole|> }); test(set + "Test disable <button> and <input> elements", function() { var buttons, hasClass; buttons = new Button($("<button>").add("<input>")); hasClass = false; buttons.disable(); equal(buttons.attr("disabled"), "disabled", "Disabled elements should have the disabled attribute value of 'disabled'"); buttons.map(function() { hasClass = hasClass || $(this).hasClass(Button.prototype.DISABLED); }); equal(hasClass, false, "Elements should not be of the '" + Button.prototype.DISABLED + "' class"); }); test(set + "Test disable <a> element", function() { var button = new Button("<a>"); button.disable(); ok(button.attr("disabled") === undefined, "Disabled anchor elements should not have the disabled attribute"); ok(button.hasClass(button.DISABLED), "Disabled anchor elements should be of the '" + button.DISABLED + "' class"); }); 
test(set + "Test set text", function() { var anotherButton, buttons, functionExpectedValue, oldText, text; oldText = "Hello, tester!"; text = "Hello, world!"; buttons = new Button( $("<button>") .add("<input type='button'>") .add("<input type='submit'>") .add("<input type='reset'>") .add("<input type='text'>")); //not a real button anotherButton = new Button("<button>" + oldText + "</button>"); buttons.text(text); //.eq() returns a jQuery object, so no worries about the Button.prototype.text() calls, here! equal(buttons.eq(0).text(), text, "<button> element should have text() set to " + text); equal(buttons.eq(0).attr("value"), undefined, "<button> element should not have the value attribute set"); equal(buttons.eq(1).text(), "", "<input> button element should have text() set to an empty string"); equal(buttons.eq(1).attr("value"), text, "<input> button element should have the value attribute set to " + text); equal(buttons.eq(2).text(), "", "<input> submit element should have text() set to an empty string"); equal(buttons.eq(2).attr("value"), text, "<input> submit element should have the value attribute set to " + text); equal(buttons.eq(3).text(), "", "<input> reset element should have text() set to an empty string"); equal(buttons.eq(3).attr("value"), text, "<input> reset element should have the value attribute set to " + text); equal(buttons.eq(4).text(), text, "<input> text (not a button) element should have text() set to " + text); equal(buttons.eq(4).attr("value"), undefined, "<input> text (not a button) element should not have the value attribute set"); anotherButton.text(function(index, old) { return "" + text + "-" + index + "-" + old; }); functionExpectedValue = "" + text + "-0-" + oldText; equal($.fn.text.apply(anotherButton, []), functionExpectedValue, "Setting text with this function should return '" + functionExpectedValue + "' for text()"); }); test(set + "Test get text", function() { var buttons, text; text = "Hello, world, again!"; buttons = { button: new Button("<button>" + text + "</button>"), input: { button: new Button("<input type='button' value='" + text + "'>"), submit: new Button("<input type='submit' value='" + text + "'>"), reset: new Button("<input type='reset' value='" + text + "'>"), notAButton: new Button("<input type='text' value='" + text + "'>") }, anchor: new Button("<a>" + text + "</a>") }; equal(buttons.button.text(), text, "<button> element should have text() return '" + text + "'"); equal(buttons.input.button.text(), text, "<input> button element should have text() return '" + text + "'"); equal(buttons.input.submit.text(), text, "<input> submit element should have text() return '" + text + "'"); equal(buttons.input.reset.text(), text, "<input> reset element should have text() return '" + text + "'"); equal(buttons.input.notAButton.text(), "", "<input> text (not a button) element should have text() return an empty string"); equal(buttons.anchor.text(), text, "<a> element should have text() return '" + text + "'"); }); })(uk.co.stevenmeyer.bootstrap.css.Button, "css.Button");<|fim▁end|>
<|file_name|>webhtml.go<|end_file_name|><|fim▁begin|>// Copyright 2017 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package driver import ( "html/template" "github.com/google/pprof/third_party/d3flamegraph" ) // addTemplates adds a set of template definitions to templates. func addTemplates(templates *template.Template) { template.Must(templates.Parse(`{{define "d3flamegraphscript"}}` + d3flamegraph.JSSource + `{{end}}`)) template.Must(templates.Parse(`{{define "d3flamegraphcss"}}` + d3flamegraph.CSSSource + `{{end}}`)) template.Must(templates.Parse(` {{define "css"}} <style type="text/css"> * { margin: 0; padding: 0; box-sizing: border-box; } html, body { height: 100%; } body { font-family: 'Roboto', -apple-system, BlinkMacSystemFont, 'Segoe UI', Helvetica, Arial, sans-serif, 'Apple Color Emoji', 'Segoe UI Emoji', 'Segoe UI Symbol'; font-size: 13px; line-height: 1.4; display: flex; flex-direction: column; } a { color: #2a66d9; } .header { display: flex; align-items: center; height: 44px; min-height: 44px; background-color: #eee; color: #212121; padding: 0 1rem; } .header > div { margin: 0 0.125em; } .header .title h1 { font-size: 1.75em; margin-right: 1rem; margin-bottom: 4px; } .header .title a { color: #212121; text-decoration: none; } .header .title a:hover { text-decoration: underline; } .header .description { width: 100%; text-align: right; white-space: nowrap; } @media screen and (max-width: 799px) { .header input { display: none; } } #detailsbox { display: none; z-index: 1; position: fixed; top: 40px; right: 20px; background-color: #ffffff; box-shadow: 0 1px 5px rgba(0,0,0,.3); line-height: 24px; padding: 1em; text-align: left; } .header input { background: white url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24' style='pointer-events:none;display:block;width:100%25;height:100%25;fill:%23757575'%3E%3Cpath d='M15.5 14h-.79l-.28-.27C15.41 12.59 16 11.11 16 9.5 16 5.91 13.09 3 9.5 3S3 5.91 3 9.5 5.91 16 9.5 16c1.61.0 3.09-.59 4.23-1.57l.27.28v.79l5 4.99L20.49 19l-4.99-5zm-6 0C7.01 14 5 11.99 5 9.5S7.01 5 9.5 5 14 7.01 14 9.5 11.99 14 9.5 14z'/%3E%3C/svg%3E") no-repeat 4px center/20px 20px; border: 1px solid #d1d2d3; border-radius: 2px 0 0 2px; padding: 0.25em; padding-left: 28px; margin-left: 1em; font-family: 'Roboto', 'Noto', sans-serif; font-size: 1em; line-height: 24px; color: #212121; } .downArrow { border-top: .36em solid #ccc; border-left: .36em solid transparent; border-right: .36em solid transparent; margin-bottom: .05em; margin-left: .5em; transition: border-top-color 200ms; } .menu-item { height: 100%; text-transform: uppercase; font-family: 'Roboto Medium', -apple-system, BlinkMacSystemFont, 'Segoe UI', Helvetica, Arial, sans-serif, 'Apple Color Emoji', 'Segoe UI Emoji', 'Segoe UI Symbol'; position: relative; } .menu-item .menu-name:hover { opacity: 0.75; } .menu-item .menu-name:hover .downArrow { border-top-color: #666; } .menu-name { height: 100%; padding: 0 0.5em; display: flex; align-items: center; 
justify-content: center; } .menu-name a { text-decoration: none; color: #212121; } .submenu { display: none; z-index: 1; margin-top: -4px; min-width: 10em; position: absolute; left: 0px; background-color: white; box-shadow: 0 1px 5px rgba(0,0,0,.3); font-size: 100%; text-transform: none; } .menu-item, .submenu { user-select: none; -moz-user-select: none; -ms-user-select: none; -webkit-user-select: none; } .submenu hr { border: 0; border-top: 2px solid #eee; } .submenu a { display: block; padding: .5em 1em; text-decoration: none; } .submenu a:hover, .submenu a.active { color: white; background-color: #6b82d6; } .submenu a.disabled { color: gray; pointer-events: none; } .menu-check-mark { position: absolute; left: 2px; } .menu-delete-btn { position: absolute; right: 2px; } {{/* Used to disable events when a modal dialog is displayed */}} #dialog-overlay { display: none; position: fixed; left: 0px; top: 0px; width: 100%; height: 100%; background-color: rgba(1,1,1,0.1); } .dialog { {{/* Displayed centered horizontally near the top */}} display: none; position: fixed; margin: 0px; top: 60px; left: 50%; transform: translateX(-50%); z-index: 3; font-size: 125%; background-color: #ffffff; box-shadow: 0 1px 5px rgba(0,0,0,.3); } .dialog-header { font-size: 120%; border-bottom: 1px solid #CCCCCC; width: 100%; text-align: center; background: #EEEEEE; user-select: none; } .dialog-footer { border-top: 1px solid #CCCCCC; width: 100%; text-align: right; padding: 10px; } .dialog-error { margin: 10px; color: red; } .dialog input { margin: 10px; font-size: inherit; } .dialog button { margin-left: 10px; font-size: inherit; } #save-dialog, #delete-dialog { width: 50%; max-width: 20em; } #delete-prompt { padding: 10px; } #content { overflow-y: scroll; padding: 1em; } #top { overflow-y: scroll; } #graph { overflow: hidden; } #graph svg { width: 100%; height: auto; padding: 10px; } #content.source .filename { margin-top: 0; margin-bottom: 1em; font-size: 120%; } #content.source pre { margin-bottom: 3em; } table { border-spacing: 0px; width: 100%; padding-bottom: 1em; white-space: nowrap; } table thead { font-family: 'Roboto Medium', -apple-system, BlinkMacSystemFont, 'Segoe UI', Helvetica, Arial, sans-serif, 'Apple Color Emoji', 'Segoe UI Emoji', 'Segoe UI Symbol'; } table tr th { position: sticky; top: 0; background-color: #ddd; text-align: right; padding: .3em .5em; } table tr td { padding: .3em .5em; text-align: right; } #top table tr th:nth-child(6), #top table tr th:nth-child(7), #top table tr td:nth-child(6), #top table tr td:nth-child(7) { text-align: left; } #top table tr td:nth-child(6) { width: 100%; text-overflow: ellipsis; overflow: hidden; white-space: nowrap; } #flathdr1, #flathdr2, #cumhdr1, #cumhdr2, #namehdr { cursor: ns-resize; } .hilite { background-color: #ebf5fb; font-weight: bold; } </style> {{end}} {{define "header"}} <div class="header"> <div class="title"> <h1><a href="./">pprof</a></h1> </div> <div id="view" class="menu-item"> <div class="menu-name"> View <i class="downArrow"></i> </div> <div class="submenu"> <a title="{{.Help.top}}" href="./top" id="topbtn">Top</a> <a title="{{.Help.graph}}" href="./" id="graphbtn">Graph</a> <a title="{{.Help.flamegraph}}" href="./flamegraph" id="flamegraph">Flame Graph</a> <a title="{{.Help.peek}}" href="./peek" id="peek">Peek</a> <a title="{{.Help.list}}" href="./source" id="list">Source</a> <a title="{{.Help.disasm}}" href="./disasm" id="disasm">Disassemble</a> </div> </div> {{$sampleLen := len .SampleTypes}} {{if gt $sampleLen 1}} <div id="sample" 
class="menu-item"> <div class="menu-name"> Sample <i class="downArrow"></i> </div> <div class="submenu"> {{range .SampleTypes}} <a href="?si={{.}}" id="{{.}}">{{.}}</a> {{end}} </div> </div> {{end}} <div id="refine" class="menu-item"> <div class="menu-name"> Refine <i class="downArrow"></i> </div> <div class="submenu"> <a title="{{.Help.focus}}" href="?" id="focus">Focus</a> <a title="{{.Help.ignore}}" href="?" id="ignore">Ignore</a> <a title="{{.Help.hide}}" href="?" id="hide">Hide</a> <a title="{{.Help.show}}" href="?" id="show">Show</a> <a title="{{.Help.show_from}}" href="?" id="show-from">Show from</a> <hr> <a title="{{.Help.reset}}" href="?">Reset</a> </div> </div> <div id="config" class="menu-item"> <div class="menu-name"> Config <i class="downArrow"></i> </div> <div class="submenu"> <a title="{{.Help.save_config}}" id="save-config">Save as ...</a> <hr> {{range .Configs}} <a href="{{.URL}}"> {{if .Current}}<span class="menu-check-mark">✓</span>{{end}} {{.Name}} {{if .UserConfig}}<span class="menu-delete-btn" data-config={{.Name}}>🗙</span>{{end}} </a> {{end}} </div> </div> <div id="download" class="menu-item"> <div class="menu-name"> <a href="./download">Download</a> </div> </div> <div> <input id="search" type="text" placeholder="Search regexp" autocomplete="off" autocapitalize="none" size=40> </div> <div class="description"> <a title="{{.Help.details}}" href="#" id="details">{{.Title}}</a> <div id="detailsbox"> {{range .Legend}}<div>{{.}}</div>{{end}} </div> </div> </div> <div id="dialog-overlay"></div> <div class="dialog" id="save-dialog"> <div class="dialog-header">Save options as</div> <datalist id="config-list"> {{range .Configs}}{{if .UserConfig}}<option value="{{.Name}}" />{{end}}{{end}} </datalist> <input id="save-name" type="text" list="config-list" placeholder="New config" /> <div class="dialog-footer"> <span class="dialog-error" id="save-error"></span> <button id="save-cancel">Cancel</button> <button id="save-confirm">Save</button> </div> </div> <div class="dialog" id="delete-dialog"> <div class="dialog-header" id="delete-dialog-title">Delete config</div> <div id="delete-prompt"></div> <div class="dialog-footer"> <span class="dialog-error" id="delete-error"></span> <button id="delete-cancel">Cancel</button> <button id="delete-confirm">Delete</button> </div> </div> <div id="errors">{{range .Errors}}<div>{{.}}</div>{{end}}</div> {{end}} {{define "graph" -}} <!DOCTYPE html> <html> <head> <meta charset="utf-8"> <title>{{.Title}}</title> {{template "css" .}} </head> <body> {{template "header" .}} <div id="graph"> {{.HTMLBody}} </div> {{template "script" .}} <script>viewer(new URL(window.location.href), {{.Nodes}});</script> </body> </html> {{end}} {{define "script"}} <script> // Make svg pannable and zoomable. // Call clickHandler(t) if a click event is caught by the pan event handlers. function initPanAndZoom(svg, clickHandler) { 'use strict'; // Current mouse/touch handling mode const IDLE = 0; const MOUSEPAN = 1; const TOUCHPAN = 2; const TOUCHZOOM = 3; let mode = IDLE; // State needed to implement zooming. let currentScale = 1.0; const initWidth = svg.viewBox.baseVal.width; const initHeight = svg.viewBox.baseVal.height; // State needed to implement panning. 
let panLastX = 0; // Last event X coordinate let panLastY = 0; // Last event Y coordinate let moved = false; // Have we seen significant movement let touchid = null; // Current touch identifier // State needed for pinch zooming let touchid2 = null; // Second id for pinch zooming let initGap = 1.0; // Starting gap between two touches let initScale = 1.0; // currentScale when pinch zoom started let centerPoint = null; // Center point for scaling // Convert event coordinates to svg coordinates. function toSvg(x, y) { const p = svg.createSVGPoint(); p.x = x; p.y = y; let m = svg.getCTM(); if (m == null) m = svg.getScreenCTM(); // Firefox workaround. return p.matrixTransform(m.inverse());<|fim▁hole|> // Change the scaling for the svg to s, keeping the point denoted // by u (in svg coordinates]) fixed at the same screen location. function rescale(s, u) { // Limit to a good range. if (s < 0.2) s = 0.2; if (s > 10.0) s = 10.0; currentScale = s; // svg.viewBox defines the visible portion of the user coordinate // system. So to magnify by s, divide the visible portion by s, // which will then be stretched to fit the viewport. const vb = svg.viewBox; const w1 = vb.baseVal.width; const w2 = initWidth / s; const h1 = vb.baseVal.height; const h2 = initHeight / s; vb.baseVal.width = w2; vb.baseVal.height = h2; // We also want to adjust vb.baseVal.x so that u.x remains at same // screen X coordinate. In other words, want to change it from x1 to x2 // so that: // (u.x - x1) / w1 = (u.x - x2) / w2 // Simplifying that, we get // (u.x - x1) * (w2 / w1) = u.x - x2 // x2 = u.x - (u.x - x1) * (w2 / w1) vb.baseVal.x = u.x - (u.x - vb.baseVal.x) * (w2 / w1); vb.baseVal.y = u.y - (u.y - vb.baseVal.y) * (h2 / h1); } function handleWheel(e) { if (e.deltaY == 0) return; // Change scale factor by 1.1 or 1/1.1 rescale(currentScale * (e.deltaY < 0 ? 1.1 : (1/1.1)), toSvg(e.offsetX, e.offsetY)); } function setMode(m) { mode = m; touchid = null; touchid2 = null; } function panStart(x, y) { moved = false; panLastX = x; panLastY = y; } function panMove(x, y) { let dx = x - panLastX; let dy = y - panLastY; if (Math.abs(dx) <= 2 && Math.abs(dy) <= 2) return; // Ignore tiny moves moved = true; panLastX = x; panLastY = y; // Firefox workaround: get dimensions from parentNode. const swidth = svg.clientWidth || svg.parentNode.clientWidth; const sheight = svg.clientHeight || svg.parentNode.clientHeight; // Convert deltas from screen space to svg space. dx *= (svg.viewBox.baseVal.width / swidth); dy *= (svg.viewBox.baseVal.height / sheight); svg.viewBox.baseVal.x -= dx; svg.viewBox.baseVal.y -= dy; } function handleScanStart(e) { if (e.button != 0) return; // Do not catch right-clicks etc. setMode(MOUSEPAN); panStart(e.clientX, e.clientY); e.preventDefault(); svg.addEventListener('mousemove', handleScanMove); } function handleScanMove(e) { if (e.buttons == 0) { // Missed an end event, perhaps because mouse moved outside window. setMode(IDLE); svg.removeEventListener('mousemove', handleScanMove); return; } if (mode == MOUSEPAN) panMove(e.clientX, e.clientY); } function handleScanEnd(e) { if (mode == MOUSEPAN) panMove(e.clientX, e.clientY); setMode(IDLE); svg.removeEventListener('mousemove', handleScanMove); if (!moved) clickHandler(e.target); } // Find touch object with specified identifier. 
function findTouch(tlist, id) { for (const t of tlist) { if (t.identifier == id) return t; } return null; } // Return distance between two touch points function touchGap(t1, t2) { const dx = t1.clientX - t2.clientX; const dy = t1.clientY - t2.clientY; return Math.hypot(dx, dy); } function handleTouchStart(e) { if (mode == IDLE && e.changedTouches.length == 1) { // Start touch based panning const t = e.changedTouches[0]; setMode(TOUCHPAN); touchid = t.identifier; panStart(t.clientX, t.clientY); e.preventDefault(); } else if (mode == TOUCHPAN && e.touches.length == 2) { // Start pinch zooming setMode(TOUCHZOOM); const t1 = e.touches[0]; const t2 = e.touches[1]; touchid = t1.identifier; touchid2 = t2.identifier; initScale = currentScale; initGap = touchGap(t1, t2); centerPoint = toSvg((t1.clientX + t2.clientX) / 2, (t1.clientY + t2.clientY) / 2); e.preventDefault(); } } function handleTouchMove(e) { if (mode == TOUCHPAN) { const t = findTouch(e.changedTouches, touchid); if (t == null) return; if (e.touches.length != 1) { setMode(IDLE); return; } panMove(t.clientX, t.clientY); e.preventDefault(); } else if (mode == TOUCHZOOM) { // Get two touches; new gap; rescale to ratio. const t1 = findTouch(e.touches, touchid); const t2 = findTouch(e.touches, touchid2); if (t1 == null || t2 == null) return; const gap = touchGap(t1, t2); rescale(initScale * gap / initGap, centerPoint); e.preventDefault(); } } function handleTouchEnd(e) { if (mode == TOUCHPAN) { const t = findTouch(e.changedTouches, touchid); if (t == null) return; panMove(t.clientX, t.clientY); setMode(IDLE); e.preventDefault(); if (!moved) clickHandler(t.target); } else if (mode == TOUCHZOOM) { setMode(IDLE); e.preventDefault(); } } svg.addEventListener('mousedown', handleScanStart); svg.addEventListener('mouseup', handleScanEnd); svg.addEventListener('touchstart', handleTouchStart); svg.addEventListener('touchmove', handleTouchMove); svg.addEventListener('touchend', handleTouchEnd); svg.addEventListener('wheel', handleWheel, true); } function initMenus() { 'use strict'; let activeMenu = null; let activeMenuHdr = null; function cancelActiveMenu() { if (activeMenu == null) return; activeMenu.style.display = 'none'; activeMenu = null; activeMenuHdr = null; } // Set click handlers on every menu header. for (const menu of document.getElementsByClassName('submenu')) { const hdr = menu.parentElement; if (hdr == null) return; if (hdr.classList.contains('disabled')) return; function showMenu(e) { // menu is a child of hdr, so this event can fire for clicks // inside menu. Ignore such clicks. if (e.target.parentElement != hdr) return; activeMenu = menu; activeMenuHdr = hdr; menu.style.display = 'block'; } hdr.addEventListener('mousedown', showMenu); hdr.addEventListener('touchstart', showMenu); } // If there is an active menu and a down event outside, retract the menu. for (const t of ['mousedown', 'touchstart']) { document.addEventListener(t, (e) => { // Note: to avoid unnecessary flicker, if the down event is inside // the active menu header, do not retract the menu. if (activeMenuHdr != e.target.closest('.menu-item')) { cancelActiveMenu(); } }, { passive: true, capture: true }); } // If there is an active menu and an up event inside, retract the menu. 
document.addEventListener('mouseup', (e) => { if (activeMenu == e.target.closest('.submenu')) { cancelActiveMenu(); } }, { passive: true, capture: true }); } function sendURL(method, url, done) { fetch(url.toString(), {method: method}) .then((response) => { done(response.ok); }) .catch((error) => { done(false); }); } // Initialize handlers for saving/loading configurations. function initConfigManager() { 'use strict'; // Initialize various elements. function elem(id) { const result = document.getElementById(id); if (!result) console.warn('element ' + id + ' not found'); return result; } const overlay = elem('dialog-overlay'); const saveDialog = elem('save-dialog'); const saveInput = elem('save-name'); const saveError = elem('save-error'); const delDialog = elem('delete-dialog'); const delPrompt = elem('delete-prompt'); const delError = elem('delete-error'); let currentDialog = null; let currentDeleteTarget = null; function showDialog(dialog) { if (currentDialog != null) { overlay.style.display = 'none'; currentDialog.style.display = 'none'; } currentDialog = dialog; if (dialog != null) { overlay.style.display = 'block'; dialog.style.display = 'block'; } } function cancelDialog(e) { showDialog(null); } // Show dialog for saving the current config. function showSaveDialog(e) { saveError.innerText = ''; showDialog(saveDialog); saveInput.focus(); } // Commit save config. function commitSave(e) { const name = saveInput.value; const url = new URL(document.URL); // Set path relative to existing path. url.pathname = new URL('./saveconfig', document.URL).pathname; url.searchParams.set('config', name); saveError.innerText = ''; sendURL('POST', url, (ok) => { if (!ok) { saveError.innerText = 'Save failed'; } else { showDialog(null); location.reload(); // Reload to show updated config menu } }); } function handleSaveInputKey(e) { if (e.key === 'Enter') commitSave(e); } function deleteConfig(e, elem) { e.preventDefault(); const config = elem.dataset.config; delPrompt.innerText = 'Delete ' + config + '?'; currentDeleteTarget = elem; showDialog(delDialog); } function commitDelete(e, elem) { if (!currentDeleteTarget) return; const config = currentDeleteTarget.dataset.config; const url = new URL('./deleteconfig', document.URL); url.searchParams.set('config', config); delError.innerText = ''; sendURL('DELETE', url, (ok) => { if (!ok) { delError.innerText = 'Delete failed'; return; } showDialog(null); // Remove menu entry for this config. if (currentDeleteTarget && currentDeleteTarget.parentElement) { currentDeleteTarget.parentElement.remove(); } }); } // Bind event on elem to fn. function bind(event, elem, fn) { if (elem == null) return; elem.addEventListener(event, fn); if (event == 'click') { // Also enable via touch. elem.addEventListener('touchstart', fn); } } bind('click', elem('save-config'), showSaveDialog); bind('click', elem('save-cancel'), cancelDialog); bind('click', elem('save-confirm'), commitSave); bind('keydown', saveInput, handleSaveInputKey); bind('click', elem('delete-cancel'), cancelDialog); bind('click', elem('delete-confirm'), commitDelete); // Activate deletion button for all config entries in menu. for (const del of Array.from(document.getElementsByClassName('menu-delete-btn'))) { bind('click', del, (e) => { deleteConfig(e, del); }); } } function viewer(baseUrl, nodes) { 'use strict'; // Elements const search = document.getElementById('search'); const graph0 = document.getElementById('graph0'); const svg = (graph0 == null ? 
null : graph0.parentElement); const toptable = document.getElementById('toptable'); let regexpActive = false; let selected = new Map(); let origFill = new Map(); let searchAlarm = null; let buttonsEnabled = true; function handleDetails(e) { e.preventDefault(); const detailsText = document.getElementById('detailsbox'); if (detailsText != null) { if (detailsText.style.display === 'block') { detailsText.style.display = 'none'; } else { detailsText.style.display = 'block'; } } } function handleKey(e) { if (e.keyCode != 13) return; setHrefParams(window.location, function (params) { params.set('f', search.value); }); e.preventDefault(); } function handleSearch() { // Delay expensive processing so a flurry of key strokes is handled once. if (searchAlarm != null) { clearTimeout(searchAlarm); } searchAlarm = setTimeout(selectMatching, 300); regexpActive = true; updateButtons(); } function selectMatching() { searchAlarm = null; let re = null; if (search.value != '') { try { re = new RegExp(search.value); } catch (e) { // TODO: Display error state in search box return; } } function match(text) { return re != null && re.test(text); } // drop currently selected items that do not match re. selected.forEach(function(v, n) { if (!match(nodes[n])) { unselect(n, document.getElementById('node' + n)); } }) // add matching items that are not currently selected. if (nodes) { for (let n = 0; n < nodes.length; n++) { if (!selected.has(n) && match(nodes[n])) { select(n, document.getElementById('node' + n)); } } } updateButtons(); } function toggleSvgSelect(elem) { // Walk up to immediate child of graph0 while (elem != null && elem.parentElement != graph0) { elem = elem.parentElement; } if (!elem) return; // Disable regexp mode. regexpActive = false; const n = nodeId(elem); if (n < 0) return; if (selected.has(n)) { unselect(n, elem); } else { select(n, elem); } updateButtons(); } function unselect(n, elem) { if (elem == null) return; selected.delete(n); setBackground(elem, false); } function select(n, elem) { if (elem == null) return; selected.set(n, true); setBackground(elem, true); } function nodeId(elem) { const id = elem.id; if (!id) return -1; if (!id.startsWith('node')) return -1; const n = parseInt(id.slice(4), 10); if (isNaN(n)) return -1; if (n < 0 || n >= nodes.length) return -1; return n; } function setBackground(elem, set) { // Handle table row highlighting. if (elem.nodeName == 'TR') { elem.classList.toggle('hilite', set); return; } // Handle svg element highlighting. const p = findPolygon(elem); if (p != null) { if (set) { origFill.set(p, p.style.fill); p.style.fill = '#ccccff'; } else if (origFill.has(p)) { p.style.fill = origFill.get(p); } } } function findPolygon(elem) { if (elem.localName == 'polygon') return elem; for (const c of elem.children) { const p = findPolygon(c); if (p != null) return p; } return null; } // convert a string to a regexp that matches that string. function quotemeta(str) { return str.replace(/([\\\.?+*\[\](){}|^$])/g, '\\$1'); } function setSampleIndexLink(id) { const elem = document.getElementById(id); if (elem != null) { setHrefParams(elem, function (params) { params.set("si", id); }); } } // Update id's href to reflect current selection whenever it is // liable to be followed. function makeSearchLinkDynamic(id) { const elem = document.getElementById(id); if (elem == null) return; // Most links copy current selection into the 'f' parameter, // but Refine menu links are different. 
let param = 'f'; if (id == 'ignore') param = 'i'; if (id == 'hide') param = 'h'; if (id == 'show') param = 's'; if (id == 'show-from') param = 'sf'; // We update on mouseenter so middle-click/right-click work properly. elem.addEventListener('mouseenter', updater); elem.addEventListener('touchstart', updater); function updater() { // The selection can be in one of two modes: regexp-based or // list-based. Construct regular expression depending on mode. let re = regexpActive ? search.value : Array.from(selected.keys()).map(key => quotemeta(nodes[key])).join('|'); setHrefParams(elem, function (params) { if (re != '') { // For focus/show/show-from, forget old parameter. For others, add to re. if (param != 'f' && param != 's' && param != 'sf' && params.has(param)) { const old = params.get(param); if (old != '') { re += '|' + old; } } params.set(param, re); } else { params.delete(param); } }); } } function setHrefParams(elem, paramSetter) { let url = new URL(elem.href); url.hash = ''; // Copy params from this page's URL. const params = url.searchParams; for (const p of new URLSearchParams(window.location.search)) { params.set(p[0], p[1]); } // Give the params to the setter to modify. paramSetter(params); elem.href = url.toString(); } function handleTopClick(e) { // Walk back until we find TR and then get the Name column (index 5) let elem = e.target; while (elem != null && elem.nodeName != 'TR') { elem = elem.parentElement; } if (elem == null || elem.children.length < 6) return; e.preventDefault(); const tr = elem; const td = elem.children[5]; if (td.nodeName != 'TD') return; const name = td.innerText; const index = nodes.indexOf(name); if (index < 0) return; // Disable regexp mode. regexpActive = false; if (selected.has(index)) { unselect(index, elem); } else { select(index, elem); } updateButtons(); } function updateButtons() { const enable = (search.value != '' || selected.size != 0); if (buttonsEnabled == enable) return; buttonsEnabled = enable; for (const id of ['focus', 'ignore', 'hide', 'show', 'show-from']) { const link = document.getElementById(id); if (link != null) { link.classList.toggle('disabled', !enable); } } } // Initialize button states updateButtons(); // Setup event handlers initMenus(); if (svg != null) { initPanAndZoom(svg, toggleSvgSelect); } if (toptable != null) { toptable.addEventListener('mousedown', handleTopClick); toptable.addEventListener('touchstart', handleTopClick); } const ids = ['topbtn', 'graphbtn', 'flamegraph', 'peek', 'list', 'disasm', 'focus', 'ignore', 'hide', 'show', 'show-from']; ids.forEach(makeSearchLinkDynamic); const sampleIDs = [{{range .SampleTypes}}'{{.}}', {{end}}]; sampleIDs.forEach(setSampleIndexLink); // Bind action to button with specified id. function addAction(id, action) { const btn = document.getElementById(id); if (btn != null) { btn.addEventListener('click', action); btn.addEventListener('touchstart', action); } } addAction('details', handleDetails); initConfigManager(); search.addEventListener('input', handleSearch); search.addEventListener('keydown', handleKey); // Give initial focus to main container so it can be scrolled using keys. 
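// (Note, an assumption about the markup: the 'bodycontainer' element is
// expected to be focusable, e.g. to carry a tabindex attribute; calling
// focus() on a plain non-focusable div would otherwise be a no-op.)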
const main = document.getElementById('bodycontainer'); if (main) { main.focus(); } } </script> {{end}} {{define "top" -}} <!DOCTYPE html> <html> <head> <meta charset="utf-8"> <title>{{.Title}}</title> {{template "css" .}} <style type="text/css"> </style> </head> <body> {{template "header" .}} <div id="top"> <table id="toptable"> <thead> <tr> <th id="flathdr1">Flat</th> <th id="flathdr2">Flat%</th> <th>Sum%</th> <th id="cumhdr1">Cum</th> <th id="cumhdr2">Cum%</th> <th id="namehdr">Name</th> <th>Inlined?</th> </tr> </thead> <tbody id="rows"></tbody> </table> </div> {{template "script" .}} <script> function makeTopTable(total, entries) { const rows = document.getElementById('rows'); if (rows == null) return; // Store initial index in each entry so we have stable node ids for selection. for (let i = 0; i < entries.length; i++) { entries[i].Id = 'node' + i; } // Which column are we currently sorted by and in what order? let currentColumn = ''; let descending = false; sortBy('Flat'); function sortBy(column) { // Update sort criteria if (column == currentColumn) { descending = !descending; // Reverse order } else { currentColumn = column; descending = (column != 'Name'); } // Sort according to current criteria. function cmp(a, b) { const av = a[currentColumn]; const bv = b[currentColumn]; if (av < bv) return -1; if (av > bv) return +1; return 0; } entries.sort(cmp); if (descending) entries.reverse(); function addCell(tr, val) { const td = document.createElement('td'); td.textContent = val; tr.appendChild(td); } function percent(v) { return (v * 100.0 / total).toFixed(2) + '%'; } // Generate rows const fragment = document.createDocumentFragment(); let sum = 0; for (const row of entries) { const tr = document.createElement('tr'); tr.id = row.Id; sum += row.Flat; addCell(tr, row.FlatFormat); addCell(tr, percent(row.Flat)); addCell(tr, percent(sum)); addCell(tr, row.CumFormat); addCell(tr, percent(row.Cum)); addCell(tr, row.Name); addCell(tr, row.InlineLabel); fragment.appendChild(tr); } rows.textContent = ''; // Remove old rows rows.appendChild(fragment); } // Make different column headers trigger sorting. 
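// (Illustrative note, not part of the original source: given the sortBy logic
// above, repeated clicks on the same header toggle descending/ascending,
// and switching to a different column starts descending -- except 'Name',
// which starts ascending.)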
function bindSort(id, column) { const hdr = document.getElementById(id); if (hdr == null) return; const fn = function() { sortBy(column) }; hdr.addEventListener('click', fn); hdr.addEventListener('touch', fn); } bindSort('flathdr1', 'Flat'); bindSort('flathdr2', 'Flat'); bindSort('cumhdr1', 'Cum'); bindSort('cumhdr2', 'Cum'); bindSort('namehdr', 'Name'); } viewer(new URL(window.location.href), {{.Nodes}}); makeTopTable({{.Total}}, {{.Top}}); </script> </body> </html> {{end}} {{define "sourcelisting" -}} <!DOCTYPE html> <html> <head> <meta charset="utf-8"> <title>{{.Title}}</title> {{template "css" .}} {{template "weblistcss" .}} {{template "weblistjs" .}} </head> <body> {{template "header" .}} <div id="content" class="source"> {{.HTMLBody}} </div> {{template "script" .}} <script>viewer(new URL(window.location.href), null);</script> </body> </html> {{end}} {{define "plaintext" -}} <!DOCTYPE html> <html> <head> <meta charset="utf-8"> <title>{{.Title}}</title> {{template "css" .}} </head> <body> {{template "header" .}} <div id="content"> <pre> {{.TextBody}} </pre> </div> {{template "script" .}} <script>viewer(new URL(window.location.href), null);</script> </body> </html> {{end}} {{define "flamegraph" -}} <!DOCTYPE html> <html> <head> <meta charset="utf-8"> <title>{{.Title}}</title> {{template "css" .}} <style type="text/css">{{template "d3flamegraphcss" .}}</style> <style type="text/css"> .flamegraph-content { width: 90%; min-width: 80%; margin-left: 5%; } .flamegraph-details { height: 1.2em; width: 90%; min-width: 90%; margin-left: 5%; padding: 15px 0 35px; } </style> </head> <body> {{template "header" .}} <div id="bodycontainer"> <div id="flamegraphdetails" class="flamegraph-details"></div> <div class="flamegraph-content"> <div id="chart"></div> </div> </div> {{template "script" .}} <script>viewer(new URL(window.location.href), {{.Nodes}});</script> <script>{{template "d3flamegraphscript" .}}</script> <script> var data = {{.FlameGraph}}; var width = document.getElementById('chart').clientWidth; var flameGraph = flamegraph() .width(width) .cellHeight(18) .minFrameSize(1) .transitionDuration(750) .inverted(true) .sort(true) .title('') .tooltip(false) .setDetailsElement(document.getElementById('flamegraphdetails')); // <full name> (percentage, value) flameGraph.label((d) => d.data.f + ' (' + d.data.p + ', ' + d.data.l + ')'); flameGraph.setColorHue('warm'); select('#chart') .datum(data) .call(flameGraph); function clear() { flameGraph.clear(); } function resetZoom() { flameGraph.resetZoom(); } window.addEventListener('resize', function() { var width = document.getElementById('chart').clientWidth; var graphs = document.getElementsByClassName('d3-flame-graph'); if (graphs.length > 0) { graphs[0].setAttribute('width', width); } flameGraph.width(width); flameGraph.resetZoom(); }, true); var search = document.getElementById('search'); var searchAlarm = null; function selectMatching() { searchAlarm = null; if (search.value != '') { flameGraph.search(search.value); } else { flameGraph.clear(); } } function handleSearch() { // Delay expensive processing so a flurry of key strokes is handled once. if (searchAlarm != null) { clearTimeout(searchAlarm); } searchAlarm = setTimeout(selectMatching, 300); } search.addEventListener('input', handleSearch); </script> </body> </html> {{end}} `)) }<|fim▁end|>
}
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
# -*- coding: utf-8 -*-

from . import stock_return_picking
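# (Note: importing the submodule above is what executes its top-level code --
# e.g. class definitions -- as a side effect when this package is imported.)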
<|file_name|>asn2wrs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # # asn2wrs.py # ASN.1 to Wireshark dissector compiler # Copyright 2004 Tomas Kukosa # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, and/or sell copies of the Software, and to permit persons # to whom the Software is furnished to do so, provided that the above # copyright notice(s) and this permission notice appear in all copies of # the Software and that both the above copyright notice(s) and this # permission notice appear in supporting documentation. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT # OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR # HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL # INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING # FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, # NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION # WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. # # Except as contained in this notice, the name of a copyright holder # shall not be used in advertising or otherwise to promote the sale, use # or other dealings in this Software without prior written authorization # of the copyright holder. """ASN.1 to Wireshark dissector compiler""" # # Compiler from ASN.1 specification to the Wireshark dissector # # Based on ASN.1 to Python compiler from Aaron S. 
Lav's PyZ3950 package licensed under the X Consortium license # http://www.pobox.com/~asl2/software/PyZ3950/ # (ASN.1 to Python compiler functionality is broken but not removed, it could be revived if necessary) # # It requires Dave Beazley's PLY parsing package licensed under the LGPL (tested with version 2.3) # http://www.dabeaz.com/ply/ # # # ITU-T Recommendation X.680 (07/2002), # Information technology - Abstract Syntax Notation One (ASN.1): Specification of basic notation # # ITU-T Recommendation X.681 (07/2002), # Information technology - Abstract Syntax Notation One (ASN.1): Information object specification # # ITU-T Recommendation X.682 (07/2002), # Information technology - Abstract Syntax Notation One (ASN.1): Constraint specification # # ITU-T Recommendation X.683 (07/2002), # Information technology - Abstract Syntax Notation One (ASN.1): Parameterization of ASN.1 specifications # # ITU-T Recommendation X.880 (07/1994), # Information technology - Remote Operations: Concepts, model and notation # import warnings import re import sys import os import os.path import time import getopt import traceback import lex import yacc if sys.version_info[0] < 3: from string import maketrans # OID name -> number conversion table oid_names = { '/itu-t' : 0, '/itu' : 0, '/ccitt' : 0, '/itu-r' : 0, '0/recommendation' : 0, '0.0/a' : 1, '0.0/b' : 2, '0.0/c' : 3, '0.0/d' : 4, '0.0/e' : 5, '0.0/f' : 6, '0.0/g' : 7, '0.0/h' : 8, '0.0/i' : 9, '0.0/j' : 10, '0.0/k' : 11, '0.0/l' : 12, '0.0/m' : 13, '0.0/n' : 14, '0.0/o' : 15, '0.0/p' : 16, '0.0/q' : 17, '0.0/r' : 18, '0.0/s' : 19, '0.0/t' : 20, '0.0/tseries' : 20, '0.0/u' : 21, '0.0/v' : 22, '0.0/w' : 23, '0.0/x' : 24, '0.0/y' : 25, '0.0/z' : 26, '0/question' : 1, '0/administration' : 2, '0/network-operator' : 3, '0/identified-organization' : 4, '0/r-recommendation' : 5, '0/data' : 9, '/iso' : 1, '1/standard' : 0, '1/registration-authority' : 1, '1/member-body' : 2, '1/identified-organization' : 3, '/joint-iso-itu-t' : 2, '/joint-iso-ccitt' : 2, '2/presentation' : 0, '2/asn1' : 1, '2/association-control' : 2, '2/reliable-transfer' : 3, '2/remote-operations' : 4, '2/ds' : 5, '2/directory' : 5, '2/mhs' : 6, '2/mhs-motis' : 6, '2/ccr' : 7, '2/oda' : 8, '2/ms' : 9, '2/osi-management' : 9, '2/transaction-processing' : 10, '2/dor' : 11, '2/distinguished-object-reference' : 11, '2/reference-data-transfe' : 12, '2/network-layer' : 13, '2/network-layer-management' : 13, '2/transport-layer' : 14, '2/transport-layer-management' : 14, '2/datalink-layer' : 15, '2/datalink-layer-managemen' : 15, '2/datalink-layer-management-information' : 15, '2/country' : 16, '2/registration-procedures' : 17, '2/registration-procedure' : 17, '2/physical-layer' : 18, '2/physical-layer-management' : 18, '2/mheg' : 19, '2/genericULS' : 20, '2/generic-upper-layers-security' : 20, '2/guls' : 20, '2/transport-layer-security-protocol' : 21, '2/network-layer-security-protocol' : 22, '2/international-organizations' : 23, '2/internationalRA' : 23, '2/sios' : 24, '2/uuid' : 25, '2/odp' : 26, '2/upu' : 40, } ITEM_FIELD_NAME = '_item' UNTAG_TYPE_NAME = '_untag' def asn2c(id): return id.replace('-', '_').replace('.', '_').replace('&', '_') input_file = None g_conform = None lexer = None in_oid = False class LexError(Exception): def __init__(self, tok, filename=None): self.tok = tok self.filename = filename self.msg = "Unexpected character %r" % (self.tok.value[0]) Exception.__init__(self, self.msg) def __repr__(self): return "%s:%d: %s" % (self.filename, self.tok.lineno, self.msg) __str__ = 
__repr__ class ParseError(Exception): def __init__(self, tok, filename=None): self.tok = tok self.filename = filename self.msg = "Unexpected token %s(%r)" % (self.tok.type, self.tok.value) Exception.__init__(self, self.msg) def __repr__(self): return "%s:%d: %s" % (self.filename, self.tok.lineno, self.msg) __str__ = __repr__ class DuplicateError(Exception): def __init__(self, type, ident): self.type = type self.ident = ident self.msg = "Duplicate %s for %s" % (self.type, self.ident) Exception.__init__(self, self.msg) def __repr__(self): return self.msg __str__ = __repr__ class CompError(Exception): def __init__(self, msg): self.msg = msg Exception.__init__(self, self.msg) def __repr__(self): return self.msg __str__ = __repr__ states = ( ('braceignore','exclusive'), ) precedence = ( ('left', 'UNION', 'BAR'), ('left', 'INTERSECTION', 'CIRCUMFLEX'), ) # 11 ASN.1 lexical items static_tokens = { r'::=' : 'ASSIGNMENT', # 11.16 Assignment lexical item r'\.\.' : 'RANGE', # 11.17 Range separator r'\.\.\.' : 'ELLIPSIS', # 11.18 Ellipsis r'\[\[' : 'LVERBRACK', # 11.19 Left version brackets r'\]\]' : 'RVERBRACK', # 11.20 Right version brackets # 11.26 Single character lexical items r'\{' : 'LBRACE', r'\}' : 'RBRACE', r'<' : 'LT', #r'>' : 'GT', r',' : 'COMMA', r'\.' : 'DOT', r'\(' : 'LPAREN', r'\)' : 'RPAREN', r'\[' : 'LBRACK', r'\]' : 'RBRACK', r'-' : 'MINUS', r':' : 'COLON', #r'=' : 'EQ', #r'"' : 'QUOTATION', #r"'" : 'APOSTROPHE', r';' : 'SEMICOLON', r'@' : 'AT', r'\!' : 'EXCLAMATION', r'\^' : 'CIRCUMFLEX', r'\&' : 'AMPERSAND', r'\|' : 'BAR' } # 11.27 Reserved words # all keys in reserved_words must start w/ upper case reserved_words = { 'ABSENT' : 'ABSENT', 'ABSTRACT-SYNTAX' : 'ABSTRACT_SYNTAX', 'ALL' : 'ALL', 'APPLICATION' : 'APPLICATION', 'AUTOMATIC' : 'AUTOMATIC', 'BEGIN' : 'BEGIN', 'BIT' : 'BIT', 'BOOLEAN' : 'BOOLEAN', 'BY' : 'BY', 'CHARACTER' : 'CHARACTER', 'CHOICE' : 'CHOICE', 'CLASS' : 'CLASS', 'COMPONENT' : 'COMPONENT', 'COMPONENTS' : 'COMPONENTS', 'CONSTRAINED' : 'CONSTRAINED', 'CONTAINING' : 'CONTAINING', 'DEFAULT' : 'DEFAULT', 'DEFINITIONS' : 'DEFINITIONS', 'EMBEDDED' : 'EMBEDDED', # 'ENCODED' : 'ENCODED', 'END' : 'END', 'ENUMERATED' : 'ENUMERATED', # 'EXCEPT' : 'EXCEPT', 'EXPLICIT' : 'EXPLICIT', 'EXPORTS' : 'EXPORTS', # 'EXTENSIBILITY' : 'EXTENSIBILITY', 'EXTERNAL' : 'EXTERNAL', 'FALSE' : 'FALSE', 'FROM' : 'FROM', 'GeneralizedTime' : 'GeneralizedTime', 'IDENTIFIER' : 'IDENTIFIER', 'IMPLICIT' : 'IMPLICIT', # 'IMPLIED' : 'IMPLIED', 'IMPORTS' : 'IMPORTS', 'INCLUDES' : 'INCLUDES', 'INSTANCE' : 'INSTANCE', 'INTEGER' : 'INTEGER', 'INTERSECTION' : 'INTERSECTION', 'MAX' : 'MAX', 'MIN' : 'MIN', 'MINUS-INFINITY' : 'MINUS_INFINITY', 'NULL' : 'NULL', 'OBJECT' : 'OBJECT', 'ObjectDescriptor' : 'ObjectDescriptor', 'OCTET' : 'OCTET', 'OF' : 'OF', 'OPTIONAL' : 'OPTIONAL', 'PATTERN' : 'PATTERN', 'PDV' : 'PDV', 'PLUS-INFINITY' : 'PLUS_INFINITY', 'PRESENT' : 'PRESENT', 'PRIVATE' : 'PRIVATE', 'REAL' : 'REAL', 'RELATIVE-OID' : 'RELATIVE_OID', 'SEQUENCE' : 'SEQUENCE', 'SET' : 'SET', 'SIZE' : 'SIZE', 'STRING' : 'STRING', 'SYNTAX' : 'SYNTAX', 'TAGS' : 'TAGS', 'TRUE' : 'TRUE', 'TYPE-IDENTIFIER' : 'TYPE_IDENTIFIER', 'UNION' : 'UNION', 'UNIQUE' : 'UNIQUE', 'UNIVERSAL' : 'UNIVERSAL', 'UTCTime' : 'UTCTime', 'WITH' : 'WITH', # X.208 obsolete but still used 'ANY' : 'ANY', 'DEFINED' : 'DEFINED', } for k in list(static_tokens.keys()): if static_tokens [k] == None: static_tokens [k] = k StringTypes = ['Numeric', 'Printable', 'IA5', 'BMP', 'Universal', 'UTF8', 'Teletex', 'T61', 'Videotex', 'Graphic', 'ISO646', 'Visible', 
'General'] for s in StringTypes: reserved_words[s + 'String'] = s + 'String' tokens = list(static_tokens.values()) \ + list(reserved_words.values()) \ + ['BSTRING', 'HSTRING', 'QSTRING', 'UCASE_IDENT', 'LCASE_IDENT', 'LCASE_IDENT_ASSIGNED', 'CLASS_IDENT', 'REAL_NUMBER', 'NUMBER', 'PYQUOTE'] cur_mod = __import__ (__name__) # XXX blech! for (k, v) in list(static_tokens.items ()): cur_mod.__dict__['t_' + v] = k # 11.10 Binary strings def t_BSTRING (t): r"'[01]*'B" return t # 11.12 Hexadecimal strings def t_HSTRING (t): r"'[0-9A-Fa-f]*'H" return t def t_QSTRING (t): r'"([^"]|"")*"' return t def t_UCASE_IDENT (t): r"[A-Z](-[a-zA-Z0-9]|[a-zA-Z0-9])*" # can't end w/ '-' if (is_class_ident(t.value)): t.type = 'CLASS_IDENT' if (is_class_syntax(t.value)): t.type = t.value t.type = reserved_words.get(t.value, t.type) return t lcase_ident_assigned = {} def t_LCASE_IDENT (t): r"[a-z](-[a-zA-Z0-9]|[a-zA-Z0-9])*" # can't end w/ '-' if (not in_oid and (t.value in lcase_ident_assigned)): t.type = 'LCASE_IDENT_ASSIGNED' return t # 11.9 Real numbers def t_REAL_NUMBER (t): r"[0-9]+\.[0-9]*(?!\.)" return t # 11.8 Numbers def t_NUMBER (t): r"0|([1-9][0-9]*)" return t # 11.6 Comments pyquote_str = 'PYQUOTE' def t_COMMENT(t): r"--(-[^\-\n]|[^\-\n])*(--|\n|-\n|$|-$)" if (t.value.find("\n") >= 0) : t.lexer.lineno += 1 if t.value[2:2+len (pyquote_str)] == pyquote_str: t.value = t.value[2+len(pyquote_str):] t.value = t.value.lstrip () t.type = pyquote_str return t return None t_ignore = " \t\r" def t_NEWLINE(t): r'\n+' t.lexer.lineno += t.value.count("\n") def t_error(t): global input_file raise LexError(t, input_file) # state 'braceignore' def t_braceignore_lbrace(t): r'\{' t.lexer.level +=1 def t_braceignore_rbrace(t): r'\}' t.lexer.level -=1 # If closing brace, return token if t.lexer.level == 0: t.type = 'RBRACE' return t def t_braceignore_QSTRING (t): r'"([^"]|"")*"' t.lexer.lineno += t.value.count("\n") def t_braceignore_COMMENT(t): r"--(-[^\-\n]|[^\-\n])*(--|\n|-\n|$|-$)" if (t.value.find("\n") >= 0) : t.lexer.lineno += 1 def t_braceignore_nonspace(t): r'[^\s\{\}\"-]+|-(?!-)' t_braceignore_ignore = " \t\r" def t_braceignore_NEWLINE(t): r'\n+' t.lexer.lineno += t.value.count("\n") def t_braceignore_error(t): t.lexer.skip(1) class Ctx: def __init__ (self, defined_dict, indent = 0): self.tags_def = 'EXPLICIT' # default = explicit self.indent_lev = 0 self.assignments = {} self.dependencies = {} self.pyquotes = [] self.defined_dict = defined_dict self.name_ctr = 0 def spaces (self): return " " * (4 * self.indent_lev) def indent (self): self.indent_lev += 1 def outdent (self): self.indent_lev -= 1 assert (self.indent_lev >= 0) def register_assignment (self, ident, val, dependencies): if ident in self.assignments: raise DuplicateError("assignment", ident) if ident in self.defined_dict: raise Exception("cross-module duplicates for %s" % ident) self.defined_dict [ident] = 1 self.assignments[ident] = val self.dependencies [ident] = dependencies return "" # return "#%s depends on %s" % (ident, str (dependencies)) def register_pyquote (self, val): self.pyquotes.append (val) return "" def output_assignments (self): already_output = {} text_list = [] assign_keys = list(self.assignments.keys()) to_output_count = len (assign_keys) while True: any_output = 0 for (ident, val) in list(self.assignments.items ()): if ident in already_output: continue ok = 1 for d in self.dependencies [ident]: if ((d not in already_output) and (d in assign_keys)): ok = 0 if ok: text_list.append ("%s=%s" % (ident, self.assignments [ident])) 
                    already_output [ident] = 1
                    any_output = 1
                    to_output_count -= 1
                    assert (to_output_count >= 0)
            if not any_output:
                if to_output_count == 0:
                    break
                # OK, we detected a cycle
                cycle_list = []
                for ident in list(self.assignments.keys ()):
                    if ident not in already_output:
                        depend_list = [d for d in self.dependencies[ident] if d in assign_keys]
                        cycle_list.append ("%s(%s)" % (ident, ",".join (depend_list)))
                text_list.append ("# Cycle XXX " + ",".join (cycle_list))
                # Emit the remaining (cyclic) assignments unordered so that
                # nothing is silently dropped.
                for (ident, val) in list(self.assignments.items ()):
                    if ident not in already_output:
                        text_list.append ("%s=%s" % (ident, self.assignments [ident]))
                break
        return "\n".join (text_list)

    def output_pyquotes (self):
        return "\n".join (self.pyquotes)

    def make_new_name (self):
        self.name_ctr += 1
        return "_compiler_generated_name_%d" % (self.name_ctr,)

#--- Flags for EXPORT, USER_DEFINED, NO_EMIT, MAKE_ENUM -------------------------------
EF_TYPE    = 0x0001
EF_VALS    = 0x0002
EF_ENUM    = 0x0004
EF_WS_DLL  = 0x0010  # exported from shared library
EF_EXTERN  = 0x0020
EF_NO_PROT = 0x0040
EF_NO_TYPE = 0x0080
EF_UCASE   = 0x0100
EF_TABLE   = 0x0400
EF_DEFINE  = 0x0800
EF_MODULE  = 0x1000

#--- common dependency computation ---
# Input  : list of items
#          dictionary with lists of dependencies
#
# Output : list of two outputs:
#          [0] list of items in dependency order
#          [1] list of dependency cycles
#
# For example, dependency_compute(['A', 'B'], {'A': ['B']}) returns
# (['B', 'A'], []) -- a dependency is emitted before the items that use it.
def dependency_compute(items, dependency, map_fn = lambda t: t, ignore_fn = lambda t: False):
    item_ord = []
    item_cyc = []
    x = {}  # already emitted
    #print '# Dependency computation'
    for t in items:
        if map_fn(t) in x:
            #print 'Continue: %s : %s' % (t, (map_fn(t))
            continue
        stack = [t]
        stackx = {t : dependency.get(t, [])[:]}
        #print 'Push: %s : %s' % (t, str(stackx[t]))
        while stack:
            if stackx[stack[-1]]:  # has dependencies
                d = stackx[stack[-1]].pop(0)
                if map_fn(d) in x or ignore_fn(d):
                    continue
                if d in stackx:  # cyclic dependency
                    c = stack[:]
                    c.reverse()
                    c = [d] + c[0:c.index(d)+1]
                    c.reverse()
                    item_cyc.append(c)
                    #print 'Cyclic: %s ' % (' -> '.join(c))
                    continue
                stack.append(d)
                stackx[d] = dependency.get(d, [])[:]
                #print 'Push: %s : %s' % (d, str(stackx[d]))
            else:
                #print 'Pop: %s' % (stack[-1])
                del stackx[stack[-1]]
                e = map_fn(stack.pop())
                if e in x:
                    continue
                #print 'Add: %s' % (e)
                item_ord.append(e)
                x[e] = True
    return (item_ord, item_cyc)

# Given a filename, return a relative path from epan/dissectors
def rel_dissector_path(filename):
    path_parts = os.path.abspath(filename).split(os.sep)
    while (len(path_parts) > 3 and path_parts[0] != 'asn1'):
        path_parts.pop(0)
    path_parts.insert(0, '..')
    path_parts.insert(0, '..')
    return '/'.join(path_parts)


#--- EthCtx -------------------------------------------------------------
class EthCtx:
    def __init__(self, conform, output, indent = 0):
        self.conform = conform
        self.output = output
        self.conform.ectx = self
        self.output.ectx = self
        self.encoding = 'per'
        self.aligned = False
        self.default_oid_variant = ''
        self.default_opentype_variant = ''
        self.default_containing_variant = '_pdu_new'
        self.default_embedded_pdv_cb = None
        self.default_external_type_cb = None
        self.remove_prefix = None
        self.srcdir = None
        self.emitted_pdu = {}
        self.module = {}
        self.module_ord = []
        self.all_type_attr = {}
        self.all_tags = {}
        self.all_vals = {}

    def encp(self):  # encoding protocol
        encp = self.encoding
        return encp

    # Encoding
    def Per(self): return self.encoding == 'per'
    def Ber(self): return self.encoding == 'ber'
    def Aligned(self): return self.aligned
    def Unaligned(self): return not self.aligned
    def NeedTags(self): return self.tag_opt or self.Ber()
    def NAPI(self): return False  #
disable planned features def Module(self): # current module name return self.modules[-1][0] def groups(self): return self.group_by_prot or (self.conform.last_group > 0) def dbg(self, d): if (self.dbgopt.find(d) >= 0): return True else: return False def value_max(self, a, b): if (a == 'MAX') or (b == 'MAX'): return 'MAX'; if a == 'MIN': return b; if b == 'MIN': return a; try: if (int(a) > int(b)): return a else: return b except (ValueError, TypeError): pass return "MAX((%s),(%s))" % (a, b) def value_min(self, a, b): if (a == 'MIN') or (b == 'MIN'): return 'MIN'; if a == 'MAX': return b; if b == 'MAX': return a; try: if (int(a) < int(b)): return a else: return b except (ValueError, TypeError): pass return "MIN((%s),(%s))" % (a, b) def value_get_eth(self, val): if isinstance(val, Value): return val.to_str(self) ethname = val if val in self.value: ethname = self.value[val]['ethname'] return ethname def value_get_val(self, nm): val = asn2c(nm) if nm in self.value: if self.value[nm]['import']: v = self.get_val_from_all(nm, self.value[nm]['import']) if v is None: msg = 'Need value of imported value identifier %s from %s (%s)' % (nm, self.value[nm]['import'], self.value[nm]['proto']) warnings.warn_explicit(msg, UserWarning, '', 0) else: val = v else: val = self.value[nm]['value'] if isinstance (val, Value): val = val.to_str(self) else: msg = 'Need value of unknown value identifier %s' % (nm) warnings.warn_explicit(msg, UserWarning, '', 0) return val def eth_get_type_attr(self, type): #print "eth_get_type_attr(%s)" % (type) types = [type] while (not self.type[type]['import']): val = self.type[type]['val'] #print val ttype = type while (val.type == 'TaggedType'): val = val.val ttype += '/' + UNTAG_TYPE_NAME if (val.type != 'Type_Ref'): if (type != ttype): types.append(ttype) break type = val.val types.append(type) attr = {} #print " ", types while len(types): t = types.pop() if (self.type[t]['import']): attr.update(self.type[t]['attr']) attr.update(self.eth_get_type_attr_from_all(t, self.type[t]['import'])) elif (self.type[t]['val'].type == 'SelectionType'): val = self.type[t]['val'] (ftype, display) = val.eth_ftype(self) attr.update({ 'TYPE' : ftype, 'DISPLAY' : display, 'STRINGS' : val.eth_strings(), 'BITMASK' : '0' }); else: attr.update(self.type[t]['attr']) attr.update(self.eth_type[self.type[t]['ethname']]['attr']) #print " ", attr return attr def eth_get_type_attr_from_all(self, type, module): attr = {} if module in self.all_type_attr and type in self.all_type_attr[module]: attr = self.all_type_attr[module][type] return attr def get_ttag_from_all(self, type, module): ttag = None if module in self.all_tags and type in self.all_tags[module]: ttag = self.all_tags[module][type] return ttag def get_val_from_all(self, nm, module): val = None if module in self.all_vals and nm in self.all_vals[module]: val = self.all_vals[module][nm] return val def get_obj_repr(self, ident, flds=[], not_flds=[]): def set_type_fn(cls, field, fnfield): obj[fnfield + '_fn'] = 'NULL' obj[fnfield + '_pdu'] = 'NULL' if field in val and isinstance(val[field], Type_Ref): p = val[field].eth_type_default_pars(self, '') obj[fnfield + '_fn'] = p['TYPE_REF_FN'] obj[fnfield + '_fn'] = obj[fnfield + '_fn'] % p # one iteration if (self.conform.check_item('PDU', cls + '.' 
+ field)): obj[fnfield + '_pdu'] = 'dissect_' + self.field[val[field].val]['ethname'] return # end of get_type_fn() obj = { '_name' : ident, '_ident' : asn2c(ident)} obj['_class'] = self.oassign[ident].cls obj['_module'] = self.oassign[ident].module val = self.oassign[ident].val for f in flds: if f not in val: return None for f in not_flds: if f in val: return None for f in list(val.keys()): if isinstance(val[f], Node): obj[f] = val[f].fld_obj_repr(self) else: obj[f] = str(val[f]) if (obj['_class'] == 'TYPE-IDENTIFIER') or (obj['_class'] == 'ABSTRACT-SYNTAX'): set_type_fn(obj['_class'], '&Type', '_type') if (obj['_class'] == 'OPERATION'): set_type_fn(obj['_class'], '&ArgumentType', '_argument') set_type_fn(obj['_class'], '&ResultType', '_result') if (obj['_class'] == 'ERROR'): set_type_fn(obj['_class'], '&ParameterType', '_parameter') return obj #--- eth_reg_module ----------------------------------------------------------- def eth_reg_module(self, module): #print "eth_reg_module(module='%s')" % (module) name = module.get_name() self.modules.append([name, module.get_proto(self)]) if name in self.module: raise DuplicateError("module", name) self.module[name] = [] self.module_ord.append(name) #--- eth_module_dep_add ------------------------------------------------------------ def eth_module_dep_add(self, module, dep): self.module[module].append(dep) #--- eth_exports ------------------------------------------------------------ def eth_exports(self, exports): self.exports_all = False if ((len(exports) == 1) and (exports[0] == 'ALL')): self.exports_all = True return for e in (exports): if isinstance(e, Type_Ref): self.exports.append(e.val) elif isinstance(e, Class_Ref): self.cexports.append(e.val) else: self.vexports.append(e) #--- eth_reg_assign --------------------------------------------------------- def eth_reg_assign(self, ident, val, virt=False): #print "eth_reg_assign(ident='%s')" % (ident) if ident in self.assign: raise DuplicateError("assignment", ident) self.assign[ident] = { 'val' : val , 'virt' : virt } self.assign_ord.append(ident) if (self.exports_all): self.exports.append(ident) #--- eth_reg_vassign -------------------------------------------------------- def eth_reg_vassign(self, vassign): ident = vassign.ident #print "eth_reg_vassign(ident='%s')" % (ident) if ident in self.vassign: raise DuplicateError("value assignment", ident) self.vassign[ident] = vassign self.vassign_ord.append(ident) if (self.exports_all): self.vexports.append(ident) #--- eth_reg_oassign -------------------------------------------------------- def eth_reg_oassign(self, oassign): ident = oassign.ident #print "eth_reg_oassign(ident='%s')" % (ident) if ident in self.oassign: if self.oassign[ident] == oassign: return # OK - already defined else: raise DuplicateError("information object assignment", ident) self.oassign[ident] = oassign self.oassign_ord.append(ident) self.oassign_cls.setdefault(oassign.cls, []).append(ident) #--- eth_import_type -------------------------------------------------------- def eth_import_type(self, ident, mod, proto): #print "eth_import_type(ident='%s', mod='%s', prot='%s')" % (ident, mod, proto) if ident in self.type: #print "already defined '%s' import=%s, module=%s" % (ident, str(self.type[ident]['import']), self.type[ident].get('module', '-')) if not self.type[ident]['import'] and (self.type[ident]['module'] == mod) : return # OK - already defined elif self.type[ident]['import'] and (self.type[ident]['import'] == mod) : return # OK - already imported else: raise 
DuplicateError("type", ident) self.type[ident] = {'import' : mod, 'proto' : proto, 'ethname' : '' } self.type[ident]['attr'] = { 'TYPE' : 'FT_NONE', 'DISPLAY' : 'BASE_NONE', 'STRINGS' : 'NULL', 'BITMASK' : '0' } mident = "$%s$%s" % (mod, ident) if (self.conform.check_item('TYPE_ATTR', mident)): self.type[ident]['attr'].update(self.conform.use_item('TYPE_ATTR', mident)) else: self.type[ident]['attr'].update(self.conform.use_item('TYPE_ATTR', ident)) if (self.conform.check_item('IMPORT_TAG', mident)): self.conform.copy_item('IMPORT_TAG', ident, mident) self.type_imp.append(ident) #--- dummy_import_type -------------------------------------------------------- def dummy_import_type(self, ident): # dummy imported if ident in self.type: raise Exception("Try to dummy import for existing type :%s" % ident) ethtype = asn2c(ident) self.type[ident] = {'import' : 'xxx', 'proto' : 'xxx', 'ethname' : ethtype } self.type[ident]['attr'] = { 'TYPE' : 'FT_NONE', 'DISPLAY' : 'BASE_NONE', 'STRINGS' : 'NULL', 'BITMASK' : '0' } self.eth_type[ethtype] = { 'import' : 'xxx', 'proto' : 'xxx' , 'attr' : {}, 'ref' : []} print("Dummy imported: %s (%s)" % (ident, ethtype)) return ethtype #--- eth_import_class -------------------------------------------------------- def eth_import_class(self, ident, mod, proto): #print "eth_import_class(ident='%s', mod='%s', prot='%s')" % (ident, mod, proto) if ident in self.objectclass: #print "already defined import=%s, module=%s" % (str(self.objectclass[ident]['import']), self.objectclass[ident]['module']) if not self.objectclass[ident]['import'] and (self.objectclass[ident]['module'] == mod) : return # OK - already defined elif self.objectclass[ident]['import'] and (self.objectclass[ident]['import'] == mod) : return # OK - already imported else: raise DuplicateError("object class", ident) self.objectclass[ident] = {'import' : mod, 'proto' : proto, 'ethname' : '' } self.objectclass_imp.append(ident) #--- eth_import_value ------------------------------------------------------- def eth_import_value(self, ident, mod, proto): #print "eth_import_value(ident='%s', mod='%s', prot='%s')" % (ident, mod, prot) if ident in self.value: #print "already defined import=%s, module=%s" % (str(self.value[ident]['import']), self.value[ident]['module']) if not self.value[ident]['import'] and (self.value[ident]['module'] == mod) : return # OK - already defined elif self.value[ident]['import'] and (self.value[ident]['import'] == mod) : return # OK - already imported else: raise DuplicateError("value", ident) self.value[ident] = {'import' : mod, 'proto' : proto, 'ethname' : ''} self.value_imp.append(ident) #--- eth_sel_req ------------------------------------------------------------ def eth_sel_req(self, typ, sel): key = typ + '.' 
+ sel if key not in self.sel_req: self.sel_req[key] = { 'typ' : typ , 'sel' : sel} self.sel_req_ord.append(key) return key #--- eth_comp_req ------------------------------------------------------------ def eth_comp_req(self, type): self.comp_req_ord.append(type) #--- eth_dep_add ------------------------------------------------------------ def eth_dep_add(self, type, dep): if type not in self.type_dep: self.type_dep[type] = [] self.type_dep[type].append(dep) #--- eth_reg_type ----------------------------------------------------------- def eth_reg_type(self, ident, val): #print "eth_reg_type(ident='%s', type='%s')" % (ident, val.type) if ident in self.type: if self.type[ident]['import'] and (self.type[ident]['import'] == self.Module()) : # replace imported type del self.type[ident] self.type_imp.remove(ident) else: raise DuplicateError("type", ident) val.ident = ident self.type[ident] = { 'val' : val, 'import' : None } self.type[ident]['module'] = self.Module() self.type[ident]['proto'] = self.proto if len(ident.split('/')) > 1: self.type[ident]['tname'] = val.eth_tname() else: self.type[ident]['tname'] = asn2c(ident) self.type[ident]['export'] = self.conform.use_item('EXPORTS', ident) self.type[ident]['enum'] = self.conform.use_item('MAKE_ENUM', ident) self.type[ident]['vals_ext'] = self.conform.use_item('USE_VALS_EXT', ident) self.type[ident]['user_def'] = self.conform.use_item('USER_DEFINED', ident) self.type[ident]['no_emit'] = self.conform.use_item('NO_EMIT', ident) self.type[ident]['tname'] = self.conform.use_item('TYPE_RENAME', ident, val_dflt=self.type[ident]['tname']) self.type[ident]['ethname'] = '' if (val.type == 'Type_Ref') or (val.type == 'TaggedType') or (val.type == 'SelectionType') : self.type[ident]['attr'] = {} else: (ftype, display) = val.eth_ftype(self) self.type[ident]['attr'] = { 'TYPE' : ftype, 'DISPLAY' : display, 'STRINGS' : val.eth_strings(), 'BITMASK' : '0' } self.type[ident]['attr'].update(self.conform.use_item('TYPE_ATTR', ident)) self.type_ord.append(ident) # PDU if (self.conform.check_item('PDU', ident)): self.eth_reg_field(ident, ident, impl=val.HasImplicitTag(self), pdu=self.conform.use_item('PDU', ident)) #--- eth_reg_objectclass ---------------------------------------------------------- def eth_reg_objectclass(self, ident, val): #print "eth_reg_objectclass(ident='%s')" % (ident) if ident in self.objectclass: if self.objectclass[ident]['import'] and (self.objectclass[ident]['import'] == self.Module()) : # replace imported object class del self.objectclass[ident] self.objectclass_imp.remove(ident) elif isinstance(self.objectclass[ident]['val'], Class_Ref) and \ isinstance(val, Class_Ref) and \ (self.objectclass[ident]['val'].val == val.val): pass # ignore duplicated CLASS1 ::= CLASS2 else: raise DuplicateError("object class", ident) self.objectclass[ident] = { 'import' : None, 'module' : self.Module(), 'proto' : self.proto } self.objectclass[ident]['val'] = val self.objectclass[ident]['export'] = self.conform.use_item('EXPORTS', ident) self.objectclass_ord.append(ident) #--- eth_reg_value ---------------------------------------------------------- def eth_reg_value(self, ident, type, value, ethname=None): #print "eth_reg_value(ident='%s')" % (ident) if ident in self.value: if self.value[ident]['import'] and (self.value[ident]['import'] == self.Module()) : # replace imported value del self.value[ident] self.value_imp.remove(ident) elif ethname: self.value[ident]['ethname'] = ethname return else: raise DuplicateError("value", ident) self.value[ident] = { 
'import' : None, 'module' : self.Module(), 'proto' : self.proto, 'type' : type, 'value' : value, 'no_emit' : False } self.value[ident]['export'] = self.conform.use_item('EXPORTS', ident) self.value[ident]['ethname'] = '' if (ethname): self.value[ident]['ethname'] = ethname self.value_ord.append(ident) #--- eth_reg_field ---------------------------------------------------------- def eth_reg_field(self, ident, type, idx='', parent=None, impl=False, pdu=None): #print "eth_reg_field(ident='%s', type='%s')" % (ident, type) if ident in self.field: if pdu and (type == self.field[ident]['type']): pass # OK already created PDU else: raise DuplicateError("field", ident) self.field[ident] = {'type' : type, 'idx' : idx, 'impl' : impl, 'pdu' : pdu, 'modified' : '', 'attr' : {} } name = ident.split('/')[-1] if self.remove_prefix and name.startswith(self.remove_prefix): name = name[len(self.remove_prefix):] if len(ident.split('/')) > 1 and name == ITEM_FIELD_NAME: # Sequence/Set of type if len(self.field[ident]['type'].split('/')) > 1: self.field[ident]['attr']['NAME'] = '"%s item"' % ident.split('/')[-2] self.field[ident]['attr']['ABBREV'] = asn2c(ident.split('/')[-2] + name) else: self.field[ident]['attr']['NAME'] = '"%s"' % self.field[ident]['type'] self.field[ident]['attr']['ABBREV'] = asn2c(self.field[ident]['type']) else: self.field[ident]['attr']['NAME'] = '"%s"' % name self.field[ident]['attr']['ABBREV'] = asn2c(name) if self.conform.check_item('FIELD_ATTR', ident): self.field[ident]['modified'] = '#' + str(id(self)) self.field[ident]['attr'].update(self.conform.use_item('FIELD_ATTR', ident)) if (pdu): self.field[ident]['pdu']['export'] = (self.conform.use_item('EXPORTS', ident + '_PDU') != 0) self.pdu_ord.append(ident) else: self.field_ord.append(ident) if parent: self.eth_dep_add(parent, type) def eth_dummy_eag_field_required(self): if (not self.dummy_eag_field): self.dummy_eag_field = 'eag_field' #--- eth_clean -------------------------------------------------------------- def eth_clean(self): self.proto = self.proto_opt; #--- ASN.1 tables ---------------- self.assign = {} self.assign_ord = [] self.field = {} self.pdu_ord = [] self.field_ord = [] self.type = {} self.type_ord = [] self.type_imp = [] self.type_dep = {} self.sel_req = {} self.sel_req_ord = [] self.comp_req_ord = [] self.vassign = {} self.vassign_ord = [] self.value = {} self.value_ord = [] self.value_imp = [] self.objectclass = {} self.objectclass_ord = [] self.objectclass_imp = [] self.oassign = {} self.oassign_ord = [] self.oassign_cls = {} #--- Modules ------------ self.modules = [] self.exports_all = False self.exports = [] self.cexports = [] self.vexports = [] #--- types ------------------- self.eth_type = {} self.eth_type_ord = [] self.eth_export_ord = [] self.eth_type_dupl = {} self.named_bit = [] #--- value dependencies ------------------- self.value_dep = {} #--- values ------------------- self.eth_value = {} self.eth_value_ord = [] #--- fields ------------------------- self.eth_hf = {} self.eth_hf_ord = [] self.eth_hfpdu_ord = [] self.eth_hf_dupl = {} self.dummy_eag_field = None #--- type dependencies ------------------- self.eth_type_ord1 = [] self.eth_dep_cycle = [] self.dep_cycle_eth_type = {} #--- value dependencies and export ------------------- self.eth_value_ord1 = [] self.eth_vexport_ord = [] #--- eth_prepare ------------------------------------------------------------ def eth_prepare(self): self.eproto = asn2c(self.proto) #--- dummy types/fields for PDU registration --- nm = 'NULL' if 
(self.conform.check_item('PDU', nm)): self.eth_reg_type('_dummy/'+nm, NullType()) self.eth_reg_field(nm, '_dummy/'+nm, pdu=self.conform.use_item('PDU', nm)) #--- required PDUs ---------------------------- for t in self.type_ord: pdu = self.type[t]['val'].eth_need_pdu(self) if not pdu: continue f = pdu['type'] pdu['reg'] = None pdu['hidden'] = False pdu['need_decl'] = True if f not in self.field: self.eth_reg_field(f, f, pdu=pdu) #--- values -> named values ------------------- t_for_update = {} for v in self.value_ord: if (self.value[v]['type'].type == 'Type_Ref') or self.conform.check_item('ASSIGN_VALUE_TO_TYPE', v): if self.conform.check_item('ASSIGN_VALUE_TO_TYPE', v): tnm = self.conform.use_item('ASSIGN_VALUE_TO_TYPE', v) else: tnm = self.value[v]['type'].val if tnm in self.type \ and not self.type[tnm]['import'] \ and (self.type[tnm]['val'].type == 'IntegerType'): self.type[tnm]['val'].add_named_value(v, self.value[v]['value']) self.value[v]['no_emit'] = True t_for_update[tnm] = True for t in list(t_for_update.keys()): self.type[t]['attr']['STRINGS'] = self.type[t]['val'].eth_strings() self.type[t]['attr'].update(self.conform.use_item('TYPE_ATTR', t)) #--- required components of --------------------------- #print "self.comp_req_ord = ", self.comp_req_ord for t in self.comp_req_ord: self.type[t]['val'].eth_reg_sub(t, self, components_available=True) #--- required selection types --------------------------- #print "self.sel_req_ord = ", self.sel_req_ord for t in self.sel_req_ord: tt = self.sel_req[t]['typ'] if tt not in self.type: self.dummy_import_type(t) elif self.type[tt]['import']: self.eth_import_type(t, self.type[tt]['import'], self.type[tt]['proto']) else: self.type[tt]['val'].sel_req(t, self.sel_req[t]['sel'], self) #--- types ------------------- for t in self.type_imp: # imported types nm = asn2c(t) self.eth_type[nm] = { 'import' : self.type[t]['import'], 'proto' : asn2c(self.type[t]['proto']), 'attr' : {}, 'ref' : []} self.eth_type[nm]['attr'].update(self.conform.use_item('ETYPE_ATTR', nm)) self.type[t]['ethname'] = nm for t in self.type_ord: # dummy import for missing type reference tp = self.type[t]['val'] #print "X : %s %s " % (t, tp.type) if isinstance(tp, TaggedType): #print "%s : %s " % (tp.type, t) tp = tp.val if isinstance(tp, Type_Ref): #print "%s : %s ::= %s " % (tp.type, t, tp.val) if tp.val not in self.type: self.dummy_import_type(tp.val) for t in self.type_ord: nm = self.type[t]['tname'] if ((nm.find('#') >= 0) or ((len(t.split('/'))>1) and (self.conform.get_fn_presence(t) or self.conform.check_item('FN_PARS', t) or self.conform.get_fn_presence('/'.join((t,ITEM_FIELD_NAME))) or self.conform.check_item('FN_PARS', '/'.join((t,ITEM_FIELD_NAME)))) and not self.conform.check_item('TYPE_RENAME', t))): if len(t.split('/')) == 2 and t.split('/')[1] == ITEM_FIELD_NAME: # Sequence of type at the 1st level nm = t.split('/')[0] + t.split('/')[1] elif t.split('/')[-1] == ITEM_FIELD_NAME: # Sequence/Set of type at next levels nm = 'T_' + self.conform.use_item('FIELD_RENAME', '/'.join(t.split('/')[0:-1]), val_dflt=t.split('/')[-2]) + t.split('/')[-1] elif t.split('/')[-1] == UNTAG_TYPE_NAME: # Untagged type nm = self.type['/'.join(t.split('/')[0:-1])]['ethname'] + '_U' else: nm = 'T_' + self.conform.use_item('FIELD_RENAME', t, val_dflt=t.split('/')[-1]) nm = asn2c(nm) if nm in self.eth_type: if nm in self.eth_type_dupl: self.eth_type_dupl[nm].append(t) else: self.eth_type_dupl[nm] = [self.eth_type[nm]['ref'][0], t] nm += '_%02d' % (len(self.eth_type_dupl[nm])-1) if nm in 
self.eth_type: self.eth_type[nm]['ref'].append(t) else: self.eth_type_ord.append(nm) self.eth_type[nm] = { 'import' : None, 'proto' : self.eproto, 'export' : 0, 'enum' : 0, 'vals_ext' : 0, 'user_def' : EF_TYPE|EF_VALS, 'no_emit' : EF_TYPE|EF_VALS, 'val' : self.type[t]['val'], 'attr' : {}, 'ref' : [t]} self.type[t]['ethname'] = nm if (not self.eth_type[nm]['export'] and self.type[t]['export']): # new export self.eth_export_ord.append(nm) self.eth_type[nm]['export'] |= self.type[t]['export'] self.eth_type[nm]['enum'] |= self.type[t]['enum'] self.eth_type[nm]['vals_ext'] |= self.type[t]['vals_ext'] self.eth_type[nm]['user_def'] &= self.type[t]['user_def'] self.eth_type[nm]['no_emit'] &= self.type[t]['no_emit'] if self.type[t]['attr'].get('STRINGS') == '$$': use_ext = self.type[t]['vals_ext'] if (use_ext): self.eth_type[nm]['attr']['STRINGS'] = '&%s_ext' % (self.eth_vals_nm(nm)) else: self.eth_type[nm]['attr']['STRINGS'] = 'VALS(%s)' % (self.eth_vals_nm(nm)) self.eth_type[nm]['attr'].update(self.conform.use_item('ETYPE_ATTR', nm)) for t in self.eth_type_ord: bits = self.eth_type[t]['val'].eth_named_bits() if (bits): for (val, id) in bits: self.named_bit.append({'name' : id, 'val' : val, 'ethname' : 'hf_%s_%s_%s' % (self.eproto, t, asn2c(id)), 'ftype' : 'FT_BOOLEAN', 'display' : '8', 'strings' : 'NULL', 'bitmask' : '0x'+('80','40','20','10','08','04','02','01')[val%8]}) if self.eth_type[t]['val'].eth_need_tree(): self.eth_type[t]['tree'] = "ett_%s_%s" % (self.eth_type[t]['proto'], t) else: self.eth_type[t]['tree'] = None #--- register values from enums ------------ for t in self.eth_type_ord: if (self.eth_type[t]['val'].eth_has_enum(t, self)): self.eth_type[t]['val'].reg_enum_vals(t, self) #--- value dependencies ------------------- for v in self.value_ord: if isinstance (self.value[v]['value'], Value): dep = self.value[v]['value'].get_dep() else: dep = self.value[v]['value'] if dep and dep in self.value: self.value_dep.setdefault(v, []).append(dep) #--- exports all necessary values for v in self.value_ord: if not self.value[v]['export']: continue deparr = self.value_dep.get(v, []) while deparr: d = deparr.pop() if not self.value[d]['import']: if not self.value[d]['export']: self.value[d]['export'] = EF_TYPE deparr.extend(self.value_dep.get(d, [])) #--- values ------------------- for v in self.value_imp: nm = asn2c(v) self.eth_value[nm] = { 'import' : self.value[v]['import'], 'proto' : asn2c(self.value[v]['proto']), 'ref' : []} self.value[v]['ethname'] = nm for v in self.value_ord: if (self.value[v]['ethname']): continue if (self.value[v]['no_emit']): continue nm = asn2c(v) self.eth_value[nm] = { 'import' : None, 'proto' : asn2c(self.value[v]['proto']), 'export' : self.value[v]['export'], 'ref' : [v] } self.eth_value[nm]['value'] = self.value[v]['value'] self.eth_value_ord.append(nm) self.value[v]['ethname'] = nm #--- fields ------------------------- for f in (self.pdu_ord + self.field_ord): if len(f.split('/')) > 1 and f.split('/')[-1] == ITEM_FIELD_NAME: # Sequence/Set of type nm = self.conform.use_item('FIELD_RENAME', '/'.join(f.split('/')[0:-1]), val_dflt=f.split('/')[-2]) + f.split('/')[-1] else: nm = f.split('/')[-1] nm = self.conform.use_item('FIELD_RENAME', f, val_dflt=nm) nm = asn2c(nm) if (self.field[f]['pdu']): nm += '_PDU' if (not self.merge_modules or self.field[f]['pdu']['export']): nm = self.eproto + '_' + nm t = self.field[f]['type'] if t in self.type: ethtype = self.type[t]['ethname'] else: # undefined type ethtype = self.dummy_import_type(t) ethtypemod = ethtype + 
self.field[f]['modified'] if nm in self.eth_hf: if nm in self.eth_hf_dupl: if ethtypemod in self.eth_hf_dupl[nm]: nm = self.eth_hf_dupl[nm][ethtypemod] self.eth_hf[nm]['ref'].append(f) self.field[f]['ethname'] = nm continue else: nmx = nm + ('_%02d' % (len(self.eth_hf_dupl[nm]))) self.eth_hf_dupl[nm][ethtype] = nmx nm = nmx else: if (self.eth_hf[nm]['ethtype']+self.eth_hf[nm]['modified']) == ethtypemod: self.eth_hf[nm]['ref'].append(f) self.field[f]['ethname'] = nm continue else: nmx = nm + '_01' self.eth_hf_dupl[nm] = {self.eth_hf[nm]['ethtype']+self.eth_hf[nm]['modified'] : nm, \ ethtypemod : nmx} nm = nmx if (self.field[f]['pdu']): self.eth_hfpdu_ord.append(nm) else: self.eth_hf_ord.append(nm) fullname = 'hf_%s_%s' % (self.eproto, nm) attr = self.eth_get_type_attr(self.field[f]['type']).copy() attr.update(self.field[f]['attr']) if (self.NAPI() and 'NAME' in attr): attr['NAME'] += self.field[f]['idx'] attr.update(self.conform.use_item('EFIELD_ATTR', nm)) use_vals_ext = self.eth_type[ethtype].get('vals_ext') if (use_vals_ext): attr['DISPLAY'] += '|BASE_EXT_STRING' self.eth_hf[nm] = {'fullname' : fullname, 'pdu' : self.field[f]['pdu'], 'ethtype' : ethtype, 'modified' : self.field[f]['modified'], 'attr' : attr.copy(), 'ref' : [f]} self.field[f]['ethname'] = nm if (self.dummy_eag_field): # Prepending "dummy_" avoids matching checkhf.pl. self.dummy_eag_field = 'dummy_hf_%s_%s' % (self.eproto, self.dummy_eag_field) #--- type dependencies ------------------- (self.eth_type_ord1, self.eth_dep_cycle) = dependency_compute(self.type_ord, self.type_dep, map_fn = lambda t: self.type[t]['ethname'], ignore_fn = lambda t: self.type[t]['import']) i = 0 while i < len(self.eth_dep_cycle): t = self.type[self.eth_dep_cycle[i][0]]['ethname'] self.dep_cycle_eth_type.setdefault(t, []).append(i) i += 1 #--- value dependencies and export ------------------- for v in self.eth_value_ord: if self.eth_value[v]['export']: self.eth_vexport_ord.append(v) else: self.eth_value_ord1.append(v) #--- export tags, values, ... 
--- for t in self.exports: if t not in self.type: continue if self.type[t]['import']: continue m = self.type[t]['module'] if not self.Per(): if m not in self.all_tags: self.all_tags[m] = {} self.all_tags[m][t] = self.type[t]['val'].GetTTag(self) if m not in self.all_type_attr: self.all_type_attr[m] = {} self.all_type_attr[m][t] = self.eth_get_type_attr(t).copy() for v in self.vexports: if v not in self.value: continue if self.value[v]['import']: continue m = self.value[v]['module'] if m not in self.all_vals: self.all_vals[m] = {} vv = self.value[v]['value'] if isinstance (vv, Value): vv = vv.to_str(self) self.all_vals[m][v] = vv #--- eth_vals_nm ------------------------------------------------------------ def eth_vals_nm(self, tname): out = "" if (not self.eth_type[tname]['export'] & EF_NO_PROT): out += "%s_" % (self.eproto) out += "%s_vals" % (tname) return out #--- eth_vals --------------------------------------------------------------- def eth_vals(self, tname, vals): out = "" has_enum = self.eth_type[tname]['enum'] & EF_ENUM use_ext = self.eth_type[tname]['vals_ext'] if (use_ext): vals.sort(key=lambda vals_entry: int(vals_entry[0])) if (not self.eth_type[tname]['export'] & EF_VALS): out += 'static ' if (self.eth_type[tname]['export'] & EF_VALS) and (self.eth_type[tname]['export'] & EF_TABLE): out += 'static ' out += "const value_string %s[] = {\n" % (self.eth_vals_nm(tname)) for (val, id) in vals: if (has_enum): vval = self.eth_enum_item(tname, id) else: vval = val out += ' { %3s, "%s" },\n' % (vval, id) out += " { 0, NULL }\n};\n" if (use_ext): out += "\nstatic value_string_ext %s_ext = VALUE_STRING_EXT_INIT(%s);\n" % (self.eth_vals_nm(tname), self.eth_vals_nm(tname)) return out #--- eth_enum_prefix ------------------------------------------------------------ def eth_enum_prefix(self, tname, type=False): out = "" if (self.eth_type[tname]['export'] & EF_ENUM): no_prot = self.eth_type[tname]['export'] & EF_NO_PROT else: no_prot = self.eth_type[tname]['enum'] & EF_NO_PROT if (not no_prot): out += self.eproto if ((not self.eth_type[tname]['enum'] & EF_NO_TYPE) or type): if (out): out += '_' out += tname if (self.eth_type[tname]['enum'] & EF_UCASE): out = out.upper() if (out): out += '_' return out #--- eth_enum_nm ------------------------------------------------------------ def eth_enum_nm(self, tname): out = self.eth_enum_prefix(tname, type=True) out += "enum" return out #--- eth_enum_item --------------------------------------------------------------- def eth_enum_item(self, tname, ident): out = self.eth_enum_prefix(tname) out += asn2c(ident) if (self.eth_type[tname]['enum'] & EF_UCASE): out = out.upper() return out #--- eth_enum --------------------------------------------------------------- def eth_enum(self, tname, vals): out = "" if (self.eth_type[tname]['enum'] & EF_DEFINE): out += "/* enumerated values for %s */\n" % (tname) for (val, id) in vals: out += '#define %-12s %3s\n' % (self.eth_enum_item(tname, id), val) else: out += "typedef enum _%s {\n" % (self.eth_enum_nm(tname)) first_line = 1 for (val, id) in vals: if (first_line == 1): first_line = 0 else: out += ",\n" out += ' %-12s = %3s' % (self.eth_enum_item(tname, id), val) out += "\n} %s;\n" % (self.eth_enum_nm(tname)) return out #--- eth_bits --------------------------------------------------------------- def eth_bits(self, tname, bits): out = "" out += "static const " out += "asn_namedbit %(TABLE)s[] = {\n" for (val, id) in bits: out += ' { %2d, &hf_%s_%s_%s, -1, -1, "%s", NULL },\n' % (val, self.eproto, tname, asn2c(id), 
id) out += " { 0, NULL, 0, 0, NULL, NULL }\n};\n" return out #--- eth_type_fn_h ---------------------------------------------------------- def eth_type_fn_h(self, tname): out = "" if (not self.eth_type[tname]['export'] & EF_TYPE): out += 'static ' out += "int " if (self.Ber()): out += "dissect_%s_%s(gboolean implicit_tag _U_, tvbuff_t *tvb _U_, int offset _U_, asn1_ctx_t *actx _U_, proto_tree *tree _U_, int hf_index _U_)" % (self.eth_type[tname]['proto'], tname) elif (self.Per()): out += "dissect_%s_%s(tvbuff_t *tvb _U_, int offset _U_, asn1_ctx_t *actx _U_, proto_tree *tree _U_, int hf_index _U_)" % (self.eth_type[tname]['proto'], tname) out += ";\n" return out #--- eth_fn_call ------------------------------------------------------------ def eth_fn_call(self, fname, ret=None, indent=2, par=None): out = indent * ' ' if (ret): if (ret == 'return'): out += 'return ' else: out += ret + ' = ' out += fname + '(' ind = len(out) for i in range(len(par)): if (i>0): out += ind * ' ' out += ', '.join(par[i]) if (i<(len(par)-1)): out += ',\n' out += ');\n' return out #--- eth_type_fn_hdr -------------------------------------------------------- def eth_type_fn_hdr(self, tname): out = '\n' if (not self.eth_type[tname]['export'] & EF_TYPE): out += 'static ' out += "int\n" if (self.Ber()): out += "dissect_%s_%s(gboolean implicit_tag _U_, tvbuff_t *tvb _U_, int offset _U_, asn1_ctx_t *actx _U_, proto_tree *tree _U_, int hf_index _U_) {\n" % (self.eth_type[tname]['proto'], tname) elif (self.Per()): out += "dissect_%s_%s(tvbuff_t *tvb _U_, int offset _U_, asn1_ctx_t *actx _U_, proto_tree *tree _U_, int hf_index _U_) {\n" % (self.eth_type[tname]['proto'], tname) #if self.conform.get_fn_presence(tname): # out += self.conform.get_fn_text(tname, 'FN_HDR') #el if self.conform.get_fn_presence(self.eth_type[tname]['ref'][0]): out += self.conform.get_fn_text(self.eth_type[tname]['ref'][0], 'FN_HDR') return out #--- eth_type_fn_ftr -------------------------------------------------------- def eth_type_fn_ftr(self, tname): out = '\n' #if self.conform.get_fn_presence(tname): # out += self.conform.get_fn_text(tname, 'FN_FTR') #el if self.conform.get_fn_presence(self.eth_type[tname]['ref'][0]): out += self.conform.get_fn_text(self.eth_type[tname]['ref'][0], 'FN_FTR') out += " return offset;\n" out += "}\n" return out #--- eth_type_fn_body ------------------------------------------------------- def eth_type_fn_body(self, tname, body, pars=None): out = body #if self.conform.get_fn_body_presence(tname): # out = self.conform.get_fn_text(tname, 'FN_BODY') #el if self.conform.get_fn_body_presence(self.eth_type[tname]['ref'][0]): out = self.conform.get_fn_text(self.eth_type[tname]['ref'][0], 'FN_BODY') if pars: try: out = out % pars except (TypeError): pass return out #--- eth_out_pdu_decl ---------------------------------------------------------- def eth_out_pdu_decl(self, f): t = self.eth_hf[f]['ethtype'] is_new = self.eth_hf[f]['pdu']['new'] out = '' if (not self.eth_hf[f]['pdu']['export']): out += 'static ' if (is_new): out += 'int ' out += 'dissect_'+f+'(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_, void *data _U_);\n' else: out += 'void ' out += 'dissect_'+f+'(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_);\n' return out #--- eth_output_hf ---------------------------------------------------------- def eth_output_hf (self): if not len(self.eth_hf_ord) and not len(self.eth_hfpdu_ord) and not len(self.named_bit): return fx = self.output.file_open('hf') for f in (self.eth_hfpdu_ord + 
    #--- eth_output_hf ----------------------------------------------------------
    def eth_output_hf (self):
        if not len(self.eth_hf_ord) and not len(self.eth_hfpdu_ord) and not len(self.named_bit):
            return
        fx = self.output.file_open('hf')
        for f in (self.eth_hfpdu_ord + self.eth_hf_ord):
            fx.write("%-50s/* %s */\n" % ("static int %s = -1; " % (self.eth_hf[f]['fullname']), self.eth_hf[f]['ethtype']))
        if (self.named_bit):
            fx.write('/* named bits */\n')
        for nb in self.named_bit:
            fx.write("static int %s = -1;\n" % (nb['ethname']))
        if (self.dummy_eag_field):
            fx.write("static int %s = -1; /* never registered */\n" % (self.dummy_eag_field))
        self.output.file_close(fx)

    #--- eth_output_hf_arr ------------------------------------------------------
    def eth_output_hf_arr (self):
        if not len(self.eth_hf_ord) and not len(self.eth_hfpdu_ord) and not len(self.named_bit):
            return
        fx = self.output.file_open('hfarr')
        for f in (self.eth_hfpdu_ord + self.eth_hf_ord):
            t = self.eth_hf[f]['ethtype']
            if self.remove_prefix and t.startswith(self.remove_prefix):
                t = t[len(self.remove_prefix):]
            name=self.eth_hf[f]['attr']['NAME']
            try: # Python < 3
                trantab = maketrans("- ", "__")
            except:
                trantab = str.maketrans("- ", "__")
            name = name.translate(trantab)
            namelower = name.lower()
            tquoted_lower = '"' + t.lower() + '"'
            # Try to avoid giving blurbs that give no more info than the name
            if tquoted_lower == namelower or \
               t == "NULL" or \
               tquoted_lower.replace("t_", "") == namelower:
                blurb = 'NULL'
            else:
                blurb = '"%s"' % (t)
            attr = self.eth_hf[f]['attr'].copy()
            if attr['TYPE'] == 'FT_NONE':
                attr['ABBREV'] = '"%s.%s_element"' % (self.proto, attr['ABBREV'])
            else:
                attr['ABBREV'] = '"%s.%s"' % (self.proto, attr['ABBREV'])
            if 'BLURB' not in attr:
                attr['BLURB'] = blurb
            fx.write('    { &%s,\n' % (self.eth_hf[f]['fullname']))
            fx.write('      { %(NAME)s, %(ABBREV)s,\n' % attr)
            fx.write('        %(TYPE)s, %(DISPLAY)s, %(STRINGS)s, %(BITMASK)s,\n' % attr)
            fx.write('        %(BLURB)s, HFILL }},\n' % attr)
        for nb in self.named_bit:
            fx.write('    { &%s,\n' % (nb['ethname']))
            fx.write('      { "%s", "%s.%s",\n' % (nb['name'], self.proto, nb['name']))
            fx.write('        %s, %s, %s, %s,\n' % (nb['ftype'], nb['display'], nb['strings'], nb['bitmask']))
            fx.write('        NULL, HFILL }},\n')
        self.output.file_close(fx)

    #--- eth_output_ett ---------------------------------------------------------
    def eth_output_ett (self):
        fx = self.output.file_open('ett')
        fempty = True
        #fx.write("static gint ett_%s = -1;\n" % (self.eproto))
        for t in self.eth_type_ord:
            if self.eth_type[t]['tree']:
                fx.write("static gint %s = -1;\n" % (self.eth_type[t]['tree']))
                fempty = False
        self.output.file_close(fx, discard=fempty)

    #--- eth_output_ett_arr -----------------------------------------------------
    def eth_output_ett_arr(self):
        fx = self.output.file_open('ettarr')
        fempty = True
        #fx.write("    &ett_%s,\n" % (self.eproto))
        for t in self.eth_type_ord:
            if self.eth_type[t]['tree']:
                fx.write("    &%s,\n" % (self.eth_type[t]['tree']))
                fempty = False
        self.output.file_close(fx, discard=fempty)
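    # Illustration only (hypothetical field hf_myproto_someField of type
    # FT_UINT32): eth_output_hf_arr() emits one hf_register_info entry per
    # field, shaped like
    #
    #     { &hf_myproto_someField,
    #       { "someField", "myproto.someField",
    #         FT_UINT32, BASE_DEC, NULL, 0,
    #         NULL, HFILL }},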
    #--- eth_output_export ------------------------------------------------------
    def eth_output_export(self):
        fx = self.output.file_open('exp', ext='h')
        for t in self.eth_export_ord:  # vals
            if (self.eth_type[t]['export'] & EF_ENUM) and self.eth_type[t]['val'].eth_has_enum(t, self):
                fx.write(self.eth_type[t]['val'].eth_type_enum(t, self))
            if (self.eth_type[t]['export'] & EF_VALS) and self.eth_type[t]['val'].eth_has_vals():
                if not self.eth_type[t]['export'] & EF_TABLE:
                    if self.eth_type[t]['export'] & EF_WS_DLL:
                        fx.write("WS_DLL_PUBLIC ")
                    else:
                        fx.write("extern ")
                    fx.write("const value_string %s[];\n" % (self.eth_vals_nm(t)))
                else:
                    fx.write(self.eth_type[t]['val'].eth_type_vals(t, self))
        for t in self.eth_export_ord:  # functions
            if (self.eth_type[t]['export'] & EF_TYPE):
                if self.eth_type[t]['export'] & EF_EXTERN:
                    if self.eth_type[t]['export'] & EF_WS_DLL:
                        fx.write("WS_DLL_PUBLIC ")
                    else:
                        fx.write("extern ")
                fx.write(self.eth_type_fn_h(t))
        for f in self.eth_hfpdu_ord:  # PDUs
            if (self.eth_hf[f]['pdu'] and self.eth_hf[f]['pdu']['export']):
                fx.write(self.eth_out_pdu_decl(f))
        self.output.file_close(fx)

    #--- eth_output_expcnf ------------------------------------------------------
    def eth_output_expcnf(self):
        fx = self.output.file_open('exp', ext='cnf')
        fx.write('#.MODULE\n')
        maxw = 0
        for (m, p) in self.modules:
            if (len(m) > maxw): maxw = len(m)
        for (m, p) in self.modules:
            fx.write("%-*s  %s\n" % (maxw, m, p))
        fx.write('#.END\n\n')
        for cls in self.objectclass_ord:
            if self.objectclass[cls]['export']:
                cnm = cls
                if self.objectclass[cls]['export'] & EF_MODULE:
                    cnm = "$%s$%s" % (self.objectclass[cls]['module'], cnm)
                fx.write('#.CLASS %s\n' % (cnm))
                maxw = 2
                for fld in self.objectclass[cls]['val'].fields:
                    w = len(fld.fld_repr()[0])
                    if (w > maxw): maxw = w
                for fld in self.objectclass[cls]['val'].fields:
                    repr = fld.fld_repr()
                    fx.write('%-*s  %s\n' % (maxw, repr[0], ' '.join(repr[1:])))
                fx.write('#.END\n\n')
        if self.Ber():
            fx.write('#.IMPORT_TAG\n')
            for t in self.eth_export_ord:  # tags
                if (self.eth_type[t]['export'] & EF_TYPE):
                    fx.write('%-24s ' % self.eth_type[t]['ref'][0])
                    fx.write('%s %s\n' % self.eth_type[t]['val'].GetTag(self))
            fx.write('#.END\n\n')
        fx.write('#.TYPE_ATTR\n')
        for t in self.eth_export_ord:  # attributes
            if (self.eth_type[t]['export'] & EF_TYPE):
                tnm = self.eth_type[t]['ref'][0]
                if self.eth_type[t]['export'] & EF_MODULE:
                    tnm = "$%s$%s" % (self.type[tnm]['module'], tnm)
                fx.write('%-24s ' % tnm)
                attr = self.eth_get_type_attr(self.eth_type[t]['ref'][0]).copy()
                fx.write('TYPE = %(TYPE)-9s  DISPLAY = %(DISPLAY)-9s  STRINGS = %(STRINGS)s  BITMASK = %(BITMASK)s\n' % attr)
        fx.write('#.END\n\n')
        self.output.file_close(fx, keep_anyway=True)

    #--- eth_output_val ------------------------------------------------------
    def eth_output_val(self):
        fx = self.output.file_open('val', ext='h')
        for v in self.eth_value_ord1:
            vv = self.eth_value[v]['value']
            if isinstance (vv, Value):
                vv = vv.to_str(self)
            fx.write("#define %-30s %s\n" % (v, vv))
        for t in self.eth_type_ord1:
            if self.eth_type[t]['import']:
                continue
            if self.eth_type[t]['val'].eth_has_enum(t, self) and not (self.eth_type[t]['export'] & EF_ENUM):
                fx.write(self.eth_type[t]['val'].eth_type_enum(t, self))
        self.output.file_close(fx)

    #--- eth_output_valexp ------------------------------------------------------
    def eth_output_valexp(self):
        if (not len(self.eth_vexport_ord)): return
        fx = self.output.file_open('valexp', ext='h')
        for v in self.eth_vexport_ord:
            vv = self.eth_value[v]['value']
            if isinstance (vv, Value):
                vv = vv.to_str(self)
            fx.write("#define %-30s %s\n" % (v, vv))
        self.output.file_close(fx)
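    # Illustration only (hypothetical module "MyModule" mapped to protocol
    # "myproto", exporting a type "T1"): the generated exp.cnf consists of
    # directive blocks along these lines, column widths depending on maxw:
    #
    #   #.MODULE
    #   MyModule  myproto
    #   #.END
    #
    #   #.TYPE_ATTR
    #   T1                       TYPE = FT_UINT32  DISPLAY = BASE_DEC  STRINGS = NULL  BITMASK = 0
    #   #.END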
    #--- eth_output_types -------------------------------------------------------
    def eth_output_types(self):
        def out_pdu(f):
            t = self.eth_hf[f]['ethtype']
            is_new = self.eth_hf[f]['pdu']['new']
            impl = 'FALSE'
            out = ''
            if (not self.eth_hf[f]['pdu']['export']):
                out += 'static '
            if (is_new):
                out += 'int '
                out += 'dissect_'+f+'(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_, void *data _U_) {\n'
            else:
                out += 'void '
                out += 'dissect_'+f+'(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_) {\n'
            if (is_new):
                out += '  int offset = 0;\n'
                off_par = 'offset'
                ret_par = 'offset'
            else:
                off_par = '0'
                ret_par = None
            if (self.Per()):
                if (self.Aligned()):
                    aligned = 'TRUE'
                else:
                    aligned = 'FALSE'
                out += "  asn1_ctx_t asn1_ctx;\n"
                out += self.eth_fn_call('asn1_ctx_init', par=(('&asn1_ctx', 'ASN1_ENC_PER', aligned, 'pinfo'),))
            if (self.Ber()):
                out += "  asn1_ctx_t asn1_ctx;\n"
                out += self.eth_fn_call('asn1_ctx_init', par=(('&asn1_ctx', 'ASN1_ENC_BER', 'TRUE', 'pinfo'),))
                par=((impl, 'tvb', off_par,'&asn1_ctx', 'tree', self.eth_hf[f]['fullname']),)
            elif (self.Per()):
                par=(('tvb', off_par, '&asn1_ctx', 'tree', self.eth_hf[f]['fullname']),)
            else:
                par=((),)
            out += self.eth_fn_call('dissect_%s_%s' % (self.eth_type[t]['proto'], t), ret=ret_par, par=par)
            if (self.Per() and is_new):
                out += '  offset += 7; offset >>= 3;\n'
            if (is_new):
                out += '  return offset;\n'
            out += '}\n'
            return out
        #end out_pdu()
        fx = self.output.file_open('fn')
        pos = fx.tell()
        if (len(self.eth_hfpdu_ord)):
            first_decl = True
            for f in self.eth_hfpdu_ord:
                if (self.eth_hf[f]['pdu'] and self.eth_hf[f]['pdu']['need_decl']):
                    if first_decl:
                        fx.write('/*--- PDUs declarations ---*/\n')
                        first_decl = False
                    fx.write(self.eth_out_pdu_decl(f))
            if not first_decl:
                fx.write('\n')
        if self.eth_dep_cycle:
            fx.write('/*--- Cyclic dependencies ---*/\n\n')
            i = 0
            while i < len(self.eth_dep_cycle):
                t = self.type[self.eth_dep_cycle[i][0]]['ethname']
                if self.dep_cycle_eth_type[t][0] != i: i += 1; continue
                fx.write(''.join(['/* %s */\n' % ' -> '.join(self.eth_dep_cycle[i]) for i in self.dep_cycle_eth_type[t]]))
                fx.write(self.eth_type_fn_h(t))
                fx.write('\n')
                i += 1
            fx.write('\n')
        for t in self.eth_type_ord1:
            if self.eth_type[t]['import']:
                continue
            if self.eth_type[t]['val'].eth_has_vals():
                if self.eth_type[t]['no_emit'] & EF_VALS:
                    pass
                elif self.eth_type[t]['user_def'] & EF_VALS:
                    fx.write("extern const value_string %s[];\n" % (self.eth_vals_nm(t)))
                elif (self.eth_type[t]['export'] & EF_VALS) and (self.eth_type[t]['export'] & EF_TABLE):
                    pass
                else:
                    fx.write(self.eth_type[t]['val'].eth_type_vals(t, self))
            if self.eth_type[t]['no_emit'] & EF_TYPE:
                pass
            elif self.eth_type[t]['user_def'] & EF_TYPE:
                fx.write(self.eth_type_fn_h(t))
            else:
                fx.write(self.eth_type[t]['val'].eth_type_fn(self.eth_type[t]['proto'], t, self))
            fx.write('\n')
        if (len(self.eth_hfpdu_ord)):
            fx.write('/*--- PDUs ---*/\n\n')
            for f in self.eth_hfpdu_ord:
                if (self.eth_hf[f]['pdu']):
                    if (f in self.emitted_pdu):
                        fx.write("  /* %s already emitted */\n" % (f))
                    else:
                        fx.write(out_pdu(f))
                        self.emitted_pdu[f] = True
            fx.write('\n')
        fempty = pos == fx.tell()
        self.output.file_close(fx, discard=fempty)

    #--- eth_output_dis_hnd -----------------------------------------------------
    def eth_output_dis_hnd(self):
        fx = self.output.file_open('dis-hnd')
        fempty = True
        for f in self.eth_hfpdu_ord:
            pdu = self.eth_hf[f]['pdu']
            if (pdu and pdu['reg'] and not pdu['hidden']):
                dis = self.proto
                if (pdu['reg'] != '.'):
                    dis += '.' + pdu['reg']
                fx.write('static dissector_handle_t %s_handle;\n' % (asn2c(dis)))
                fempty = False
        fx.write('\n')
        self.output.file_close(fx, discard=fempty)
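    # Illustration only (hypothetical new-style PDU "Msg" of an aligned-PER
    # protocol "myproto"): out_pdu() above wraps the type dissector like this:
    #
    #   static int dissect_Msg(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_, void *data _U_) {
    #     int offset = 0;
    #     asn1_ctx_t asn1_ctx;
    #     asn1_ctx_init(&asn1_ctx, ASN1_ENC_PER, TRUE, pinfo);
    #     offset = dissect_myproto_Msg(tvb, offset, &asn1_ctx, tree, hf_myproto_Msg);
    #     offset += 7; offset >>= 3;
    #     return offset;
    #   }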
    #--- eth_output_dis_reg -----------------------------------------------------
    def eth_output_dis_reg(self):
        fx = self.output.file_open('dis-reg')
        fempty = True
        for f in self.eth_hfpdu_ord:
            pdu = self.eth_hf[f]['pdu']
            if (pdu and pdu['reg']):
                new_prefix = ''
                if (pdu['new']):
                    new_prefix = 'new_'
                dis = self.proto
                if (pdu['reg'] != '.'):
                    dis += '.' + pdu['reg']
                fx.write('  %sregister_dissector("%s", dissect_%s, proto_%s);\n' % (new_prefix, dis, f, self.eproto))
                if (not pdu['hidden']):
                    fx.write('  %s_handle = find_dissector("%s");\n' % (asn2c(dis), dis))
                fempty = False
        fx.write('\n')
        self.output.file_close(fx, discard=fempty)

    #--- eth_output_dis_tab -----------------------------------------------------
    def eth_output_dis_tab(self):
        fx = self.output.file_open('dis-tab')
        fempty = True
        for k in self.conform.get_order('REGISTER'):
            reg = self.conform.use_item('REGISTER', k)
            if reg['pdu'] not in self.field:
                continue
            f = self.field[reg['pdu']]['ethname']
            pdu = self.eth_hf[f]['pdu']
            new_prefix = ''
            if (pdu['new']):
                new_prefix = 'new_'
            if (reg['rtype'] in ('NUM', 'STR')):
                rstr = ''
                if (reg['rtype'] == 'STR'):
                    rstr = 'string'
                else:
                    rstr = 'uint'
                if (pdu['reg']):
                    dis = self.proto
                    if (pdu['reg'] != '.'):
                        dis += '.' + pdu['reg']
                    if (not pdu['hidden']):
                        hnd = '%s_handle' % (asn2c(dis))
                    else:
                        hnd = 'find_dissector("%s")' % (dis)
                else:
                    hnd = '%screate_dissector_handle(dissect_%s, proto_%s)' % (new_prefix, f, self.eproto)
                rport = self.value_get_eth(reg['rport'])
                fx.write('  dissector_add_%s("%s", %s, %s);\n' % (rstr, reg['rtable'], rport, hnd))
            elif (reg['rtype'] in ('BER', 'PER')):
                roid = self.value_get_eth(reg['roid'])
                fx.write('  %sregister_%s_oid_dissector(%s, dissect_%s, proto_%s, %s);\n' % (new_prefix, reg['rtype'].lower(), roid, f, self.eproto, reg['roidname']))
            fempty = False
        fx.write('\n')
        self.output.file_close(fx, discard=fempty)

    #--- eth_output_syn_reg -----------------------------------------------------
    def eth_output_syn_reg(self):
        fx = self.output.file_open('syn-reg')
        fempty = True
        first_decl = True
        for k in self.conform.get_order('SYNTAX'):
            reg = self.conform.use_item('SYNTAX', k)
            if reg['pdu'] not in self.field:
                continue
            f = self.field[reg['pdu']]['ethname']
            pdu = self.eth_hf[f]['pdu']
            new_prefix = ''
            if (pdu['new']):
                new_prefix = 'new_'
            if first_decl:
                fx.write('  /*--- Syntax registrations ---*/\n')
                first_decl = False
            fx.write('  %sregister_ber_syntax_dissector(%s, proto_%s, dissect_%s_PDU);\n' % (new_prefix, k, self.eproto, reg['pdu']));
            fempty=False
        self.output.file_close(fx, discard=fempty)

    #--- eth_output_tables -----------------------------------------------------
    def eth_output_tables(self):
        for num in list(self.conform.report.keys()):
            fx = self.output.file_open('table' + num)
            for rep in self.conform.report[num]:
                self.eth_output_table(fx, rep)
            self.output.file_close(fx)

    #--- eth_output_table -----------------------------------------------------
    def eth_output_table(self, fx, rep):
        if rep['type'] == 'HDR':
            fx.write('\n')
        if rep['var']:
            var = rep['var']
            var_list = var.split('.', 1)
            cls = var_list[0]
            del var_list[0]
            flds = []
            not_flds = []
            sort_flds = []
            for f in var_list:
                if f[0] == '!':
                    not_flds.append(f[1:])
                    continue
                if f[0] == '#':
                    flds.append(f[1:])
                    sort_flds.append(f)
                    continue
                if f[0] == '@':
                    flds.append(f[1:])
                    sort_flds.append(f[1:])
                    continue
                flds.append(f)
            objs = {}
            objs_ord = []
            if (cls in self.oassign_cls):
                for ident in self.oassign_cls[cls]:
                    obj = self.get_obj_repr(ident, flds, not_flds)
                    if not obj:
                        continue
                    obj['_LOOP'] = var
                    obj['_DICT'] = str(obj)
                    objs[ident] = obj
                    objs_ord.append(ident)
                if (sort_flds):
                    # Sort identifiers according to the matching object in objs.
                    # The order is determined by sort_flds, keys prefixed by a
                    # '#' are compared numerically.
def obj_key_fn(name): obj = objs[name] return list( int(obj[f[1:]]) if f[0] == '#' else obj[f] for f in sort_flds ) objs_ord.sort(key=obj_key_fn) for ident in objs_ord: obj = objs[ident] try: text = rep['text'] % obj except (KeyError): raise sys.exc_info()[0]("%s:%s invalid key %s for information object %s of %s" % (rep['fn'], rep['lineno'], sys.exc_info()[1], ident, var)) fx.write(text) else: fx.write("/* Unknown or empty loop list %s */\n" % (var)) else: fx.write(rep['text']) if rep['type'] == 'FTR': fx.write('\n') #--- dupl_report ----------------------------------------------------- def dupl_report(self): # types tmplist = sorted(self.eth_type_dupl.keys()) for t in tmplist: msg = "The same type names for different types. Explicit type renaming is recommended.\n" msg += t + "\n" for tt in self.eth_type_dupl[t]: msg += " %-20s %s\n" % (self.type[tt]['ethname'], tt) warnings.warn_explicit(msg, UserWarning, '', 0) # fields tmplist = list(self.eth_hf_dupl.keys()) tmplist.sort() for f in tmplist: msg = "The same field names for different types. Explicit field renaming is recommended.\n" msg += f + "\n" for tt in list(self.eth_hf_dupl[f].keys()): msg += " %-20s %-20s " % (self.eth_hf_dupl[f][tt], tt) msg += ", ".join(self.eth_hf[self.eth_hf_dupl[f][tt]]['ref']) msg += "\n" warnings.warn_explicit(msg, UserWarning, '', 0) #--- eth_do_output ------------------------------------------------------------ def eth_do_output(self): if self.dbg('a'): print("\n# Assignments") for a in self.assign_ord: v = ' ' if (self.assign[a]['virt']): v = '*' print(v, a) print("\n# Value assignments") for a in self.vassign_ord: print(' ', a) print("\n# Information object assignments") for a in self.oassign_ord: print(" %-12s (%s)" % (a, self.oassign[a].cls)) if self.dbg('t'): print("\n# Imported Types") print("%-40s %-24s %-24s" % ("ASN.1 name", "Module", "Protocol")) print("-" * 100) for t in self.type_imp: print("%-40s %-24s %-24s" % (t, self.type[t]['import'], self.type[t]['proto'])) print("\n# Imported Values") print("%-40s %-24s %-24s" % ("ASN.1 name", "Module", "Protocol")) print("-" * 100) for t in self.value_imp: print("%-40s %-24s %-24s" % (t, self.value[t]['import'], self.value[t]['proto'])) print("\n# Imported Object Classes") print("%-40s %-24s %-24s" % ("ASN.1 name", "Module", "Protocol")) print("-" * 100) for t in self.objectclass_imp: print("%-40s %-24s %-24s" % (t, self.objectclass[t]['import'], self.objectclass[t]['proto'])) print("\n# Exported Types") print("%-31s %s" % ("Wireshark type", "Export Flag")) print("-" * 100) for t in self.eth_export_ord: print("%-31s 0x%02X" % (t, self.eth_type[t]['export'])) print("\n# Exported Values") print("%-40s %s" % ("Wireshark name", "Value")) print("-" * 100) for v in self.eth_vexport_ord: vv = self.eth_value[v]['value'] if isinstance (vv, Value): vv = vv.to_str(self) print("%-40s %s" % (v, vv)) print("\n# ASN.1 Object Classes") print("%-40s %-24s %-24s" % ("ASN.1 name", "Module", "Protocol")) print("-" * 100) for t in self.objectclass_ord: print("%-40s " % (t)) print("\n# ASN.1 Types") print("%-49s %-24s %-24s" % ("ASN.1 unique name", "'tname'", "Wireshark type")) print("-" * 100) for t in self.type_ord: print("%-49s %-24s %-24s" % (t, self.type[t]['tname'], self.type[t]['ethname'])) print("\n# Wireshark Types") print("Wireshark type References (ASN.1 types)") print("-" * 100) for t in self.eth_type_ord: sys.stdout.write("%-31s %d" % (t, len(self.eth_type[t]['ref']))) print(', '.join(self.eth_type[t]['ref'])) print("\n# ASN.1 Values") print("%-40s %-18s %-20s 
%s" % ("ASN.1 unique name", "Type", "Value", "Wireshark value")) print("-" * 100) for v in self.value_ord: vv = self.value[v]['value'] if isinstance (vv, Value): vv = vv.to_str(self) print("%-40s %-18s %-20s %s" % (v, self.value[v]['type'].eth_tname(), vv, self.value[v]['ethname'])) #print "\n# Wireshark Values" #print "%-40s %s" % ("Wireshark name", "Value") #print "-" * 100 #for v in self.eth_value_ord: # vv = self.eth_value[v]['value'] # if isinstance (vv, Value): # vv = vv.to_str(self) # print "%-40s %s" % (v, vv) print("\n# ASN.1 Fields") print("ASN.1 unique name Wireshark name ASN.1 type") print("-" * 100) for f in (self.pdu_ord + self.field_ord): print("%-40s %-20s %s" % (f, self.field[f]['ethname'], self.field[f]['type'])) print("\n# Wireshark Fields") print("Wireshark name Wireshark type References (ASN.1 fields)") print("-" * 100) for f in (self.eth_hfpdu_ord + self.eth_hf_ord): sys.stdout.write("%-30s %-20s %s" % (f, self.eth_hf[f]['ethtype'], len(self.eth_hf[f]['ref']))) print(', '.join(self.eth_hf[f]['ref'])) #print "\n# Order after dependencies" #print '\n'.join(self.eth_type_ord1) print("\n# Cyclic dependencies") for c in self.eth_dep_cycle: print(' -> '.join(c)) self.dupl_report() self.output.outnm = self.outnm_opt if (not self.output.outnm): self.output.outnm = self.proto self.output.outnm = self.output.outnm.replace('.', '-') if not self.justexpcnf: self.eth_output_hf() self.eth_output_ett() self.eth_output_types() self.eth_output_hf_arr() self.eth_output_ett_arr() self.eth_output_export() self.eth_output_val() self.eth_output_valexp() self.eth_output_dis_hnd() self.eth_output_dis_reg() self.eth_output_dis_tab() self.eth_output_syn_reg() self.eth_output_tables() if self.expcnf: self.eth_output_expcnf() def dbg_modules(self): def print_mod(m): sys.stdout.write("%-30s " % (m)) dep = self.module[m][:] for i in range(len(dep)): if dep[i] not in self.module: dep[i] = '*' + dep[i] print(', '.join(dep)) # end of print_mod() (mod_ord, mod_cyc) = dependency_compute(self.module_ord, self.module, ignore_fn = lambda t: t not in self.module) print("\n# ASN.1 Moudules") print("Module name Dependency") print("-" * 100) new_ord = False for m in (self.module_ord): print_mod(m) new_ord = new_ord or (self.module_ord.index(m) != mod_ord.index(m)) if new_ord: print("\n# ASN.1 Moudules - in dependency order") print("Module name Dependency") print("-" * 100) for m in (mod_ord): print_mod(m) if mod_cyc: print("\nCyclic dependencies:") for i in (list(range(len(mod_cyc)))): print("%02d: %s" % (i + 1, str(mod_cyc[i]))) #--- EthCnf ------------------------------------------------------------------- class EthCnf: def __init__(self): self.ectx = None self.tblcfg = {} self.table = {} self.order = {} self.fn = {} self.report = {} self.suppress_line = False self.include_path = [] # Value name Default value Duplicity check Usage check self.tblcfg['EXPORTS'] = { 'val_nm' : 'flag', 'val_dflt' : 0, 'chk_dup' : True, 'chk_use' : True } self.tblcfg['MAKE_ENUM'] = { 'val_nm' : 'flag', 'val_dflt' : 0, 'chk_dup' : True, 'chk_use' : True } self.tblcfg['USE_VALS_EXT'] = { 'val_nm' : 'flag', 'val_dflt' : 0, 'chk_dup' : True, 'chk_use' : True } self.tblcfg['PDU'] = { 'val_nm' : 'attr', 'val_dflt' : None, 'chk_dup' : True, 'chk_use' : True } self.tblcfg['SYNTAX'] = { 'val_nm' : 'attr', 'val_dflt' : None, 'chk_dup' : True, 'chk_use' : True } self.tblcfg['REGISTER'] = { 'val_nm' : 'attr', 'val_dflt' : None, 'chk_dup' : True, 'chk_use' : True } self.tblcfg['USER_DEFINED'] = { 'val_nm' : 'flag', 'val_dflt' : 0, 'chk_dup' 
: True, 'chk_use' : True } self.tblcfg['NO_EMIT'] = { 'val_nm' : 'flag', 'val_dflt' : 0, 'chk_dup' : True, 'chk_use' : True } self.tblcfg['MODULE'] = { 'val_nm' : 'proto', 'val_dflt' : None, 'chk_dup' : True, 'chk_use' : False } self.tblcfg['OMIT_ASSIGNMENT'] = { 'val_nm' : 'omit', 'val_dflt' : False, 'chk_dup' : True, 'chk_use' : True } self.tblcfg['NO_OMIT_ASSGN'] = { 'val_nm' : 'omit', 'val_dflt' : True, 'chk_dup' : True, 'chk_use' : True } self.tblcfg['VIRTUAL_ASSGN'] = { 'val_nm' : 'name', 'val_dflt' : None, 'chk_dup' : True, 'chk_use' : True } self.tblcfg['SET_TYPE'] = { 'val_nm' : 'type', 'val_dflt' : None, 'chk_dup' : True, 'chk_use' : True } self.tblcfg['TYPE_RENAME'] = { 'val_nm' : 'eth_name', 'val_dflt' : None, 'chk_dup' : True, 'chk_use' : True } self.tblcfg['FIELD_RENAME'] = { 'val_nm' : 'eth_name', 'val_dflt' : None, 'chk_dup' : True, 'chk_use' : True } self.tblcfg['IMPORT_TAG'] = { 'val_nm' : 'ttag', 'val_dflt' : (), 'chk_dup' : True, 'chk_use' : False } self.tblcfg['FN_PARS'] = { 'val_nm' : 'pars', 'val_dflt' : {}, 'chk_dup' : True, 'chk_use' : True } self.tblcfg['TYPE_ATTR'] = { 'val_nm' : 'attr', 'val_dflt' : {}, 'chk_dup' : True, 'chk_use' : False } self.tblcfg['ETYPE_ATTR'] = { 'val_nm' : 'attr', 'val_dflt' : {}, 'chk_dup' : True, 'chk_use' : False } self.tblcfg['FIELD_ATTR'] = { 'val_nm' : 'attr', 'val_dflt' : {}, 'chk_dup' : True, 'chk_use' : True } self.tblcfg['EFIELD_ATTR'] = { 'val_nm' : 'attr', 'val_dflt' : {}, 'chk_dup' : True, 'chk_use' : True } self.tblcfg['ASSIGNED_ID'] = { 'val_nm' : 'ids', 'val_dflt' : {}, 'chk_dup' : False,'chk_use' : False } self.tblcfg['ASSIGN_VALUE_TO_TYPE'] = { 'val_nm' : 'name', 'val_dflt' : None, 'chk_dup' : True, 'chk_use' : True } for k in list(self.tblcfg.keys()) : self.table[k] = {} self.order[k] = [] def add_item(self, table, key, fn, lineno, **kw): if self.tblcfg[table]['chk_dup'] and key in self.table[table]: warnings.warn_explicit("Duplicated %s for %s. 
Previous one is at %s:%d" % (table, key, self.table[table][key]['fn'], self.table[table][key]['lineno']), UserWarning, fn, lineno) return self.table[table][key] = {'fn' : fn, 'lineno' : lineno, 'used' : False} self.table[table][key].update(kw) self.order[table].append(key) def update_item(self, table, key, fn, lineno, **kw): if key not in self.table[table]: self.table[table][key] = {'fn' : fn, 'lineno' : lineno, 'used' : False} self.order[table].append(key) self.table[table][key][self.tblcfg[table]['val_nm']] = {} self.table[table][key][self.tblcfg[table]['val_nm']].update(kw[self.tblcfg[table]['val_nm']]) def get_order(self, table): return self.order[table] def check_item(self, table, key): return key in self.table[table] def copy_item(self, table, dst_key, src_key): if (src_key in self.table[table]): self.table[table][dst_key] = self.table[table][src_key] def check_item_value(self, table, key, **kw): return key in self.table[table] and kw.get('val_nm', self.tblcfg[table]['val_nm']) in self.table[table][key] def use_item(self, table, key, **kw): vdflt = kw.get('val_dflt', self.tblcfg[table]['val_dflt']) if key not in self.table[table]: return vdflt vname = kw.get('val_nm', self.tblcfg[table]['val_nm']) #print "use_item() - set used for %s %s" % (table, key) self.table[table][key]['used'] = True return self.table[table][key].get(vname, vdflt) def omit_assignment(self, type, ident, module): if self.ectx.conform.use_item('OMIT_ASSIGNMENT', ident): return True if self.ectx.conform.use_item('OMIT_ASSIGNMENT', '*') or \ self.ectx.conform.use_item('OMIT_ASSIGNMENT', '*'+type) or \ self.ectx.conform.use_item('OMIT_ASSIGNMENT', '*/'+module) or \ self.ectx.conform.use_item('OMIT_ASSIGNMENT', '*'+type+'/'+module): return self.ectx.conform.use_item('NO_OMIT_ASSGN', ident) return False def add_fn_line(self, name, ctx, line, fn, lineno): if name not in self.fn: self.fn[name] = {'FN_HDR' : None, 'FN_FTR' : None, 'FN_BODY' : None} if (self.fn[name][ctx]): self.fn[name][ctx]['text'] += line else: self.fn[name][ctx] = {'text' : line, 'used' : False, 'fn' : fn, 'lineno' : lineno} def get_fn_presence(self, name): #print "get_fn_presence('%s'):%s" % (name, str(self.fn.has_key(name))) #if self.fn.has_key(name): print self.fn[name] return name in self.fn def get_fn_body_presence(self, name): return name in self.fn and self.fn[name]['FN_BODY'] def get_fn_text(self, name, ctx): if (name not in self.fn): return ''; if (not self.fn[name][ctx]): return ''; self.fn[name][ctx]['used'] = True out = self.fn[name][ctx]['text'] if (not self.suppress_line): out = '#line %u "%s"\n%s\n' % (self.fn[name][ctx]['lineno'], rel_dissector_path(self.fn[name][ctx]['fn']), out); return out def add_pdu(self, par, is_new, fn, lineno): #print "add_pdu(par=%s, %s, %d)" % (str(par), fn, lineno) (reg, hidden) = (None, False) if (len(par) > 1): reg = par[1] if (reg and reg[0]=='@'): (reg, hidden) = (reg[1:], True) attr = {'new' : is_new, 'reg' : reg, 'hidden' : hidden, 'need_decl' : False, 'export' : False} self.add_item('PDU', par[0], attr=attr, fn=fn, lineno=lineno) return def add_syntax(self, par, fn, lineno): #print "add_syntax(par=%s, %s, %d)" % (str(par), fn, lineno) if( (len(par) >=2)): name = par[1] else: name = '"'+par[0]+'"' attr = { 'pdu' : par[0] } self.add_item('SYNTAX', name, attr=attr, fn=fn, lineno=lineno) return def add_register(self, pdu, par, fn, lineno): #print "add_register(pdu=%s, par=%s, %s, %d)" % (pdu, str(par), fn, lineno) if (par[0] in ('N', 'NUM')): rtype = 'NUM'; (pmin, pmax) = (2, 2) elif (par[0] in ('S', 
'STR')): rtype = 'STR'; (pmin, pmax) = (2, 2) elif (par[0] in ('B', 'BER')): rtype = 'BER'; (pmin, pmax) = (1, 2) elif (par[0] in ('P', 'PER')): rtype = 'PER'; (pmin, pmax) = (1, 2) else: warnings.warn_explicit("Unknown registration type '%s'" % (par[2]), UserWarning, fn, lineno); return if ((len(par)-1) < pmin): warnings.warn_explicit("Too few parameters for %s registration type. At least %d parameters are required" % (rtype, pmin), UserWarning, fn, lineno) return if ((len(par)-1) > pmax): warnings.warn_explicit("Too many parameters for %s registration type. Only %d parameters are allowed" % (rtype, pmax), UserWarning, fn, lineno) attr = {'pdu' : pdu, 'rtype' : rtype} if (rtype in ('NUM', 'STR')): attr['rtable'] = par[1] attr['rport'] = par[2] rkey = '/'.join([rtype, attr['rtable'], attr['rport']]) elif (rtype in ('BER', 'PER')): attr['roid'] = par[1] attr['roidname'] = '""' if (len(par)>=3): attr['roidname'] = par[2] elif attr['roid'][0] != '"': attr['roidname'] = '"' + attr['roid'] + '"' rkey = '/'.join([rtype, attr['roid']]) self.add_item('REGISTER', rkey, attr=attr, fn=fn, lineno=lineno) def check_par(self, par, pmin, pmax, fn, lineno): for i in range(len(par)): if par[i] == '-': par[i] = None continue if par[i][0] == '#': par[i:] = [] break if len(par) < pmin: warnings.warn_explicit("Too few parameters. At least %d parameters are required" % (pmin), UserWarning, fn, lineno) return None if (pmax >= 0) and (len(par) > pmax): warnings.warn_explicit("Too many parameters. Only %d parameters are allowed" % (pmax), UserWarning, fn, lineno) return par[0:pmax] return par def read(self, fn): def get_par(line, pmin, pmax, fn, lineno): par = line.split(None, pmax) par = self.check_par(par, pmin, pmax, fn, lineno) return par def get_par_nm(line, pmin, pmax, fn, lineno): if pmax: par = line.split(None, pmax) else: par = [line,] for i in range(len(par)): if par[i][0] == '#': par[i:] = [] break if len(par) < pmin: warnings.warn_explicit("Too few parameters. 
At least %d parameters are required" % (pmin), UserWarning, fn, lineno) return None if len(par) > pmax: nmpar = par[pmax] else: nmpar = '' nmpars = {} nmpar_first = re.compile(r'^\s*(?P<attr>[_A-Z][_A-Z0-9]*)\s*=\s*') nmpar_next = re.compile(r'\s+(?P<attr>[_A-Z][_A-Z0-9]*)\s*=\s*') nmpar_end = re.compile(r'\s*$') result = nmpar_first.search(nmpar) pos = 0 while result: k = result.group('attr') pos = result.end() result = nmpar_next.search(nmpar, pos) p1 = pos if result: p2 = result.start() else: p2 = nmpar_end.search(nmpar, pos).start() v = nmpar[p1:p2] nmpars[k] = v if len(par) > pmax: par[pmax] = nmpars return par f = open(fn, "r") lineno = 0 is_import = False directive = re.compile(r'^\s*#\.(?P<name>[A-Z_][A-Z_0-9]*)(\s+|$)') cdirective = re.compile(r'^\s*##') report = re.compile(r'^TABLE(?P<num>\d*)_(?P<type>HDR|BODY|FTR)$') comment = re.compile(r'^\s*#[^.#]') empty = re.compile(r'^\s*$') ctx = None name = '' default_flags = 0x00 stack = [] while True: if not f.closed: line = f.readline() lineno += 1 else: line = None if not line: if not f.closed: f.close() if stack: frec = stack.pop() fn, f, lineno, is_import = frec['fn'], frec['f'], frec['lineno'], frec['is_import'] continue else: break if comment.search(line): continue result = directive.search(line) if result: # directive rep_result = report.search(result.group('name')) if result.group('name') == 'END_OF_CNF': f.close() elif result.group('name') == 'OPT': ctx = result.group('name') par = get_par(line[result.end():], 0, -1, fn=fn, lineno=lineno) if not par: continue self.set_opt(par[0], par[1:], fn, lineno) ctx = None elif result.group('name') in ('PDU', 'PDU_NEW', 'REGISTER', 'REGISTER_NEW', 'MODULE', 'MODULE_IMPORT', 'OMIT_ASSIGNMENT', 'NO_OMIT_ASSGN', 'VIRTUAL_ASSGN', 'SET_TYPE', 'ASSIGN_VALUE_TO_TYPE', 'TYPE_RENAME', 'FIELD_RENAME', 'TF_RENAME', 'IMPORT_TAG', 'TYPE_ATTR', 'ETYPE_ATTR', 'FIELD_ATTR', 'EFIELD_ATTR', 'SYNTAX', 'SYNTAX_NEW'): ctx = result.group('name') elif result.group('name') in ('OMIT_ALL_ASSIGNMENTS', 'OMIT_ASSIGNMENTS_EXCEPT', 'OMIT_ALL_TYPE_ASSIGNMENTS', 'OMIT_TYPE_ASSIGNMENTS_EXCEPT', 'OMIT_ALL_VALUE_ASSIGNMENTS', 'OMIT_VALUE_ASSIGNMENTS_EXCEPT'): ctx = result.group('name') key = '*' if ctx in ('OMIT_ALL_TYPE_ASSIGNMENTS', 'OMIT_TYPE_ASSIGNMENTS_EXCEPT'): key += 'T' if ctx in ('OMIT_ALL_VALUE_ASSIGNMENTS', 'OMIT_VALUE_ASSIGNMENTS_EXCEPT'): key += 'V' par = get_par(line[result.end():], 0, 1, fn=fn, lineno=lineno) if par: key += '/' + par[0] self.add_item('OMIT_ASSIGNMENT', key, omit=True, fn=fn, lineno=lineno) if ctx in ('OMIT_ASSIGNMENTS_EXCEPT', 'OMIT_TYPE_ASSIGNMENTS_EXCEPT', 'OMIT_VALUE_ASSIGNMENTS_EXCEPT'): ctx = 'NO_OMIT_ASSGN' else: ctx = None elif result.group('name') in ('EXPORTS', 'MODULE_EXPORTS', 'USER_DEFINED', 'NO_EMIT'): ctx = result.group('name') default_flags = EF_TYPE|EF_VALS if ctx == 'MODULE_EXPORTS': ctx = 'EXPORTS' default_flags |= EF_MODULE if ctx == 'EXPORTS': par = get_par(line[result.end():], 0, 5, fn=fn, lineno=lineno) else: par = get_par(line[result.end():], 0, 1, fn=fn, lineno=lineno) if not par: continue p = 1 if (par[0] == 'WITH_VALS'): default_flags |= EF_TYPE|EF_VALS elif (par[0] == 'WITHOUT_VALS'): default_flags |= EF_TYPE; default_flags &= ~EF_TYPE elif (par[0] == 'ONLY_VALS'): default_flags &= ~EF_TYPE; default_flags |= EF_VALS elif (ctx == 'EXPORTS'): p = 0 else: warnings.warn_explicit("Unknown parameter value '%s'" % (par[0]), UserWarning, fn, lineno) for i in range(p, len(par)): if (par[i] == 'ONLY_ENUM'): default_flags &= ~(EF_TYPE|EF_VALS); default_flags |= EF_ENUM elif 
(par[i] == 'WITH_ENUM'): default_flags |= EF_ENUM elif (par[i] == 'VALS_WITH_TABLE'): default_flags |= EF_TABLE elif (par[i] == 'WS_DLL'): default_flags |= EF_WS_DLL elif (par[i] == 'EXTERN'): default_flags |= EF_EXTERN elif (par[i] == 'NO_PROT_PREFIX'): default_flags |= EF_NO_PROT else: warnings.warn_explicit("Unknown parameter value '%s'" % (par[i]), UserWarning, fn, lineno) elif result.group('name') in ('MAKE_ENUM', 'MAKE_DEFINES'): ctx = result.group('name') default_flags = EF_ENUM if ctx == 'MAKE_ENUM': default_flags |= EF_NO_PROT|EF_NO_TYPE if ctx == 'MAKE_DEFINES': default_flags |= EF_DEFINE|EF_UCASE|EF_NO_TYPE par = get_par(line[result.end():], 0, 3, fn=fn, lineno=lineno) for i in range(0, len(par)): if (par[i] == 'NO_PROT_PREFIX'): default_flags |= EF_NO_PROT elif (par[i] == 'PROT_PREFIX'): default_flags &= ~ EF_NO_PROT elif (par[i] == 'NO_TYPE_PREFIX'): default_flags |= EF_NO_TYPE elif (par[i] == 'TYPE_PREFIX'): default_flags &= ~ EF_NO_TYPE elif (par[i] == 'UPPER_CASE'): default_flags |= EF_UCASE elif (par[i] == 'NO_UPPER_CASE'): default_flags &= ~EF_UCASE else: warnings.warn_explicit("Unknown parameter value '%s'" % (par[i]), UserWarning, fn, lineno) elif result.group('name') == 'USE_VALS_EXT': ctx = result.group('name') default_flags = 0xFF elif result.group('name') == 'FN_HDR': minp = 1 if (ctx in ('FN_PARS',)) and name: minp = 0 par = get_par(line[result.end():], minp, 1, fn=fn, lineno=lineno) if (not par) and (minp > 0): continue ctx = result.group('name') if par: name = par[0] elif result.group('name') == 'FN_FTR': minp = 1 if (ctx in ('FN_PARS','FN_HDR')) and name: minp = 0 par = get_par(line[result.end():], minp, 1, fn=fn, lineno=lineno) if (not par) and (minp > 0): continue ctx = result.group('name') if par: name = par[0] elif result.group('name') == 'FN_BODY': par = get_par_nm(line[result.end():], 1, 1, fn=fn, lineno=lineno) if not par: continue ctx = result.group('name') name = par[0] if len(par) > 1: self.add_item('FN_PARS', name, pars=par[1], fn=fn, lineno=lineno) elif result.group('name') == 'FN_PARS': par = get_par_nm(line[result.end():], 0, 1, fn=fn, lineno=lineno) ctx = result.group('name') if not par: name = None elif len(par) == 1: name = par[0] self.add_item(ctx, name, pars={}, fn=fn, lineno=lineno) elif len(par) > 1: self.add_item(ctx, par[0], pars=par[1], fn=fn, lineno=lineno) ctx = None elif result.group('name') == 'CLASS': par = get_par(line[result.end():], 1, 1, fn=fn, lineno=lineno) if not par: continue ctx = result.group('name') name = par[0] add_class_ident(name) if not name.split('$')[-1].isupper(): warnings.warn_explicit("No lower-case letters shall be included in information object class name (%s)" % (name), UserWarning, fn, lineno) elif result.group('name') == 'ASSIGNED_OBJECT_IDENTIFIER': par = get_par(line[result.end():], 1, 1, fn=fn, lineno=lineno) if not par: continue self.update_item('ASSIGNED_ID', 'OBJECT_IDENTIFIER', ids={par[0] : par[0]}, fn=fn, lineno=lineno) elif rep_result: # Reports num = rep_result.group('num') type = rep_result.group('type') if type == 'BODY': par = get_par(line[result.end():], 1, 1, fn=fn, lineno=lineno) if not par: continue else: par = get_par(line[result.end():], 0, 0, fn=fn, lineno=lineno) rep = { 'type' : type, 'var' : None, 'text' : '', 'fn' : fn, 'lineno' : lineno } if len(par) > 0: rep['var'] = par[0] self.report.setdefault(num, []).append(rep) ctx = 'TABLE' name = num elif result.group('name') in ('INCLUDE', 'IMPORT') : is_imp = result.group('name') == 'IMPORT' par = get_par(line[result.end():], 1, 1, fn=fn, 
lineno=lineno) if not par: warnings.warn_explicit("%s requires parameter" % (result.group('name'),), UserWarning, fn, lineno) continue fname = par[0] #print "Try include: %s" % (fname) if (not os.path.exists(fname)): fname = os.path.join(os.path.split(fn)[0], par[0]) #print "Try include: %s" % (fname) i = 0 while not os.path.exists(fname) and (i < len(self.include_path)): fname = os.path.join(self.include_path[i], par[0]) #print "Try include: %s" % (fname) i += 1 if (not os.path.exists(fname)): if is_imp: continue # just ignore else: fname = par[0] # report error fnew = open(fname, "r") stack.append({'fn' : fn, 'f' : f, 'lineno' : lineno, 'is_import' : is_import}) fn, f, lineno, is_import = par[0], fnew, 0, is_imp elif result.group('name') == 'END': ctx = None else: warnings.warn_explicit("Unknown directive '%s'" % (result.group('name')), UserWarning, fn, lineno) continue if not ctx: if not empty.match(line): warnings.warn_explicit("Non-empty line in empty context", UserWarning, fn, lineno) elif ctx == 'OPT': if empty.match(line): continue par = get_par(line, 1, -1, fn=fn, lineno=lineno) if not par: continue self.set_opt(par[0], par[1:], fn, lineno) elif ctx in ('EXPORTS', 'USER_DEFINED', 'NO_EMIT'): if empty.match(line): continue if ctx == 'EXPORTS': par = get_par(line, 1, 6, fn=fn, lineno=lineno) else: par = get_par(line, 1, 2, fn=fn, lineno=lineno) if not par: continue flags = default_flags p = 2 if (len(par)>=2): if (par[1] == 'WITH_VALS'): flags |= EF_TYPE|EF_VALS elif (par[1] == 'WITHOUT_VALS'): flags |= EF_TYPE; flags &= ~EF_TYPE elif (par[1] == 'ONLY_VALS'): flags &= ~EF_TYPE; flags |= EF_VALS elif (ctx == 'EXPORTS'): p = 1 else: warnings.warn_explicit("Unknown parameter value '%s'" % (par[1]), UserWarning, fn, lineno) for i in range(p, len(par)): if (par[i] == 'ONLY_ENUM'): flags &= ~(EF_TYPE|EF_VALS); flags |= EF_ENUM elif (par[i] == 'WITH_ENUM'): flags |= EF_ENUM elif (par[i] == 'VALS_WITH_TABLE'): flags |= EF_TABLE elif (par[i] == 'WS_DLL'): flags |= EF_WS_DLL elif (par[i] == 'EXTERN'): flags |= EF_EXTERN elif (par[i] == 'NO_PROT_PREFIX'): flags |= EF_NO_PROT else: warnings.warn_explicit("Unknown parameter value '%s'" % (par[i]), UserWarning, fn, lineno) self.add_item(ctx, par[0], flag=flags, fn=fn, lineno=lineno) elif ctx in ('MAKE_ENUM', 'MAKE_DEFINES'): if empty.match(line): continue par = get_par(line, 1, 4, fn=fn, lineno=lineno) if not par: continue flags = default_flags for i in range(1, len(par)): if (par[i] == 'NO_PROT_PREFIX'): flags |= EF_NO_PROT elif (par[i] == 'PROT_PREFIX'): flags &= ~ EF_NO_PROT elif (par[i] == 'NO_TYPE_PREFIX'): flags |= EF_NO_TYPE elif (par[i] == 'TYPE_PREFIX'): flags &= ~ EF_NO_TYPE elif (par[i] == 'UPPER_CASE'): flags |= EF_UCASE elif (par[i] == 'NO_UPPER_CASE'): flags &= ~EF_UCASE else: warnings.warn_explicit("Unknown parameter value '%s'" % (par[i]), UserWarning, fn, lineno) self.add_item('MAKE_ENUM', par[0], flag=flags, fn=fn, lineno=lineno) elif ctx == 'USE_VALS_EXT': if empty.match(line): continue par = get_par(line, 1, 1, fn=fn, lineno=lineno) if not par: continue flags = default_flags self.add_item('USE_VALS_EXT', par[0], flag=flags, fn=fn, lineno=lineno) elif ctx in ('PDU', 'PDU_NEW'): if empty.match(line): continue par = get_par(line, 1, 5, fn=fn, lineno=lineno) if not par: continue is_new = False if (ctx == 'PDU_NEW'): is_new = True self.add_pdu(par[0:2], is_new, fn, lineno) if (len(par)>=3): self.add_register(par[0], par[2:5], fn, lineno) elif ctx in ('SYNTAX', 'SYNTAX_NEW'): if empty.match(line): continue par = get_par(line, 1, 2, 
fn=fn, lineno=lineno) if not par: continue if not self.check_item('PDU', par[0]): is_new = False if (ctx == 'SYNTAX_NEW'): is_new = True self.add_pdu(par[0:1], is_new, fn, lineno) self.add_syntax(par, fn, lineno) elif ctx in ('REGISTER', 'REGISTER_NEW'): if empty.match(line): continue par = get_par(line, 3, 4, fn=fn, lineno=lineno) if not par: continue if not self.check_item('PDU', par[0]): is_new = False if (ctx == 'REGISTER_NEW'): is_new = True self.add_pdu(par[0:1], is_new, fn, lineno) self.add_register(par[0], par[1:4], fn, lineno) elif ctx in ('MODULE', 'MODULE_IMPORT'): if empty.match(line): continue par = get_par(line, 2, 2, fn=fn, lineno=lineno) if not par: continue self.add_item('MODULE', par[0], proto=par[1], fn=fn, lineno=lineno) elif ctx == 'IMPORT_TAG': if empty.match(line): continue par = get_par(line, 3, 3, fn=fn, lineno=lineno) if not par: continue self.add_item(ctx, par[0], ttag=(par[1], par[2]), fn=fn, lineno=lineno) elif ctx == 'OMIT_ASSIGNMENT': if empty.match(line): continue par = get_par(line, 1, 1, fn=fn, lineno=lineno) if not par: continue self.add_item(ctx, par[0], omit=True, fn=fn, lineno=lineno) elif ctx == 'NO_OMIT_ASSGN': if empty.match(line): continue par = get_par(line, 1, 1, fn=fn, lineno=lineno) if not par: continue self.add_item(ctx, par[0], omit=False, fn=fn, lineno=lineno) elif ctx == 'VIRTUAL_ASSGN': if empty.match(line): continue par = get_par(line, 2, -1, fn=fn, lineno=lineno) if not par: continue if (len(par[1].split('/')) > 1) and not self.check_item('SET_TYPE', par[1]): self.add_item('SET_TYPE', par[1], type=par[0], fn=fn, lineno=lineno) self.add_item('VIRTUAL_ASSGN', par[1], name=par[0], fn=fn, lineno=lineno) for nm in par[2:]: self.add_item('SET_TYPE', nm, type=par[0], fn=fn, lineno=lineno) if not par[0][0].isupper(): warnings.warn_explicit("Virtual assignment should have uppercase name (%s)" % (par[0]), UserWarning, fn, lineno) elif ctx == 'SET_TYPE': if empty.match(line): continue par = get_par(line, 2, 2, fn=fn, lineno=lineno) if not par: continue if not self.check_item('VIRTUAL_ASSGN', par[0]): self.add_item('SET_TYPE', par[0], type=par[1], fn=fn, lineno=lineno) if not par[1][0].isupper(): warnings.warn_explicit("Set type should have uppercase name (%s)" % (par[1]), UserWarning, fn, lineno) elif ctx == 'ASSIGN_VALUE_TO_TYPE': if empty.match(line): continue par = get_par(line, 2, 2, fn=fn, lineno=lineno) if not par: continue self.add_item(ctx, par[0], name=par[1], fn=fn, lineno=lineno) elif ctx == 'TYPE_RENAME': if empty.match(line): continue par = get_par(line, 2, 2, fn=fn, lineno=lineno) if not par: continue self.add_item('TYPE_RENAME', par[0], eth_name=par[1], fn=fn, lineno=lineno) if not par[1][0].isupper(): warnings.warn_explicit("Type should be renamed to uppercase name (%s)" % (par[1]), UserWarning, fn, lineno) elif ctx == 'FIELD_RENAME': if empty.match(line): continue par = get_par(line, 2, 2, fn=fn, lineno=lineno) if not par: continue self.add_item('FIELD_RENAME', par[0], eth_name=par[1], fn=fn, lineno=lineno) if not par[1][0].islower(): warnings.warn_explicit("Field should be renamed to lowercase name (%s)" % (par[1]), UserWarning, fn, lineno) elif ctx == 'TF_RENAME': if empty.match(line): continue par = get_par(line, 2, 2, fn=fn, lineno=lineno) if not par: continue tmpu = par[1][0].upper() + par[1][1:] tmpl = par[1][0].lower() + par[1][1:] self.add_item('TYPE_RENAME', par[0], eth_name=tmpu, fn=fn, lineno=lineno) if not tmpu[0].isupper(): warnings.warn_explicit("Type should be renamed to uppercase name (%s)" % (par[1]), UserWarning, 
fn, lineno) self.add_item('FIELD_RENAME', par[0], eth_name=tmpl, fn=fn, lineno=lineno) if not tmpl[0].islower(): warnings.warn_explicit("Field should be renamed to lowercase name (%s)" % (par[1]), UserWarning, fn, lineno) elif ctx in ('TYPE_ATTR', 'ETYPE_ATTR', 'FIELD_ATTR', 'EFIELD_ATTR'): if empty.match(line): continue par = get_par_nm(line, 1, 1, fn=fn, lineno=lineno) if not par: continue self.add_item(ctx, par[0], attr=par[1], fn=fn, lineno=lineno) elif ctx == 'FN_PARS': if empty.match(line): continue if name: par = get_par_nm(line, 0, 0, fn=fn, lineno=lineno) else: par = get_par_nm(line, 1, 1, fn=fn, lineno=lineno) if not par: continue if name: self.update_item(ctx, name, pars=par[0], fn=fn, lineno=lineno) else: self.add_item(ctx, par[0], pars=par[1], fn=fn, lineno=lineno) elif ctx in ('FN_HDR', 'FN_FTR', 'FN_BODY'): result = cdirective.search(line) if result: # directive line = '#' + line[result.end():] self.add_fn_line(name, ctx, line, fn=fn, lineno=lineno) elif ctx == 'CLASS': if empty.match(line): continue par = get_par(line, 1, 3, fn=fn, lineno=lineno) if not par: continue if not set_type_to_class(name, par[0], par[1:]): warnings.warn_explicit("Could not set type of class member %s.&%s to %s" % (name, par[0], par[1]), UserWarning, fn, lineno) elif ctx == 'TABLE': self.report[name][-1]['text'] += line def set_opt(self, opt, par, fn, lineno): #print "set_opt: %s, %s" % (opt, par) if opt in ("-I",): par = self.check_par(par, 1, 1, fn, lineno) if not par: return self.include_path.append(par[0]) elif opt in ("-b", "BER", "CER", "DER"): par = self.check_par(par, 0, 0, fn, lineno) self.ectx.encoding = 'ber' elif opt in ("PER",): par = self.check_par(par, 0, 0, fn, lineno) self.ectx.encoding = 'per' elif opt in ("-p", "PROTO"): par = self.check_par(par, 1, 1, fn, lineno) if not par: return self.ectx.proto_opt = par[0] self.ectx.merge_modules = True elif opt in ("ALIGNED",): par = self.check_par(par, 0, 0, fn, lineno) self.ectx.aligned = True elif opt in ("-u", "UNALIGNED"): par = self.check_par(par, 0, 0, fn, lineno) self.ectx.aligned = False elif opt in ("-d",): par = self.check_par(par, 1, 1, fn, lineno) if not par: return self.ectx.dbgopt = par[0] elif opt in ("-e",): par = self.check_par(par, 0, 0, fn, lineno) self.ectx.expcnf = True elif opt in ("-S",): par = self.check_par(par, 0, 0, fn, lineno) self.ectx.merge_modules = True elif opt in ("GROUP_BY_PROT",): par = self.check_par(par, 0, 0, fn, lineno) self.ectx.group_by_prot = True elif opt in ("-o",): par = self.check_par(par, 1, 1, fn, lineno) if not par: return self.ectx.outnm_opt = par[0] elif opt in ("-O",): par = self.check_par(par, 1, 1, fn, lineno) if not par: return self.ectx.output.outdir = par[0] elif opt in ("-s",): par = self.check_par(par, 1, 1, fn, lineno) if not par: return self.ectx.output.single_file = par[0] elif opt in ("-k",): par = self.check_par(par, 0, 0, fn, lineno) self.ectx.output.keep = True elif opt in ("-L",): par = self.check_par(par, 0, 0, fn, lineno) self.suppress_line = True elif opt in ("EMBEDDED_PDV_CB",): par = self.check_par(par, 1, 1, fn, lineno) if not par: return self.ectx.default_embedded_pdv_cb = par[0] elif opt in ("EXTERNAL_TYPE_CB",): par = self.check_par(par, 1, 1, fn, lineno) if not par: return self.ectx.default_external_type_cb = par[0] elif opt in ("-r",): par = self.check_par(par, 1, 1, fn, lineno) if not par: return self.ectx.remove_prefix = par[0] else: warnings.warn_explicit("Unknown option %s" % (opt), UserWarning, fn, lineno) def dbg_print(self): print("\n# Conformance values") 
print("%-15s %-4s %-15s %-20s %s" % ("File", "Line", "Table", "Key", "Value")) print("-" * 100) tbls = sorted(self.table.keys()) for t in tbls: keys = sorted(self.table[t].keys()) for k in keys: print("%-15s %4s %-15s %-20s %s" % ( self.table[t][k]['fn'], self.table[t][k]['lineno'], t, k, str(self.table[t][k][self.tblcfg[t]['val_nm']]))) def unused_report(self): tbls = sorted(self.table.keys()) for t in tbls: if not self.tblcfg[t]['chk_use']: continue keys = sorted(self.table[t].keys()) for k in keys: if not self.table[t][k]['used']: warnings.warn_explicit("Unused %s for %s" % (t, k), UserWarning, self.table[t][k]['fn'], self.table[t][k]['lineno']) fnms = list(self.fn.keys()) fnms.sort() for f in fnms: keys = sorted(self.fn[f].keys()) for k in keys: if not self.fn[f][k]: continue if not self.fn[f][k]['used']: warnings.warn_explicit("Unused %s for %s" % (k, f), UserWarning, self.fn[f][k]['fn'], self.fn[f][k]['lineno']) #--- EthOut ------------------------------------------------------------------- class EthOut: def __init__(self): self.ectx = None self.outnm = None self.outdir = '.' self.single_file = None self.created_files = {} self.created_files_ord = [] self.keep = False def outcomment(self, ln, comment=None): if comment: return '%s %s\n' % (comment, ln) else: return '/* %-74s */\n' % (ln) def created_file_add(self, name, keep_anyway): name = os.path.normcase(os.path.abspath(name)) if name not in self.created_files: self.created_files_ord.append(name) self.created_files[name] = keep_anyway else: self.created_files[name] = self.created_files[name] or keep_anyway def created_file_exists(self, name): name = os.path.normcase(os.path.abspath(name)) return name in self.created_files #--- output_fname ------------------------------------------------------- def output_fname(self, ftype, ext='c'): fn = '' if not ext in ('cnf',): fn += 'packet-' fn += self.outnm if (ftype): fn += '-' + ftype fn += '.' + ext return fn #--- file_open ------------------------------------------------------- def file_open(self, ftype, ext='c'): fn = self.output_fname(ftype, ext=ext) if self.created_file_exists(fn): fx = open(fn, 'a') else: fx = open(fn, 'w') comment = None if ext in ('cnf',): comment = '#' fx.write(self.fhdr(fn, comment = comment)) else: if (not self.single_file and not self.created_file_exists(fn)): fx.write(self.fhdr(fn)) if not self.ectx.merge_modules: fx.write('\n') mstr = "--- " if self.ectx.groups(): mstr += "Module" if (len(self.ectx.modules) > 1): mstr += "s" for (m, p) in self.ectx.modules: mstr += " %s" % (m) else: mstr += "Module %s" % (self.ectx.Module()) mstr += " --- --- ---" fx.write(self.outcomment(mstr, comment)) fx.write('\n') return fx #--- file_close ------------------------------------------------------- def file_close(self, fx, discard=False, keep_anyway=False): fx.close() if discard and not self.created_file_exists(fx.name): os.unlink(fx.name) else: self.created_file_add(fx.name, keep_anyway) #--- fhdr ------------------------------------------------------- def fhdr(self, fn, comment=None): out = '' out += self.outcomment('Do not modify this file. 
Changes will be overwritten.', comment) out += self.outcomment('Generated automatically by the ASN.1 to Wireshark dissector compiler', comment) out += self.outcomment(os.path.basename(fn), comment) out += self.outcomment(' '.join(sys.argv), comment) out += '\n' # Make Windows path separator look like Unix path separator out = out.replace('\\', '/') # Change absolute paths and relative paths generated outside # source directory to paths relative to asn1/<proto> subdir. out = re.sub(r'(\s)[./]\S*(/tools/|/epan/)', r'\1../..\2', out) out = re.sub(r'(\s)[./]\S*/asn1/\S*?([\s/])', r'\1.\2', out) return out #--- dbg_print ------------------------------------------------------- def dbg_print(self): print("\n# Output files") print("\n".join(self.created_files_ord)) print("\n") #--- make_single_file ------------------------------------------------------- def make_single_file(self): if (not self.single_file): return in_nm = self.single_file + '.c' out_nm = os.path.join(self.outdir, self.output_fname('')) self.do_include(out_nm, in_nm) in_nm = self.single_file + '.h' if (os.path.exists(in_nm)): out_nm = os.path.join(self.outdir, self.output_fname('', ext='h')) self.do_include(out_nm, in_nm) if (not self.keep): for fn in self.created_files_ord: if not self.created_files[fn]: os.unlink(fn) #--- do_include ------------------------------------------------------- def do_include(self, out_nm, in_nm): def check_file(fn, fnlist): fnfull = os.path.normcase(os.path.abspath(fn)) if (fnfull in fnlist and os.path.exists(fnfull)): return os.path.normpath(fn) return None fin = open(in_nm, "r") fout = open(out_nm, "w") fout.write(self.fhdr(out_nm)) fout.write('/* Input file: ' + os.path.basename(in_nm) +' */\n') fout.write('\n') fout.write('#line %u "%s"\n' % (1, rel_dissector_path(in_nm))) include = re.compile(r'^\s*#\s*include\s+[<"](?P<fname>[^>"]+)[>"]', re.IGNORECASE) cont_linenum = 0; while (True): cont_linenum = cont_linenum + 1; line = fin.readline() if (line == ''): break ifile = None result = include.search(line) #if (result): print os.path.normcase(os.path.abspath(result.group('fname'))) if (result): ifile = check_file(os.path.join(os.path.split(in_nm)[0], result.group('fname')), self.created_files) if (not ifile): ifile = check_file(os.path.join(self.outdir, result.group('fname')), self.created_files) if (not ifile): ifile = check_file(result.group('fname'), self.created_files) if (ifile): fout.write('\n') fout.write('/*--- Included file: ' + ifile + ' ---*/\n') fout.write('#line %u "%s"\n' % (1, rel_dissector_path(ifile))) finc = open(ifile, "r") fout.write(finc.read()) fout.write('\n') fout.write('/*--- End of included file: ' + ifile + ' ---*/\n') fout.write('#line %u "%s"\n' % (cont_linenum+1, rel_dissector_path(in_nm)) ) finc.close() else: fout.write(line) fout.close() fin.close() #--- Node --------------------------------------------------------------------- class Node: def __init__(self,*args, **kw): if len (args) == 0: self.type = self.__class__.__name__ else: assert (len(args) == 1) self.type = args[0] self.__dict__.update (kw) def str_child (self, key, child, depth): indent = " " * (2 * depth) keystr = indent + key + ": " if key == 'type': # already processed in str_depth return "" if isinstance (child, Node): # ugh return keystr + "\n" + child.str_depth (depth+1) if isinstance(child, type ([])): l = [] for x in child: if isinstance (x, Node): l.append (x.str_depth (depth+1)) else: l.append (indent + " " + str(x) + "\n") return keystr + "[\n" + ''.join(l) + indent + "]\n" else: return keystr + 
str (child) + "\n" def str_depth (self, depth): # ugh indent = " " * (2 * depth) l = ["%s%s" % (indent, self.type)] l.append ("".join ([self.str_child (k_v[0], k_v[1], depth + 1) for k_v in list(self.__dict__.items ())])) return "\n".join (l) def __repr__(self): return "\n" + self.str_depth (0) def to_python (self, ctx): return self.str_depth (ctx.indent_lev) def eth_reg(self, ident, ectx): pass def fld_obj_repr(self, ectx): return "/* TO DO %s */" % (str(self)) #--- ValueAssignment ------------------------------------------------------------- class ValueAssignment (Node): def __init__(self,*args, **kw) : Node.__init__ (self,*args, **kw) def eth_reg(self, ident, ectx): if ectx.conform.omit_assignment('V', self.ident, ectx.Module()): return # Assignment to omit ectx.eth_reg_vassign(self) ectx.eth_reg_value(self.ident, self.typ, self.val) #--- ObjectAssignment ------------------------------------------------------------- class ObjectAssignment (Node): def __init__(self,*args, **kw) : Node.__init__ (self,*args, **kw) def __eq__(self, other): if self.cls != other.cls: return False if len(self.val) != len(other.val): return False for f in (list(self.val.keys())): if f not in other.val: return False if isinstance(self.val[f], Node) and isinstance(other.val[f], Node): if not self.val[f].fld_obj_eq(other.val[f]): return False else: if str(self.val[f]) != str(other.val[f]): return False return True def eth_reg(self, ident, ectx): def make_virtual_type(cls, field, prefix): if isinstance(self.val, str): return if field in self.val and not isinstance(self.val[field], Type_Ref): vnm = prefix + '-' + self.ident virtual_tr = Type_Ref(val = vnm) t = self.val[field] self.val[field] = virtual_tr ectx.eth_reg_assign(vnm, t, virt=True) ectx.eth_reg_type(vnm, t) t.eth_reg_sub(vnm, ectx) if field in self.val and ectx.conform.check_item('PDU', cls + '.' + field): ectx.eth_reg_field(self.val[field].val, self.val[field].val, impl=self.val[field].HasImplicitTag(ectx), pdu=ectx.conform.use_item('PDU', cls + '.' 
+ field)) return # end of make_virtual_type() if ectx.conform.omit_assignment('V', self.ident, ectx.Module()): return # Assignment to omit self.module = ectx.Module() ectx.eth_reg_oassign(self) if (self.cls == 'TYPE-IDENTIFIER') or (self.cls == 'ABSTRACT-SYNTAX'): make_virtual_type(self.cls, '&Type', 'TYPE') if (self.cls == 'OPERATION'): make_virtual_type(self.cls, '&ArgumentType', 'ARG') make_virtual_type(self.cls, '&ResultType', 'RES') if (self.cls == 'ERROR'): make_virtual_type(self.cls, '&ParameterType', 'PAR') #--- Type --------------------------------------------------------------------- class Type (Node): def __init__(self,*args, **kw) : self.name = None self.constr = None self.tags = [] self.named_list = None Node.__init__ (self,*args, **kw) def IsNamed(self): if self.name is None : return False else: return True def HasConstraint(self): if self.constr is None : return False else : return True def HasSizeConstraint(self): return self.HasConstraint() and self.constr.IsSize() def HasValueConstraint(self): return self.HasConstraint() and self.constr.IsValue() def HasPermAlph(self): return self.HasConstraint() and self.constr.IsPermAlph() def HasContentsConstraint(self): return self.HasConstraint() and self.constr.IsContents() def HasOwnTag(self): return len(self.tags) > 0 def HasImplicitTag(self, ectx): return (self.HasOwnTag() and self.tags[0].IsImplicit(ectx)) def IndetermTag(self, ectx): return False def AddTag(self, tag): self.tags[0:0] = [tag] def GetTag(self, ectx): #print "GetTag(%s)\n" % self.name; if (self.HasOwnTag()): return self.tags[0].GetTag(ectx) else: return self.GetTTag(ectx) def GetTTag(self, ectx): print("#Unhandled GetTTag() in %s" % (self.type)) print(self.str_depth(1)) return ('BER_CLASS_unknown', 'TAG_unknown') def SetName(self, name): self.name = name def AddConstraint(self, constr): if not self.HasConstraint(): self.constr = constr else: self.constr = Constraint(type = 'Intersection', subtype = [self.constr, constr]) def eth_tname(self): return '#' + self.type + '_' + str(id(self)) def eth_ftype(self, ectx): return ('FT_NONE', 'BASE_NONE') def eth_strings(self): return 'NULL' def eth_omit_field(self): return False def eth_need_tree(self): return False def eth_has_vals(self): return False def eth_has_enum(self, tname, ectx): return self.eth_has_vals() and (ectx.eth_type[tname]['enum'] & EF_ENUM) def eth_need_pdu(self, ectx): return None def eth_named_bits(self): return None def eth_reg_sub(self, ident, ectx): pass def get_components(self, ectx): print("#Unhandled get_components() in %s" % (self.type)) print(self.str_depth(1)) return [] def sel_req(self, sel, ectx): print("#Selection '%s' required for non-CHOICE type %s" % (sel, self.type)) print(self.str_depth(1)) def fld_obj_eq(self, other): return isinstance(other, Type) and (self.eth_tname() == other.eth_tname()) def eth_reg(self, ident, ectx, tstrip=0, tagflag=False, selflag=False, idx='', parent=None): #print "eth_reg(): %s, ident=%s, tstrip=%d, tagflag=%s, selflag=%s, parent=%s" %(self.type, ident, tstrip, str(tagflag), str(selflag), str(parent)) #print " ", self if (ectx.NeedTags() and (len(self.tags) > tstrip)): tagged_type = self for i in range(len(self.tags)-1, tstrip-1, -1): tagged_type = TaggedType(val=tagged_type, tstrip=i) tagged_type.AddTag(self.tags[i]) if not tagflag: # 1st tagged level if self.IsNamed() and not selflag: tagged_type.SetName(self.name) tagged_type.eth_reg(ident, ectx, tstrip=1, tagflag=tagflag, idx=idx, parent=parent) return nm = '' if ident and self.IsNamed() and not tagflag 
and not selflag: nm = ident + '/' + self.name elif ident: nm = ident elif self.IsNamed(): nm = self.name if not ident and ectx.conform.omit_assignment('T', nm, ectx.Module()): return # Assignment to omit if not ident: # Assignment ectx.eth_reg_assign(nm, self) if self.type == 'Type_Ref' and not self.tr_need_own_fn(ectx): ectx.eth_reg_type(nm, self) virtual_tr = Type_Ref(val=ectx.conform.use_item('SET_TYPE', nm)) if (self.type == 'Type_Ref') or ectx.conform.check_item('SET_TYPE', nm): if ident and (ectx.conform.check_item('TYPE_RENAME', nm) or ectx.conform.get_fn_presence(nm) or selflag): if ectx.conform.check_item('SET_TYPE', nm): ectx.eth_reg_type(nm, virtual_tr) # dummy Type Reference else: ectx.eth_reg_type(nm, self) # new type trnm = nm elif ectx.conform.check_item('SET_TYPE', nm): trnm = ectx.conform.use_item('SET_TYPE', nm) elif (self.type == 'Type_Ref') and self.tr_need_own_fn(ectx): ectx.eth_reg_type(nm, self) # need own function, e.g. for constraints trnm = nm else: trnm = self.val else: ectx.eth_reg_type(nm, self) trnm = nm if ectx.conform.check_item('VIRTUAL_ASSGN', nm): vnm = ectx.conform.use_item('VIRTUAL_ASSGN', nm) ectx.eth_reg_assign(vnm, self, virt=True) ectx.eth_reg_type(vnm, self) self.eth_reg_sub(vnm, ectx) if parent and (ectx.type[parent]['val'].type == 'TaggedType'): ectx.type[parent]['val'].eth_set_val_name(parent, trnm, ectx) if ident and not tagflag and not self.eth_omit_field(): ectx.eth_reg_field(nm, trnm, idx=idx, parent=parent, impl=self.HasImplicitTag(ectx)) if ectx.conform.check_item('SET_TYPE', nm): virtual_tr.eth_reg_sub(nm, ectx) else: self.eth_reg_sub(nm, ectx) def eth_get_size_constr(self, ectx): (minv, maxv, ext) = ('MIN', 'MAX', False) if self.HasSizeConstraint(): if self.constr.IsSize(): (minv, maxv, ext) = self.constr.GetSize(ectx) if (self.constr.type == 'Intersection'): if self.constr.subtype[0].IsSize(): (minv, maxv, ext) = self.constr.subtype[0].GetSize(ectx) elif self.constr.subtype[1].IsSize(): (minv, maxv, ext) = self.constr.subtype[1].GetSize(ectx) if minv == 'MIN': minv = 'NO_BOUND' if maxv == 'MAX': maxv = 'NO_BOUND' if (ext): ext = 'TRUE' else: ext = 'FALSE' return (minv, maxv, ext) def eth_get_value_constr(self, ectx): (minv, maxv, ext) = ('MIN', 'MAX', False) if self.HasValueConstraint(): (minv, maxv, ext) = self.constr.GetValue(ectx) if minv == 'MIN': minv = 'NO_BOUND' if maxv == 'MAX': maxv = 'NO_BOUND' if str(minv).isdigit(): minv += 'U' elif (str(minv)[0] == "-") and str(minv)[1:].isdigit(): if (int(minv) == -(2**31)): minv = "G_MININT32" elif (int(minv) < -(2**31)): minv = "G_GINT64_CONSTANT(%s)" % (str(minv)) if str(maxv).isdigit(): if (int(maxv) >= 2**32): maxv = "G_GUINT64_CONSTANT(%s)" % (str(maxv)) else: maxv += 'U' if (ext): ext = 'TRUE' else: ext = 'FALSE' return (minv, maxv, ext) def eth_get_alphabet_constr(self, ectx): (alph, alphlen) = ('NULL', '0') if self.HasPermAlph(): alph = self.constr.GetPermAlph(ectx) if not alph: alph = 'NULL' if (alph != 'NULL'): if (((alph[0] + alph[-1]) == '""') and (not alph.count('"', 1, -1))): alphlen = str(len(alph) - 2) else: alphlen = 'strlen(%s)' % (alph) return (alph, alphlen) def eth_type_vals(self, tname, ectx): if self.eth_has_vals(): print("#Unhandled eth_type_vals('%s') in %s" % (tname, self.type)) print(self.str_depth(1)) return '' def eth_type_enum(self, tname, ectx): if self.eth_has_enum(tname, ectx): print("#Unhandled eth_type_enum('%s') in %s" % (tname, self.type)) print(self.str_depth(1)) return '' def eth_type_default_table(self, ectx, tname): return '' def 
eth_type_default_body(self, ectx, tname):
        print("#Unhandled eth_type_default_body() in %s" % (self.type))
        print(self.str_depth(1))
        return ''

    def eth_type_default_pars(self, ectx, tname):
        pars = {
          'TNAME' : tname,
          'ER' : ectx.encp(),
          'FN_VARIANT' : '',
          'TREE' : 'tree',
          'TVB' : 'tvb',
          'OFFSET' : 'offset',
          'ACTX' : 'actx',
          'HF_INDEX' : 'hf_index',
          'VAL_PTR' : 'NULL',
          'IMPLICIT_TAG' : 'implicit_tag',
        }
        if (ectx.eth_type[tname]['tree']):
            pars['ETT_INDEX'] = ectx.eth_type[tname]['tree']
        if (ectx.merge_modules):
            pars['PROTOP'] = ''
        else:
            pars['PROTOP'] = ectx.eth_type[tname]['proto'] + '_'
        return pars

    def eth_type_fn(self, proto, tname, ectx):
        body = self.eth_type_default_body(ectx, tname)
        pars = self.eth_type_default_pars(ectx, tname)
        if ectx.conform.check_item('FN_PARS', tname):
            pars.update(ectx.conform.use_item('FN_PARS', tname))
        elif ectx.conform.check_item('FN_PARS', ectx.eth_type[tname]['ref'][0]):
            pars.update(ectx.conform.use_item('FN_PARS', ectx.eth_type[tname]['ref'][0]))
        pars['DEFAULT_BODY'] = body
        for i in range(4):
            for k in list(pars.keys()):
                try:
                    pars[k] = pars[k] % pars
                except (ValueError,TypeError):
                    raise sys.exc_info()[0]("%s\n%s" % (str(pars), sys.exc_info()[1]))
        out = '\n'
        out += self.eth_type_default_table(ectx, tname) % pars
        out += ectx.eth_type_fn_hdr(tname)
        out += ectx.eth_type_fn_body(tname, body, pars=pars)
        out += ectx.eth_type_fn_ftr(tname)
        return out

#--- Value --------------------------------------------------------------------
class Value (Node):
    def __init__(self,*args, **kw) :
        self.name = None
        Node.__init__ (self,*args, **kw)

    def SetName(self, name) :
        self.name = name

    def to_str(self, ectx):
        return str(self.val)

    def get_dep(self):
        return None

    def fld_obj_repr(self, ectx):
        return self.to_str(ectx)

#--- Value_Ref -----------------------------------------------------------------
class Value_Ref (Value):
    def to_str(self, ectx):
        return asn2c(self.val)

#--- ObjectClass ---------------------------------------------------------------------
class ObjectClass (Node):
    def __init__(self,*args, **kw) :
        self.name = None
        Node.__init__ (self,*args, **kw)

    def SetName(self, name):
        self.name = name
        add_class_ident(self.name)

    def eth_reg(self, ident, ectx):
        if ectx.conform.omit_assignment('C', self.name, ectx.Module()): return  # Assignment to omit
        ectx.eth_reg_objectclass(self.name, self)

#--- Class_Ref -----------------------------------------------------------------
class Class_Ref (ObjectClass):
    pass

#--- ObjectClassDefn ---------------------------------------------------------------------
class ObjectClassDefn (ObjectClass):
    def reg_types(self):
        for fld in self.fields:
            repr = fld.fld_repr()
            set_type_to_class(self.name, repr[0], repr[1:])

#--- Tag ---------------------------------------------------------------
class Tag (Node):
    def to_python (self, ctx):
        return 'asn1.TYPE(%s,%s)' % (mk_tag_str (ctx, self.tag.cls,
                                                 self.tag_typ,
                                                 self.tag.num),
                                     self.typ.to_python (ctx))

    def IsImplicit(self, ectx):
        return ((self.mode == 'IMPLICIT') or ((self.mode == 'default') and (ectx.tag_def != 'EXPLICIT')))

    def GetTag(self, ectx):
        tc = ''
        if (self.cls == 'UNIVERSAL'): tc = 'BER_CLASS_UNI'
        elif (self.cls == 'APPLICATION'): tc = 'BER_CLASS_APP'
        elif (self.cls == 'CONTEXT'): tc = 'BER_CLASS_CON'
        elif (self.cls == 'PRIVATE'): tc = 'BER_CLASS_PRI'
        return (tc, self.num)

    def eth_tname(self):
        n = ''
        if (self.cls == 'UNIVERSAL'): n = 'U'
        elif (self.cls == 'APPLICATION'): n = 'A'
        elif (self.cls == 'CONTEXT'): n = 'C'
        elif (self.cls == 'PRIVATE'): n = 'P'
        return n + str(self.num)

#--- Constraint
--------------------------------------------------------------- constr_cnt = 0 class Constraint (Node): def to_python (self, ctx): print("Ignoring constraint:", self.type) return self.subtype.typ.to_python (ctx) def __str__ (self): return "Constraint: type=%s, subtype=%s" % (self.type, self.subtype) def eth_tname(self): return '#' + self.type + '_' + str(id(self)) def IsSize(self): return (self.type == 'Size' and self.subtype.IsValue()) \ or (self.type == 'Intersection' and (self.subtype[0].IsSize() or self.subtype[1].IsSize())) \ def GetSize(self, ectx): (minv, maxv, ext) = ('MIN', 'MAX', False) if self.IsSize(): if self.type == 'Size': (minv, maxv, ext) = self.subtype.GetValue(ectx) elif self.type == 'Intersection': if self.subtype[0].IsSize() and not self.subtype[1].IsSize(): (minv, maxv, ext) = self.subtype[0].GetSize(ectx) elif not self.subtype[0].IsSize() and self.subtype[1].IsSize(): (minv, maxv, ext) = self.subtype[1].GetSize(ectx) return (minv, maxv, ext) def IsValue(self): return self.type == 'SingleValue' \ or self.type == 'ValueRange' \ or (self.type == 'Intersection' and (self.subtype[0].IsValue() or self.subtype[1].IsValue())) \ or (self.type == 'Union' and (self.subtype[0].IsValue() and self.subtype[1].IsValue())) def GetValue(self, ectx): (minv, maxv, ext) = ('MIN', 'MAX', False) if self.IsValue(): if self.type == 'SingleValue': minv = ectx.value_get_eth(self.subtype) maxv = ectx.value_get_eth(self.subtype) ext = hasattr(self, 'ext') and self.ext elif self.type == 'ValueRange': minv = ectx.value_get_eth(self.subtype[0]) maxv = ectx.value_get_eth(self.subtype[1]) ext = hasattr(self, 'ext') and self.ext elif self.type == 'Intersection': if self.subtype[0].IsValue() and not self.subtype[1].IsValue(): (minv, maxv, ext) = self.subtype[0].GetValue(ectx) elif not self.subtype[0].IsValue() and self.subtype[1].IsValue(): (minv, maxv, ext) = self.subtype[1].GetValue(ectx) elif self.subtype[0].IsValue() and self.subtype[1].IsValue(): v0 = self.subtype[0].GetValue(ectx) v1 = self.subtype[1].GetValue(ectx) (minv, maxv, ext) = (ectx.value_max(v0[0],v1[0]), ectx.value_min(v0[1],v1[1]), v0[2] and v1[2]) elif self.type == 'Union': if self.subtype[0].IsValue() and self.subtype[1].IsValue(): v0 = self.subtype[0].GetValue(ectx) v1 = self.subtype[1].GetValue(ectx) (minv, maxv, ext) = (ectx.value_min(v0[0],v1[0]), ectx.value_max(v0[1],v1[1]), v0[2] or v1[2]) return (minv, maxv, ext) def IsAlphabet(self): return self.type == 'SingleValue' \ or self.type == 'ValueRange' \ or (self.type == 'Intersection' and (self.subtype[0].IsAlphabet() or self.subtype[1].IsAlphabet())) \ or (self.type == 'Union' and (self.subtype[0].IsAlphabet() and self.subtype[1].IsAlphabet())) def GetAlphabet(self, ectx): alph = None if self.IsAlphabet(): if self.type == 'SingleValue': alph = ectx.value_get_eth(self.subtype) elif self.type == 'ValueRange': if ((len(self.subtype[0]) == 3) and ((self.subtype[0][0] + self.subtype[0][-1]) == '""') \ and (len(self.subtype[1]) == 3) and ((self.subtype[1][0] + self.subtype[1][-1]) == '""')): alph = '"' for c in range(ord(self.subtype[0][1]), ord(self.subtype[1][1]) + 1): alph += chr(c) alph += '"' elif self.type == 'Union': if self.subtype[0].IsAlphabet() and self.subtype[1].IsAlphabet(): a0 = self.subtype[0].GetAlphabet(ectx) a1 = self.subtype[1].GetAlphabet(ectx) if (((a0[0] + a0[-1]) == '""') and not a0.count('"', 1, -1) \ and ((a1[0] + a1[-1]) == '""') and not a1.count('"', 1, -1)): alph = '"' + a0[1:-1] + a1[1:-1] + '"' else: alph = a0 + ' ' + a1 return alph def 
IsPermAlph(self): return self.type == 'From' and self.subtype.IsAlphabet() \ or (self.type == 'Intersection' and (self.subtype[0].IsPermAlph() or self.subtype[1].IsPermAlph())) \ def GetPermAlph(self, ectx): alph = None if self.IsPermAlph(): if self.type == 'From': alph = self.subtype.GetAlphabet(ectx) elif self.type == 'Intersection': if self.subtype[0].IsPermAlph() and not self.subtype[1].IsPermAlph(): alph = self.subtype[0].GetPermAlph(ectx) elif not self.subtype[0].IsPermAlph() and self.subtype[1].IsPermAlph(): alph = self.subtype[1].GetPermAlph(ectx) return alph def IsContents(self): return self.type == 'Contents' \ or (self.type == 'Intersection' and (self.subtype[0].IsContents() or self.subtype[1].IsContents())) \ def GetContents(self, ectx): contents = None if self.IsContents(): if self.type == 'Contents': if self.subtype.type == 'Type_Ref': contents = self.subtype.val elif self.type == 'Intersection': if self.subtype[0].IsContents() and not self.subtype[1].IsContents(): contents = self.subtype[0].GetContents(ectx) elif not self.subtype[0].IsContents() and self.subtype[1].IsContents(): contents = self.subtype[1].GetContents(ectx) return contents def IsNegativ(self): def is_neg(sval): return isinstance(sval, str) and (sval[0] == '-') if self.type == 'SingleValue': return is_neg(self.subtype) elif self.type == 'ValueRange': if self.subtype[0] == 'MIN': return True return is_neg(self.subtype[0]) return False def eth_constrname(self): def int2str(val): if isinstance(val, Value_Ref): return asn2c(val.val) try: if (int(val) < 0): return 'M' + str(-int(val)) else: return str(int(val)) except (ValueError, TypeError): return asn2c(str(val)) ext = '' if hasattr(self, 'ext') and self.ext: ext = '_' if self.type == 'SingleValue': return int2str(self.subtype) + ext elif self.type == 'ValueRange': return int2str(self.subtype[0]) + '_' + int2str(self.subtype[1]) + ext elif self.type == 'Size': return 'SIZE_' + self.subtype.eth_constrname() + ext else: if (not hasattr(self, 'constr_num')): global constr_cnt constr_cnt += 1 self.constr_num = constr_cnt return 'CONSTR%03d%s' % (self.constr_num, ext) def Needs64b(self, ectx): (minv, maxv, ext) = self.GetValue(ectx) if (str(minv).isdigit() or ((str(minv)[0] == "-") and str(minv)[1:].isdigit())) \ and str(maxv).isdigit() and (abs(int(maxv) - int(minv)) >= 2**32): return True return False class Module (Node): def to_python (self, ctx): ctx.tag_def = self.tag_def.dfl_tag return """#%s %s""" % (self.ident, self.body.to_python (ctx)) def get_name(self): return self.ident.val def get_proto(self, ectx): if (ectx.proto): prot = ectx.proto else: prot = ectx.conform.use_item('MODULE', self.get_name(), val_dflt=self.get_name()) return prot def to_eth(self, ectx): ectx.tags_def = 'EXPLICIT' # default = explicit ectx.proto = self.get_proto(ectx) ectx.tag_def = self.tag_def.dfl_tag ectx.eth_reg_module(self) self.body.to_eth(ectx) class Module_Body (Node): def to_python (self, ctx): # XXX handle exports, imports. 
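        # Note: only this legacy to_python() path leaves exports/imports
        # unimplemented; the Wireshark to_eth() path below does handle them,
        # via ectx.eth_exports() and ectx.eth_import_type()/_value()/_class().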
l = [x.to_python (ctx) for x in self.assign_list] l = [a for a in l if a != ''] return "\n".join (l) def to_eth(self, ectx): # Exports ectx.eth_exports(self.exports) # Imports for i in self.imports: mod = i.module.val proto = ectx.conform.use_item('MODULE', mod, val_dflt=mod) ectx.eth_module_dep_add(ectx.Module(), mod) for s in i.symbol_list: if isinstance(s, Type_Ref): ectx.eth_import_type(s.val, mod, proto) elif isinstance(s, Value_Ref): ectx.eth_import_value(s.val, mod, proto) elif isinstance(s, Class_Ref): ectx.eth_import_class(s.val, mod, proto) else: msg = 'Unknown kind of imported symbol %s from %s' % (str(s), mod) warnings.warn_explicit(msg, UserWarning, '', 0) # AssignmentList for a in self.assign_list: a.eth_reg('', ectx) class Default_Tags (Node): def to_python (self, ctx): # not to be used directly assert (0) # XXX should just calculate dependencies as we go along. def calc_dependencies (node, dict, trace = 0): if not hasattr (node, '__dict__'): if trace: print("#returning, node=", node) return if isinstance (node, Type_Ref): dict [node.val] = 1 if trace: print("#Setting", node.val) return for (a, val) in list(node.__dict__.items ()): if trace: print("# Testing node ", node, "attr", a, " val", val) if a[0] == '_': continue elif isinstance (val, Node): calc_dependencies (val, dict, trace) elif isinstance (val, type ([])): for v in val: calc_dependencies (v, dict, trace) class Type_Assign (Node): def __init__ (self, *args, **kw): Node.__init__ (self, *args, **kw) if isinstance (self.val, Tag): # XXX replace with generalized get_typ_ignoring_tag (no-op for Node, override in Tag) to_test = self.val.typ else: to_test = self.val if isinstance (to_test, SequenceType): to_test.sequence_name = self.name.name def to_python (self, ctx): dep_dict = {} calc_dependencies (self.val, dep_dict, 0) depend_list = list(dep_dict.keys ()) return ctx.register_assignment (self.name.name, self.val.to_python (ctx), depend_list) class PyQuote (Node): def to_python (self, ctx): return ctx.register_pyquote (self.val) #--- Type_Ref ----------------------------------------------------------------- class Type_Ref (Type): def to_python (self, ctx): return self.val def eth_reg_sub(self, ident, ectx): ectx.eth_dep_add(ident, self.val) def eth_tname(self): if self.HasSizeConstraint(): return asn2c(self.val) + '_' + self.constr.eth_constrname()<|fim▁hole|> else: return asn2c(self.val) def tr_need_own_fn(self, ectx): return ectx.Per() and self.HasSizeConstraint() def fld_obj_repr(self, ectx): return self.val def get_components(self, ectx): if self.val not in ectx.type or ectx.type[self.val]['import']: msg = "Can not get COMPONENTS OF %s which is imported type" % (self.val) warnings.warn_explicit(msg, UserWarning, '', 0) return [] else: return ectx.type[self.val]['val'].get_components(ectx) def GetTTag(self, ectx): #print "GetTTag(%s)\n" % self.val; if (ectx.type[self.val]['import']): if 'ttag' not in ectx.type[self.val]: ttag = ectx.get_ttag_from_all(self.val, ectx.type[self.val]['import']) if not ttag and not ectx.conform.check_item('IMPORT_TAG', self.val): msg = 'Missing tag information for imported type %s from %s (%s)' % (self.val, ectx.type[self.val]['import'], ectx.type[self.val]['proto']) warnings.warn_explicit(msg, UserWarning, '', 0) ttag = ('-1/*imported*/', '-1/*imported*/') ectx.type[self.val]['ttag'] = ectx.conform.use_item('IMPORT_TAG', self.val, val_dflt=ttag) return ectx.type[self.val]['ttag'] else: return ectx.type[self.val]['val'].GetTag(ectx) def IndetermTag(self, ectx): if 
(ectx.type[self.val]['import']): return False else: return ectx.type[self.val]['val'].IndetermTag(ectx) def eth_type_default_pars(self, ectx, tname): if tname: pars = Type.eth_type_default_pars(self, ectx, tname) else: pars = {} t = ectx.type[self.val]['ethname'] pars['TYPE_REF_PROTO'] = ectx.eth_type[t]['proto'] pars['TYPE_REF_TNAME'] = t pars['TYPE_REF_FN'] = 'dissect_%(TYPE_REF_PROTO)s_%(TYPE_REF_TNAME)s' if self.HasSizeConstraint(): (pars['MIN_VAL'], pars['MAX_VAL'], pars['EXT']) = self.eth_get_size_constr(ectx) return pars def eth_type_default_body(self, ectx, tname): if (ectx.Ber()): body = ectx.eth_fn_call('%(TYPE_REF_FN)s', ret='offset', par=(('%(IMPLICIT_TAG)s', '%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'),)) elif (ectx.Per()): if self.HasSizeConstraint(): body = ectx.eth_fn_call('dissect_%(ER)s_size_constrained_type', ret='offset', par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s', '%(TYPE_REF_FN)s',), ('"%(TYPE_REF_TNAME)s"', '%(MIN_VAL)s', '%(MAX_VAL)s', '%(EXT)s',),)) else: body = ectx.eth_fn_call('%(TYPE_REF_FN)s', ret='offset', par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'),)) else: body = '#error Can not decode %s' % (tname) return body #--- SelectionType ------------------------------------------------------------ class SelectionType (Type): def to_python (self, ctx): return self.val def sel_of_typeref(self): return self.typ.type == 'Type_Ref' def eth_reg_sub(self, ident, ectx): if not self.sel_of_typeref(): self.seltype = '' return self.seltype = ectx.eth_sel_req(self.typ.val, self.sel) ectx.eth_dep_add(ident, self.seltype) def eth_ftype(self, ectx): (ftype, display) = ('FT_NONE', 'BASE_NONE') if self.sel_of_typeref() and not ectx.type[self.seltype]['import']: (ftype, display) = ectx.type[self.typ.val]['val'].eth_ftype_sel(self.sel, ectx) return (ftype, display) def GetTTag(self, ectx): #print "GetTTag(%s)\n" % self.seltype; if (ectx.type[self.seltype]['import']): if 'ttag' not in ectx.type[self.seltype]: if not ectx.conform.check_item('IMPORT_TAG', self.seltype): msg = 'Missing tag information for imported type %s from %s (%s)' % (self.seltype, ectx.type[self.seltype]['import'], ectx.type[self.seltype]['proto']) warnings.warn_explicit(msg, UserWarning, '', 0) ectx.type[self.seltype]['ttag'] = ectx.conform.use_item('IMPORT_TAG', self.seltype, val_dflt=('-1 /*imported*/', '-1 /*imported*/')) return ectx.type[self.seltype]['ttag'] else: return ectx.type[self.typ.val]['val'].GetTTagSel(self.sel, ectx) def eth_type_default_pars(self, ectx, tname): pars = Type.eth_type_default_pars(self, ectx, tname) if self.sel_of_typeref(): t = ectx.type[self.seltype]['ethname'] pars['TYPE_REF_PROTO'] = ectx.eth_type[t]['proto'] pars['TYPE_REF_TNAME'] = t pars['TYPE_REF_FN'] = 'dissect_%(TYPE_REF_PROTO)s_%(TYPE_REF_TNAME)s' return pars def eth_type_default_body(self, ectx, tname): if not self.sel_of_typeref(): body = '#error Can not decode %s' % (tname) elif (ectx.Ber()): body = ectx.eth_fn_call('%(TYPE_REF_FN)s', ret='offset', par=(('%(IMPLICIT_TAG)s', '%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'),)) elif (ectx.Per()): body = ectx.eth_fn_call('%(TYPE_REF_FN)s', ret='offset', par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'),)) else: body = '#error Can not decode %s' % (tname) return body #--- TaggedType ----------------------------------------------------------------- class TaggedType (Type): def eth_tname(self): tn = '' for i in range(self.tstrip, len(self.val.tags)): tn += 
self.val.tags[i].eth_tname() tn += '_' tn += self.val.eth_tname() return tn def eth_set_val_name(self, ident, val_name, ectx): #print "TaggedType::eth_set_val_name(): ident=%s, val_name=%s" % (ident, val_name) self.val_name = val_name ectx.eth_dep_add(ident, self.val_name) def eth_reg_sub(self, ident, ectx): self.val_name = ident + '/' + UNTAG_TYPE_NAME self.val.eth_reg(self.val_name, ectx, tstrip=self.tstrip+1, tagflag=True, parent=ident) def GetTTag(self, ectx): #print "GetTTag(%s)\n" % self.seltype; return self.GetTag(ectx) def eth_ftype(self, ectx): return self.val.eth_ftype(ectx) def eth_type_default_pars(self, ectx, tname): pars = Type.eth_type_default_pars(self, ectx, tname) t = ectx.type[self.val_name]['ethname'] pars['TYPE_REF_PROTO'] = ectx.eth_type[t]['proto'] pars['TYPE_REF_TNAME'] = t pars['TYPE_REF_FN'] = 'dissect_%(TYPE_REF_PROTO)s_%(TYPE_REF_TNAME)s' (pars['TAG_CLS'], pars['TAG_TAG']) = self.GetTag(ectx) if self.HasImplicitTag(ectx): pars['TAG_IMPL'] = 'TRUE' else: pars['TAG_IMPL'] = 'FALSE' return pars def eth_type_default_body(self, ectx, tname): if (ectx.Ber()): body = ectx.eth_fn_call('dissect_%(ER)s_tagged_type', ret='offset', par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s'), ('%(HF_INDEX)s', '%(TAG_CLS)s', '%(TAG_TAG)s', '%(TAG_IMPL)s', '%(TYPE_REF_FN)s',),)) else: body = '#error Can not decode %s' % (tname) return body #--- SqType ----------------------------------------------------------- class SqType (Type): def out_item(self, f, val, optional, ext, ectx): if (val.eth_omit_field()): t = ectx.type[val.ident]['ethname'] fullname = ectx.dummy_eag_field else: ef = ectx.field[f]['ethname'] t = ectx.eth_hf[ef]['ethtype'] fullname = ectx.eth_hf[ef]['fullname'] if (ectx.Ber()): #print "optional=%s, e.val.HasOwnTag()=%s, e.val.IndetermTag()=%s" % (str(e.optional), str(e.val.HasOwnTag()), str(e.val.IndetermTag(ectx))) #print val.str_depth(1) opt = '' if (optional): opt = 'BER_FLAGS_OPTIONAL' if (not val.HasOwnTag()): if (opt): opt += '|' opt += 'BER_FLAGS_NOOWNTAG' elif (val.HasImplicitTag(ectx)): if (opt): opt += '|' opt += 'BER_FLAGS_IMPLTAG' if (val.IndetermTag(ectx)): if (opt): opt += '|' opt += 'BER_FLAGS_NOTCHKTAG' if (not opt): opt = '0' else: if optional: opt = 'ASN1_OPTIONAL' else: opt = 'ASN1_NOT_OPTIONAL' if (ectx.Ber()): (tc, tn) = val.GetTag(ectx) out = ' { %-24s, %-13s, %s, %s, dissect_%s_%s },\n' \ % ('&'+fullname, tc, tn, opt, ectx.eth_type[t]['proto'], t) elif (ectx.Per()): out = ' { %-24s, %-23s, %-17s, dissect_%s_%s },\n' \ % ('&'+fullname, ext, opt, ectx.eth_type[t]['proto'], t) else: out = '' return out #--- SeqType ----------------------------------------------------------- class SeqType (SqType): def all_components(self): lst = self.elt_list[:] if hasattr(self, 'ext_list'): lst.extend(self.ext_list) if hasattr(self, 'elt_list2'): lst.extend(self.elt_list2) return lst def need_components(self): lst = self.all_components() for e in (lst): if e.type == 'components_of': return True return False def expand_components(self, ectx): while self.need_components(): for i in range(len(self.elt_list)): if self.elt_list[i].type == 'components_of': comp = self.elt_list[i].typ.get_components(ectx) self.elt_list[i:i+1] = comp break if hasattr(self, 'ext_list'): for i in range(len(self.ext_list)): if self.ext_list[i].type == 'components_of': comp = self.ext_list[i].typ.get_components(ectx) self.ext_list[i:i+1] = comp break if hasattr(self, 'elt_list2'): for i in range(len(self.elt_list2)): if self.elt_list2[i].type == 'components_of': comp = 
self.elt_list2[i].typ.get_components(ectx) self.elt_list2[i:i+1] = comp break def get_components(self, ectx): lst = self.elt_list[:] if hasattr(self, 'elt_list2'): lst.extend(self.elt_list2) return lst def eth_reg_sub(self, ident, ectx, components_available=False): # check if autotag is required autotag = False if (ectx.NeedTags() and (ectx.tag_def == 'AUTOMATIC')): autotag = True lst = self.all_components() for e in (self.elt_list): if e.val.HasOwnTag(): autotag = False; break; # expand COMPONENTS OF if self.need_components(): if components_available: self.expand_components(ectx) else: ectx.eth_comp_req(ident) return # extension addition groups if hasattr(self, 'ext_list'): if (ectx.Per()): # add names eag_num = 1 for e in (self.ext_list): if isinstance(e.val, ExtensionAdditionGroup): e.val.parent_ident = ident e.val.parent_tname = ectx.type[ident]['tname'] if (e.val.ver): e.val.SetName("eag_v%s" % (e.val.ver)) else: e.val.SetName("eag_%d" % (eag_num)) eag_num += 1; else: # expand new_ext_list = [] for e in (self.ext_list): if isinstance(e.val, ExtensionAdditionGroup): new_ext_list.extend(e.val.elt_list) else: new_ext_list.append(e) self.ext_list = new_ext_list # do autotag if autotag: atag = 0 for e in (self.elt_list): e.val.AddTag(Tag(cls = 'CONTEXT', num = str(atag), mode = 'IMPLICIT')) atag += 1 if autotag and hasattr(self, 'elt_list2'): for e in (self.elt_list2): e.val.AddTag(Tag(cls = 'CONTEXT', num = str(atag), mode = 'IMPLICIT')) atag += 1 if autotag and hasattr(self, 'ext_list'): for e in (self.ext_list): e.val.AddTag(Tag(cls = 'CONTEXT', num = str(atag), mode = 'IMPLICIT')) atag += 1 # register components for e in (self.elt_list): e.val.eth_reg(ident, ectx, tstrip=1, parent=ident) if hasattr(self, 'ext_list'): for e in (self.ext_list): e.val.eth_reg(ident, ectx, tstrip=1, parent=ident) if hasattr(self, 'elt_list2'): for e in (self.elt_list2): e.val.eth_reg(ident, ectx, tstrip=1, parent=ident) def eth_type_default_table(self, ectx, tname): #print "eth_type_default_table(tname='%s')" % (tname) fname = ectx.eth_type[tname]['ref'][0] table = "static const %(ER)s_sequence_t %(TABLE)s[] = {\n" if hasattr(self, 'ext_list'): ext = 'ASN1_EXTENSION_ROOT' else: ext = 'ASN1_NO_EXTENSIONS' empty_ext_flag = '0' if (len(self.elt_list)==0) and hasattr(self, 'ext_list') and (len(self.ext_list)==0) and (not hasattr(self, 'elt_list2') or (len(self.elt_list2)==0)): empty_ext_flag = ext for e in (self.elt_list): f = fname + '/' + e.val.name table += self.out_item(f, e.val, e.optional, ext, ectx) if hasattr(self, 'ext_list'): for e in (self.ext_list): f = fname + '/' + e.val.name table += self.out_item(f, e.val, e.optional, 'ASN1_NOT_EXTENSION_ROOT', ectx) if hasattr(self, 'elt_list2'): for e in (self.elt_list2): f = fname + '/' + e.val.name table += self.out_item(f, e.val, e.optional, ext, ectx) if (ectx.Ber()): table += " { NULL, 0, 0, 0, NULL }\n};\n" else: table += " { NULL, %s, 0, NULL }\n};\n" % (empty_ext_flag) return table #--- SeqOfType ----------------------------------------------------------- class SeqOfType (SqType): def eth_type_default_table(self, ectx, tname): #print "eth_type_default_table(tname='%s')" % (tname) fname = ectx.eth_type[tname]['ref'][0] if self.val.IsNamed (): f = fname + '/' + self.val.name else: f = fname + '/' + ITEM_FIELD_NAME table = "static const %(ER)s_sequence_t %(TABLE)s[1] = {\n" table += self.out_item(f, self.val, False, 'ASN1_NO_EXTENSIONS', ectx) table += "};\n" return table #--- SequenceOfType ----------------------------------------------------------- 
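# A minimal sketch of what this SEQUENCE-OF machinery generates, assuming a
# hypothetical BER protocol "p1" containing "Foo ::= SEQUENCE OF Bar" where
# Bar is itself a SEQUENCE (all names here are made up for illustration).
# The table emitted by SeqOfType.eth_type_default_table() above comes out
# roughly as:
#
#   static const ber_sequence_t p1_Foo_sequence_of[1] = {
#     { &hf_p1_foo_item, BER_CLASS_UNI, BER_UNI_TAG_SEQUENCE, BER_FLAGS_NOOWNTAG, dissect_p1_Bar },
#   };
#
# with the tag class/number taken from the item's GetTag() and the flags
# computed by SqType.out_item() above.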
class SequenceOfType (SeqOfType): def to_python (self, ctx): # name, tag (None for no tag, EXPLICIT() for explicit), typ) # or '' + (1,) for optional sizestr = '' if self.size_constr != None: print("#Ignoring size constraint:", self.size_constr.subtype) return "%sasn1.SEQUENCE_OF (%s%s)" % (ctx.spaces (), self.val.to_python (ctx), sizestr) def eth_reg_sub(self, ident, ectx): itmnm = ident if not self.val.IsNamed (): itmnm += '/' + ITEM_FIELD_NAME self.val.eth_reg(itmnm, ectx, tstrip=1, idx='[##]', parent=ident) def eth_tname(self): if self.val.type != 'Type_Ref': return '#' + self.type + '_' + str(id(self)) if not self.HasConstraint(): return "SEQUENCE_OF_" + self.val.eth_tname() elif self.constr.IsSize(): return 'SEQUENCE_' + self.constr.eth_constrname() + '_OF_' + self.val.eth_tname() else: return '#' + self.type + '_' + str(id(self)) def eth_ftype(self, ectx): return ('FT_UINT32', 'BASE_DEC') def eth_need_tree(self): return True def GetTTag(self, ectx): return ('BER_CLASS_UNI', 'BER_UNI_TAG_SEQUENCE') def eth_type_default_pars(self, ectx, tname): pars = Type.eth_type_default_pars(self, ectx, tname) (pars['MIN_VAL'], pars['MAX_VAL'], pars['EXT']) = self.eth_get_size_constr(ectx) pars['TABLE'] = '%(PROTOP)s%(TNAME)s_sequence_of' return pars def eth_type_default_body(self, ectx, tname): if (ectx.Ber()): if (ectx.constraints_check and self.HasSizeConstraint()): body = ectx.eth_fn_call('dissect_%(ER)s_constrained_sequence_of', ret='offset', par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s'), ('%(MIN_VAL)s', '%(MAX_VAL)s', '%(TABLE)s', '%(HF_INDEX)s', '%(ETT_INDEX)s',),)) else: body = ectx.eth_fn_call('dissect_%(ER)s_sequence_of', ret='offset', par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s'), ('%(TABLE)s', '%(HF_INDEX)s', '%(ETT_INDEX)s',),)) elif (ectx.Per() and not self.HasConstraint()): body = ectx.eth_fn_call('dissect_%(ER)s_sequence_of', ret='offset', par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'), ('%(ETT_INDEX)s', '%(TABLE)s',),)) elif (ectx.Per() and self.constr.type == 'Size'): body = ectx.eth_fn_call('dissect_%(ER)s_constrained_sequence_of', ret='offset', par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'), ('%(ETT_INDEX)s', '%(TABLE)s',), ('%(MIN_VAL)s', '%(MAX_VAL)s','%(EXT)s'),)) else: body = '#error Can not decode %s' % (tname) return body #--- SetOfType ---------------------------------------------------------------- class SetOfType (SeqOfType): def eth_reg_sub(self, ident, ectx): itmnm = ident if not self.val.IsNamed (): itmnm += '/' + ITEM_FIELD_NAME self.val.eth_reg(itmnm, ectx, tstrip=1, idx='(##)', parent=ident) def eth_tname(self): if self.val.type != 'Type_Ref': return '#' + self.type + '_' + str(id(self)) if not self.HasConstraint(): return "SET_OF_" + self.val.eth_tname() elif self.constr.IsSize(): return 'SET_' + self.constr.eth_constrname() + '_OF_' + self.val.eth_tname() else: return '#' + self.type + '_' + str(id(self)) def eth_ftype(self, ectx): return ('FT_UINT32', 'BASE_DEC') def eth_need_tree(self): return True def GetTTag(self, ectx): return ('BER_CLASS_UNI', 'BER_UNI_TAG_SET') def eth_type_default_pars(self, ectx, tname): pars = Type.eth_type_default_pars(self, ectx, tname) (pars['MIN_VAL'], pars['MAX_VAL'], pars['EXT']) = self.eth_get_size_constr(ectx) pars['TABLE'] = '%(PROTOP)s%(TNAME)s_set_of' return pars def eth_type_default_body(self, ectx, tname): if (ectx.Ber()): if (ectx.constraints_check and self.HasSizeConstraint()): body = 
ectx.eth_fn_call('dissect_%(ER)s_constrained_set_of', ret='offset',
                                        par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s'),
                                             ('%(MIN_VAL)s', '%(MAX_VAL)s', '%(TABLE)s', '%(HF_INDEX)s', '%(ETT_INDEX)s',),))
            else:
                body = ectx.eth_fn_call('dissect_%(ER)s_set_of', ret='offset',
                                        par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s'),
                                             ('%(TABLE)s', '%(HF_INDEX)s', '%(ETT_INDEX)s',),))
        elif (ectx.Per() and not self.HasConstraint()):
            body = ectx.eth_fn_call('dissect_%(ER)s_set_of', ret='offset',
                                    par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'),
                                         ('%(ETT_INDEX)s', '%(TABLE)s',),))
        elif (ectx.Per() and self.constr.type == 'Size'):
            body = ectx.eth_fn_call('dissect_%(ER)s_constrained_set_of', ret='offset',
                                    par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'),
                                         ('%(ETT_INDEX)s', '%(TABLE)s',),
                                         ('%(MIN_VAL)s', '%(MAX_VAL)s','%(EXT)s',),))
        else:
            body = '#error Can not decode %s' % (tname)
        return body


def mk_tag_str (ctx, cls, typ, num):
    # XXX should do conversion to int earlier!
    val = int (num)
    typ = typ.upper()
    if typ == 'DEFAULT':
        typ = ctx.tags_def
    return 'asn1.%s(%d,cls=asn1.%s_FLAG)' % (typ, val, cls) # XXX still needed?

#--- SequenceType -------------------------------------------------------------
class SequenceType (SeqType):
    def to_python (self, ctx):
        # name, tag (None for no tag, EXPLICIT() for explicit), typ)
        # or '' + (1,) for optional
        # XXX should also collect names for SEQUENCE inside SEQUENCE or
        # CHOICE or SEQUENCE_OF (where should the SEQUENCE_OF name come
        # from? for others, element or arm name would be fine)
        seq_name = getattr (self, 'sequence_name', None)
        if seq_name == None:
            seq_name = 'None'
        else:
            seq_name = "'" + seq_name + "'"
        if 'ext_list' in self.__dict__:
            return "%sasn1.SEQUENCE ([%s], ext=[%s], seq_name = %s)" % (ctx.spaces (),
                       self.elts_to_py (self.elt_list, ctx),
                       self.elts_to_py (self.ext_list, ctx), seq_name)
        else:
            return "%sasn1.SEQUENCE ([%s], seq_name = %s)" % (ctx.spaces (),
                       self.elts_to_py (self.elt_list, ctx), seq_name)

    def elts_to_py (self, list, ctx):
        # we have elt_type, val= named_type, maybe default=, optional=
        # named_type node: either ident = or typ =
        # need to dismember these in order to generate Python output syntax.
        ctx.indent ()
        def elt_to_py (e):
            assert (e.type == 'elt_type')
            nt = e.val
            optflag = e.optional
            #assert (not hasattr (e, 'default')) # XXX add support for DEFAULT!
assert (nt.type == 'named_type') tagstr = 'None' identstr = nt.ident if hasattr (nt.typ, 'type') and nt.typ.type == 'tag': # ugh tagstr = mk_tag_str (ctx,nt.typ.tag.cls, nt.typ.tag.tag_typ,nt.typ.tag.num) nt = nt.typ return "('%s',%s,%s,%d)" % (identstr, tagstr, nt.typ.to_python (ctx), optflag) indentstr = ",\n" + ctx.spaces () rv = indentstr.join ([elt_to_py (e) for e in list]) ctx.outdent () return rv def eth_need_tree(self): return True def GetTTag(self, ectx): return ('BER_CLASS_UNI', 'BER_UNI_TAG_SEQUENCE') def eth_type_default_pars(self, ectx, tname): pars = Type.eth_type_default_pars(self, ectx, tname) pars['TABLE'] = '%(PROTOP)s%(TNAME)s_sequence' return pars def eth_type_default_body(self, ectx, tname): if (ectx.Ber()): body = ectx.eth_fn_call('dissect_%(ER)s_sequence', ret='offset', par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s'), ('%(TABLE)s', '%(HF_INDEX)s', '%(ETT_INDEX)s',),)) elif (ectx.Per()): body = ectx.eth_fn_call('dissect_%(ER)s_sequence', ret='offset', par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'), ('%(ETT_INDEX)s', '%(TABLE)s',),)) else: body = '#error Can not decode %s' % (tname) return body #--- ExtensionAdditionGroup --------------------------------------------------- class ExtensionAdditionGroup (SeqType): def __init__(self,*args, **kw) : self.parent_ident = None self.parent_tname = None SeqType.__init__ (self,*args, **kw) def eth_omit_field(self): return True def eth_tname(self): if (self.parent_tname and self.IsNamed()): return self.parent_tname + "_" + self.name else: return SeqType.eth_tname(self) def eth_reg_sub(self, ident, ectx): ectx.eth_dummy_eag_field_required() ectx.eth_dep_add(self.parent_ident, ident) SeqType.eth_reg_sub(self, ident, ectx) def eth_type_default_pars(self, ectx, tname): pars = Type.eth_type_default_pars(self, ectx, tname) pars['TABLE'] = '%(PROTOP)s%(TNAME)s_sequence' return pars def eth_type_default_body(self, ectx, tname): if (ectx.Per()): body = ectx.eth_fn_call('dissect_%(ER)s_sequence_eag', ret='offset', par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(TABLE)s',),)) else: body = '#error Can not decode %s' % (tname) return body #--- SetType ------------------------------------------------------------------ class SetType (SeqType): def eth_need_tree(self): return True def GetTTag(self, ectx): return ('BER_CLASS_UNI', 'BER_UNI_TAG_SET') def eth_type_default_pars(self, ectx, tname): pars = Type.eth_type_default_pars(self, ectx, tname) pars['TABLE'] = '%(PROTOP)s%(TNAME)s_set' return pars def eth_type_default_body(self, ectx, tname): if (ectx.Ber()): body = ectx.eth_fn_call('dissect_%(ER)s_set', ret='offset', par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s'), ('%(TABLE)s', '%(HF_INDEX)s', '%(ETT_INDEX)s',),)) elif (ectx.Per()): body = ectx.eth_fn_call('dissect_%(ER)s_set', ret='offset', par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'), ('%(ETT_INDEX)s', '%(TABLE)s',),)) else: body = '#error Can not decode %s' % (tname) return body #--- ChoiceType --------------------------------------------------------------- class ChoiceType (Type): def to_python (self, ctx): # name, tag (None for no tag, EXPLICIT() for explicit), typ) # or '' + (1,) for optional if 'ext_list' in self.__dict__: return "%sasn1.CHOICE ([%s], ext=[%s])" % (ctx.spaces (), self.elts_to_py (self.elt_list, ctx), self.elts_to_py (self.ext_list, ctx)) else: return "%sasn1.CHOICE ([%s])" % (ctx.spaces (), self.elts_to_py (self.elt_list, ctx)) def elts_to_py (self, list, 
ctx): ctx.indent () def elt_to_py (nt): assert (nt.type == 'named_type') tagstr = 'None' if hasattr (nt, 'ident'): identstr = nt.ident else: if hasattr (nt.typ, 'val'): identstr = nt.typ.val # XXX, making up name elif hasattr (nt.typ, 'name'): identstr = nt.typ.name else: identstr = ctx.make_new_name () if hasattr (nt.typ, 'type') and nt.typ.type == 'tag': # ugh tagstr = mk_tag_str (ctx,nt.typ.tag.cls, nt.typ.tag.tag_typ,nt.typ.tag.num) nt = nt.typ return "('%s',%s,%s)" % (identstr, tagstr, nt.typ.to_python (ctx)) indentstr = ",\n" + ctx.spaces () rv = indentstr.join ([elt_to_py (e) for e in list]) ctx.outdent () return rv def eth_reg_sub(self, ident, ectx): #print "eth_reg_sub(ident='%s')" % (ident) # check if autotag is required autotag = False if (ectx.NeedTags() and (ectx.tag_def == 'AUTOMATIC')): autotag = True for e in (self.elt_list): if e.HasOwnTag(): autotag = False; break; if autotag and hasattr(self, 'ext_list'): for e in (self.ext_list): if e.HasOwnTag(): autotag = False; break; # do autotag if autotag: atag = 0 for e in (self.elt_list): e.AddTag(Tag(cls = 'CONTEXT', num = str(atag), mode = 'IMPLICIT')) atag += 1 if autotag and hasattr(self, 'ext_list'): for e in (self.ext_list): e.AddTag(Tag(cls = 'CONTEXT', num = str(atag), mode = 'IMPLICIT')) atag += 1 for e in (self.elt_list): e.eth_reg(ident, ectx, tstrip=1, parent=ident) if ectx.conform.check_item('EXPORTS', ident + '.' + e.name): ectx.eth_sel_req(ident, e.name) if hasattr(self, 'ext_list'): for e in (self.ext_list): e.eth_reg(ident, ectx, tstrip=1, parent=ident) if ectx.conform.check_item('EXPORTS', ident + '.' + e.name): ectx.eth_sel_req(ident, e.name) def sel_item(self, ident, sel, ectx): lst = self.elt_list[:] if hasattr(self, 'ext_list'): lst.extend(self.ext_list) ee = None for e in (self.elt_list): if e.IsNamed() and (e.name == sel): ee = e break if not ee: print("#CHOICE %s does not contain item %s" % (ident, sel)) return ee def sel_req(self, ident, sel, ectx): #print "sel_req(ident='%s', sel=%s)\n%s" % (ident, sel, str(self)) ee = self.sel_item(ident, sel, ectx) if ee: ee.eth_reg(ident, ectx, tstrip=0, selflag=True) def eth_ftype(self, ectx): return ('FT_UINT32', 'BASE_DEC') def eth_ftype_sel(self, sel, ectx): ee = self.sel_item('', sel, ectx) if ee: return ee.eth_ftype(ectx) else: return ('FT_NONE', 'BASE_NONE') def eth_strings(self): return '$$' def eth_need_tree(self): return True def eth_has_vals(self): return True def GetTTag(self, ectx): lst = self.elt_list cls = 'BER_CLASS_ANY/*choice*/' #if hasattr(self, 'ext_list'): # lst.extend(self.ext_list) #if (len(lst) > 0): # cls = lst[0].GetTag(ectx)[0] #for e in (lst): # if (e.GetTag(ectx)[0] != cls): # cls = '-1/*choice*/' return (cls, '-1/*choice*/') def GetTTagSel(self, sel, ectx): ee = self.sel_item('', sel, ectx) if ee: return ee.GetTag(ectx) else: return ('BER_CLASS_ANY/*unknown selection*/', '-1/*unknown selection*/') def IndetermTag(self, ectx): #print "Choice IndetermTag()=%s" % (str(not self.HasOwnTag())) return not self.HasOwnTag() def detect_tagval(self, ectx): tagval = False lst = self.elt_list[:] if hasattr(self, 'ext_list'): lst.extend(self.ext_list) if (len(lst) > 0) and (not ectx.Per() or lst[0].HasOwnTag()): t = lst[0].GetTag(ectx)[0] tagval = True else: t = '' tagval = False if (t == 'BER_CLASS_UNI'): tagval = False for e in (lst): if not ectx.Per() or e.HasOwnTag(): tt = e.GetTag(ectx)[0] else: tt = '' tagval = False if (tt != t): tagval = False return tagval def get_vals(self, ectx): tagval = self.detect_tagval(ectx) vals = [] cnt = 0 for e in 
(self.elt_list): if (tagval): val = e.GetTag(ectx)[1] else: val = str(cnt) vals.append((val, e.name)) cnt += 1 if hasattr(self, 'ext_list'): for e in (self.ext_list): if (tagval): val = e.GetTag(ectx)[1] else: val = str(cnt) vals.append((val, e.name)) cnt += 1 return vals def eth_type_vals(self, tname, ectx): out = '\n' vals = self.get_vals(ectx) out += ectx.eth_vals(tname, vals) return out def reg_enum_vals(self, tname, ectx): vals = self.get_vals(ectx) for (val, id) in vals: ectx.eth_reg_value(id, self, val, ethname=ectx.eth_enum_item(tname, id)) def eth_type_enum(self, tname, ectx): out = '\n' vals = self.get_vals(ectx) out += ectx.eth_enum(tname, vals) return out def eth_type_default_pars(self, ectx, tname): pars = Type.eth_type_default_pars(self, ectx, tname) pars['TABLE'] = '%(PROTOP)s%(TNAME)s_choice' return pars def eth_type_default_table(self, ectx, tname): def out_item(val, e, ext, ectx): has_enum = ectx.eth_type[tname]['enum'] & EF_ENUM if (has_enum): vval = ectx.eth_enum_item(tname, e.name) else: vval = val f = fname + '/' + e.name ef = ectx.field[f]['ethname'] t = ectx.eth_hf[ef]['ethtype'] if (ectx.Ber()): opt = '' if (not e.HasOwnTag()): opt = 'BER_FLAGS_NOOWNTAG' elif (e.HasImplicitTag(ectx)): if (opt): opt += '|' opt += 'BER_FLAGS_IMPLTAG' if (not opt): opt = '0' if (ectx.Ber()): (tc, tn) = e.GetTag(ectx) out = ' { %3s, %-24s, %-13s, %s, %s, dissect_%s_%s },\n' \ % (vval, '&'+ectx.eth_hf[ef]['fullname'], tc, tn, opt, ectx.eth_type[t]['proto'], t) elif (ectx.Per()): out = ' { %3s, %-24s, %-23s, dissect_%s_%s },\n' \ % (vval, '&'+ectx.eth_hf[ef]['fullname'], ext, ectx.eth_type[t]['proto'], t) else: out = '' return out # end out_item() #print "eth_type_default_table(tname='%s')" % (tname) fname = ectx.eth_type[tname]['ref'][0] tagval = self.detect_tagval(ectx) table = "static const %(ER)s_choice_t %(TABLE)s[] = {\n" cnt = 0 if hasattr(self, 'ext_list'): ext = 'ASN1_EXTENSION_ROOT' else: ext = 'ASN1_NO_EXTENSIONS' empty_ext_flag = '0' if (len(self.elt_list)==0) and hasattr(self, 'ext_list') and (len(self.ext_list)==0): empty_ext_flag = ext for e in (self.elt_list): if (tagval): val = e.GetTag(ectx)[1] else: val = str(cnt) table += out_item(val, e, ext, ectx) cnt += 1 if hasattr(self, 'ext_list'): for e in (self.ext_list): if (tagval): val = e.GetTag(ectx)[1] else: val = str(cnt) table += out_item(val, e, 'ASN1_NOT_EXTENSION_ROOT', ectx) cnt += 1 if (ectx.Ber()): table += " { 0, NULL, 0, 0, 0, NULL }\n};\n" else: table += " { 0, NULL, %s, NULL }\n};\n" % (empty_ext_flag) return table def eth_type_default_body(self, ectx, tname): if (ectx.Ber()): body = ectx.eth_fn_call('dissect_%(ER)s_choice', ret='offset', par=(('%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s'), ('%(TABLE)s', '%(HF_INDEX)s', '%(ETT_INDEX)s'), ('%(VAL_PTR)s',),)) elif (ectx.Per()): body = ectx.eth_fn_call('dissect_%(ER)s_choice', ret='offset', par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'), ('%(ETT_INDEX)s', '%(TABLE)s',), ('%(VAL_PTR)s',),)) else: body = '#error Can not decode %s' % (tname) return body #--- ChoiceValue ---------------------------------------------------- class ChoiceValue (Value): def to_str(self, ectx): return self.val.to_str(ectx) def fld_obj_eq(self, other): return isinstance(other, ChoiceValue) and (self.choice == other.choice) and (str(self.val.val) == str(other.val.val)) #--- EnumeratedType ----------------------------------------------------------- class EnumeratedType (Type): def to_python (self, ctx): def strify_one (named_num): return "%s=%s" % 
(named_num.ident, named_num.val) return "asn1.ENUM(%s)" % ",".join (map (strify_one, self.val)) def eth_ftype(self, ectx): return ('FT_UINT32', 'BASE_DEC') def eth_strings(self): return '$$' def eth_has_vals(self): return True def GetTTag(self, ectx): return ('BER_CLASS_UNI', 'BER_UNI_TAG_ENUMERATED') def get_vals_etc(self, ectx): vals = [] lastv = 0 used = {} maxv = 0 root_num = 0 ext_num = 0 map_table = [] for e in (self.val): if e.type == 'NamedNumber': used[int(e.val)] = True for e in (self.val): if e.type == 'NamedNumber': val = int(e.val) else: while lastv in used: lastv += 1 val = lastv used[val] = True vals.append((val, e.ident)) map_table.append(val) root_num += 1 if val > maxv: maxv = val if self.ext is not None: for e in (self.ext): if e.type == 'NamedNumber': used[int(e.val)] = True for e in (self.ext): if e.type == 'NamedNumber': val = int(e.val) else: while lastv in used: lastv += 1 val = lastv used[val] = True vals.append((val, e.ident)) map_table.append(val) ext_num += 1 if val > maxv: maxv = val need_map = False for i in range(len(map_table)): need_map = need_map or (map_table[i] != i) if (not need_map): map_table = None return (vals, root_num, ext_num, map_table) def eth_type_vals(self, tname, ectx): out = '\n' vals = self.get_vals_etc(ectx)[0] out += ectx.eth_vals(tname, vals) return out def reg_enum_vals(self, tname, ectx): vals = self.get_vals_etc(ectx)[0] for (val, id) in vals: ectx.eth_reg_value(id, self, val, ethname=ectx.eth_enum_item(tname, id)) def eth_type_enum(self, tname, ectx): out = '\n' vals = self.get_vals_etc(ectx)[0] out += ectx.eth_enum(tname, vals) return out def eth_type_default_pars(self, ectx, tname): pars = Type.eth_type_default_pars(self, ectx, tname) (root_num, ext_num, map_table) = self.get_vals_etc(ectx)[1:] if (self.ext != None): ext = 'TRUE' else: ext = 'FALSE' pars['ROOT_NUM'] = str(root_num) pars['EXT'] = ext pars['EXT_NUM'] = str(ext_num) if (map_table): pars['TABLE'] = '%(PROTOP)s%(TNAME)s_value_map' else: pars['TABLE'] = 'NULL' return pars def eth_type_default_table(self, ectx, tname): if (not ectx.Per()): return '' map_table = self.get_vals_etc(ectx)[3] if (map_table == None): return '' table = "static guint32 %(TABLE)s[%(ROOT_NUM)s+%(EXT_NUM)s] = {" table += ", ".join([str(v) for v in map_table]) table += "};\n" return table def eth_type_default_body(self, ectx, tname): if (ectx.Ber()): if (ectx.constraints_check and self.HasValueConstraint()): body = ectx.eth_fn_call('dissect_%(ER)s_constrained_integer', ret='offset', par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s'), ('%(MIN_VAL)s', '%(MAX_VAL)s', '%(HF_INDEX)s', '%(VAL_PTR)s',),)) else: body = ectx.eth_fn_call('dissect_%(ER)s_integer', ret='offset', par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s', '%(HF_INDEX)s'), ('%(VAL_PTR)s',),)) elif (ectx.Per()): body = ectx.eth_fn_call('dissect_%(ER)s_enumerated', ret='offset', par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'), ('%(ROOT_NUM)s', '%(VAL_PTR)s', '%(EXT)s', '%(EXT_NUM)s', '%(TABLE)s',),)) else: body = '#error Can not decode %s' % (tname) return body #--- EmbeddedPDVType ----------------------------------------------------------- class EmbeddedPDVType (Type): def eth_tname(self): return 'EMBEDDED_PDV' def eth_ftype(self, ectx): return ('FT_NONE', 'BASE_NONE') def GetTTag(self, ectx): return ('BER_CLASS_UNI', 'BER_UNI_TAG_EMBEDDED_PDV') def eth_type_default_pars(self, ectx, tname): pars = Type.eth_type_default_pars(self, ectx, tname) if 
ectx.default_embedded_pdv_cb: pars['TYPE_REF_FN'] = ectx.default_embedded_pdv_cb else: pars['TYPE_REF_FN'] = 'NULL' return pars def eth_type_default_body(self, ectx, tname): if (ectx.Ber()): body = ectx.eth_fn_call('dissect_%(ER)s_EmbeddedPDV_Type', ret='offset', par=(('%(IMPLICIT_TAG)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(HF_INDEX)s', '%(TYPE_REF_FN)s',),)) elif (ectx.Per()): body = ectx.eth_fn_call('dissect_%(ER)s_embedded_pdv', ret='offset', par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s', '%(TYPE_REF_FN)s',),)) else: body = '#error Can not decode %s' % (tname) return body #--- ExternalType ----------------------------------------------------------- class ExternalType (Type): def eth_tname(self): return 'EXTERNAL' def eth_ftype(self, ectx): return ('FT_NONE', 'BASE_NONE') def GetTTag(self, ectx): return ('BER_CLASS_UNI', 'BER_UNI_TAG_EXTERNAL') def eth_type_default_pars(self, ectx, tname): pars = Type.eth_type_default_pars(self, ectx, tname) if ectx.default_external_type_cb: pars['TYPE_REF_FN'] = ectx.default_external_type_cb else: pars['TYPE_REF_FN'] = 'NULL' return pars def eth_type_default_body(self, ectx, tname): if (ectx.Ber()): body = ectx.eth_fn_call('dissect_%(ER)s_external_type', ret='offset', par=(('%(IMPLICIT_TAG)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(HF_INDEX)s', '%(TYPE_REF_FN)s',),)) elif (ectx.Per()): body = ectx.eth_fn_call('dissect_%(ER)s_external_type', ret='offset', par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s', '%(TYPE_REF_FN)s',),)) else: body = '#error Can not decode %s' % (tname) return body #--- OpenType ----------------------------------------------------------- class OpenType (Type): def to_python (self, ctx): return "asn1.ANY" def single_type(self): if (self.HasConstraint() and self.constr.type == 'Type' and self.constr.subtype.type == 'Type_Ref'): return self.constr.subtype.val return None def eth_reg_sub(self, ident, ectx): t = self.single_type() if t: ectx.eth_dep_add(ident, t) def eth_tname(self): t = self.single_type() if t: return 'OpenType_' + t else: return Type.eth_tname(self) def eth_ftype(self, ectx): return ('FT_NONE', 'BASE_NONE') def GetTTag(self, ectx): return ('BER_CLASS_ANY', '0') def eth_type_default_pars(self, ectx, tname): pars = Type.eth_type_default_pars(self, ectx, tname) pars['FN_VARIANT'] = ectx.default_opentype_variant t = self.single_type() if t: t = ectx.type[t]['ethname'] pars['TYPE_REF_PROTO'] = ectx.eth_type[t]['proto'] pars['TYPE_REF_TNAME'] = t pars['TYPE_REF_FN'] = 'dissect_%(TYPE_REF_PROTO)s_%(TYPE_REF_TNAME)s' else: pars['TYPE_REF_FN'] = 'NULL' return pars def eth_type_default_body(self, ectx, tname): if (ectx.Per()): body = ectx.eth_fn_call('dissect_%(ER)s_open_type%(FN_VARIANT)s', ret='offset', par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s', '%(TYPE_REF_FN)s',),)) else: body = '#error Can not decode %s' % (tname) return body #--- InstanceOfType ----------------------------------------------------------- class InstanceOfType (Type): def eth_tname(self): return 'INSTANCE_OF' def eth_ftype(self, ectx): return ('FT_NONE', 'BASE_NONE') def GetTTag(self, ectx): return ('BER_CLASS_UNI', 'BER_UNI_TAG_EXTERNAL') def eth_type_default_pars(self, ectx, tname): pars = Type.eth_type_default_pars(self, ectx, tname) if ectx.default_external_type_cb: pars['TYPE_REF_FN'] = ectx.default_external_type_cb else: pars['TYPE_REF_FN'] = 'NULL' return pars def eth_type_default_body(self, ectx, tname): if (ectx.Ber()): body = 
ectx.eth_fn_call('dissect_%(ER)s_external_type', ret='offset', par=(('%(IMPLICIT_TAG)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(HF_INDEX)s', '%(TYPE_REF_FN)s',),)) elif (ectx.Per()): body = '#error Can not decode %s' % (tname) else: body = '#error Can not decode %s' % (tname) return body #--- AnyType ----------------------------------------------------------- class AnyType (Type): def to_python (self, ctx): return "asn1.ANY" def eth_ftype(self, ectx): return ('FT_NONE', 'BASE_NONE') def GetTTag(self, ectx): return ('BER_CLASS_ANY', '0') def eth_type_default_body(self, ectx, tname): body = '#error Can not decode %s' % (tname) return body class Literal (Node): def to_python (self, ctx): return self.val #--- NullType ----------------------------------------------------------------- class NullType (Type): def to_python (self, ctx): return 'asn1.NULL' def eth_tname(self): return 'NULL' def GetTTag(self, ectx): return ('BER_CLASS_UNI', 'BER_UNI_TAG_NULL') def eth_type_default_body(self, ectx, tname): if (ectx.Ber()): body = ectx.eth_fn_call('dissect_%(ER)s_null', ret='offset', par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s', '%(HF_INDEX)s'),)) elif (ectx.Per()): body = ectx.eth_fn_call('dissect_%(ER)s_null', ret='offset', par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'),)) else: body = '#error Can not decode %s' % (tname) return body #--- NullValue ---------------------------------------------------- class NullValue (Value): def to_str(self, ectx): return 'NULL' #--- RealType ----------------------------------------------------------------- class RealType (Type): def to_python (self, ctx): return 'asn1.REAL' def eth_tname(self): return 'REAL' def GetTTag(self, ectx): return ('BER_CLASS_UNI', 'BER_UNI_TAG_REAL') def eth_ftype(self, ectx): return ('FT_DOUBLE', 'BASE_NONE') def eth_type_default_body(self, ectx, tname): if (ectx.Ber()): body = ectx.eth_fn_call('dissect_%(ER)s_real', ret='offset', par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s', '%(HF_INDEX)s'), ('%(VAL_PTR)s',),)) elif (ectx.Per()): body = ectx.eth_fn_call('dissect_%(ER)s_real', ret='offset', par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s', '%(VAL_PTR)s',),)) else: body = '#error Can not decode %s' % (tname) return body #--- BooleanType -------------------------------------------------------------- class BooleanType (Type): def to_python (self, ctx): return 'asn1.BOOLEAN' def eth_tname(self): return 'BOOLEAN' def GetTTag(self, ectx): return ('BER_CLASS_UNI', 'BER_UNI_TAG_BOOLEAN') def eth_ftype(self, ectx): return ('FT_BOOLEAN', 'BASE_NONE') def eth_type_default_body(self, ectx, tname): if (ectx.Ber()): body = ectx.eth_fn_call('dissect_%(ER)s_boolean', ret='offset', par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s', '%(HF_INDEX)s', '%(VAL_PTR)s'),)) elif (ectx.Per()): body = ectx.eth_fn_call('dissect_%(ER)s_boolean', ret='offset', par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s', '%(VAL_PTR)s',),)) else: body = '#error Can not decode %s' % (tname) return body #--- OctetStringType ---------------------------------------------------------- class OctetStringType (Type): def to_python (self, ctx): return 'asn1.OCTSTRING' def eth_tname(self): if not self.HasConstraint(): return 'OCTET_STRING' elif self.constr.type == 'Size': return 'OCTET_STRING' + '_' + self.constr.eth_constrname() else: return '#' + self.type + '_' + str(id(self)) def eth_ftype(self, ectx): return ('FT_BYTES', 
'BASE_NONE') def GetTTag(self, ectx): return ('BER_CLASS_UNI', 'BER_UNI_TAG_OCTETSTRING') def eth_need_pdu(self, ectx): pdu = None if self.HasContentsConstraint(): t = self.constr.GetContents(ectx) if t and (ectx.default_containing_variant in ('_pdu', '_pdu_new')): pdu = { 'type' : t, 'new' : ectx.default_containing_variant == '_pdu_new' } return pdu def eth_type_default_pars(self, ectx, tname): pars = Type.eth_type_default_pars(self, ectx, tname) (pars['MIN_VAL'], pars['MAX_VAL'], pars['EXT']) = self.eth_get_size_constr(ectx) if self.HasContentsConstraint(): pars['FN_VARIANT'] = ectx.default_containing_variant t = self.constr.GetContents(ectx) if t: if pars['FN_VARIANT'] in ('_pdu', '_pdu_new'): t = ectx.field[t]['ethname'] pars['TYPE_REF_PROTO'] = '' pars['TYPE_REF_TNAME'] = t pars['TYPE_REF_FN'] = 'dissect_%(TYPE_REF_TNAME)s' else: t = ectx.type[t]['ethname'] pars['TYPE_REF_PROTO'] = ectx.eth_type[t]['proto'] pars['TYPE_REF_TNAME'] = t pars['TYPE_REF_FN'] = 'dissect_%(TYPE_REF_PROTO)s_%(TYPE_REF_TNAME)s' else: pars['TYPE_REF_FN'] = 'NULL' return pars def eth_type_default_body(self, ectx, tname): if (ectx.Ber()): if (ectx.constraints_check and self.HasSizeConstraint()): body = ectx.eth_fn_call('dissect_%(ER)s_constrained_octet_string', ret='offset', par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s'), ('%(MIN_VAL)s', '%(MAX_VAL)s', '%(HF_INDEX)s', '%(VAL_PTR)s',),)) else: body = ectx.eth_fn_call('dissect_%(ER)s_octet_string', ret='offset', par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s', '%(HF_INDEX)s'), ('%(VAL_PTR)s',),)) elif (ectx.Per()): if self.HasContentsConstraint(): body = ectx.eth_fn_call('dissect_%(ER)s_octet_string_containing%(FN_VARIANT)s', ret='offset', par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'), ('%(MIN_VAL)s', '%(MAX_VAL)s', '%(EXT)s', '%(TYPE_REF_FN)s',),)) else: body = ectx.eth_fn_call('dissect_%(ER)s_octet_string', ret='offset', par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'), ('%(MIN_VAL)s', '%(MAX_VAL)s', '%(EXT)s', '%(VAL_PTR)s',),)) else: body = '#error Can not decode %s' % (tname) return body #--- CharacterStringType ------------------------------------------------------ class CharacterStringType (Type): def eth_tname(self): if not self.HasConstraint(): return self.eth_tsname() elif self.constr.type == 'Size': return self.eth_tsname() + '_' + self.constr.eth_constrname() else: return '#' + self.type + '_' + str(id(self)) def eth_ftype(self, ectx): return ('FT_STRING', 'BASE_NONE') class RestrictedCharacterStringType (CharacterStringType): def to_python (self, ctx): return 'asn1.' 
+ self.eth_tsname() def GetTTag(self, ectx): return ('BER_CLASS_UNI', 'BER_UNI_TAG_' + self.eth_tsname()) def eth_type_default_pars(self, ectx, tname): pars = Type.eth_type_default_pars(self, ectx, tname) (pars['MIN_VAL'], pars['MAX_VAL'], pars['EXT']) = self.eth_get_size_constr(ectx) (pars['STRING_TYPE'], pars['STRING_TAG']) = (self.eth_tsname(), self.GetTTag(ectx)[1]) (pars['ALPHABET'], pars['ALPHABET_LEN']) = self.eth_get_alphabet_constr(ectx) return pars def eth_type_default_body(self, ectx, tname): if (ectx.Ber()): if (ectx.constraints_check and self.HasSizeConstraint()): body = ectx.eth_fn_call('dissect_%(ER)s_constrained_restricted_string', ret='offset', par=(('%(IMPLICIT_TAG)s', '%(STRING_TAG)s'), ('%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s'), ('%(MIN_VAL)s', '%(MAX_VAL)s', '%(HF_INDEX)s', '%(VAL_PTR)s',),)) else: body = ectx.eth_fn_call('dissect_%(ER)s_restricted_string', ret='offset', par=(('%(IMPLICIT_TAG)s', '%(STRING_TAG)s'), ('%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s', '%(HF_INDEX)s'), ('%(VAL_PTR)s',),)) elif (ectx.Per() and self.HasPermAlph()): body = ectx.eth_fn_call('dissect_%(ER)s_restricted_character_string', ret='offset', par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'), ('%(MIN_VAL)s', '%(MAX_VAL)s', '%(EXT)s', '%(ALPHABET)s', '%(ALPHABET_LEN)s'), ('%(VAL_PTR)s',),)) elif (ectx.Per()): if (self.eth_tsname() == 'GeneralString'): body = ectx.eth_fn_call('dissect_%(ER)s_%(STRING_TYPE)s', ret='offset', par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'),)) elif (self.eth_tsname() == 'GeneralizedTime'): body = ectx.eth_fn_call('dissect_%(ER)s_VisibleString', ret='offset', par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'), ('%(MIN_VAL)s', '%(MAX_VAL)s', '%(EXT)s',),)) elif (self.eth_tsname() == 'UTCTime'): body = ectx.eth_fn_call('dissect_%(ER)s_VisibleString', ret='offset', par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'), ('%(MIN_VAL)s', '%(MAX_VAL)s', '%(EXT)s',),)) else: body = ectx.eth_fn_call('dissect_%(ER)s_%(STRING_TYPE)s', ret='offset', par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'), ('%(MIN_VAL)s', '%(MAX_VAL)s', '%(EXT)s',),)) else: body = '#error Can not decode %s' % (tname) return body class BMPStringType (RestrictedCharacterStringType): def eth_tsname(self): return 'BMPString' class GeneralStringType (RestrictedCharacterStringType): def eth_tsname(self): return 'GeneralString' class GraphicStringType (RestrictedCharacterStringType): def eth_tsname(self): return 'GraphicString' class IA5StringType (RestrictedCharacterStringType): def eth_tsname(self): return 'IA5String' class NumericStringType (RestrictedCharacterStringType): def eth_tsname(self): return 'NumericString' class PrintableStringType (RestrictedCharacterStringType): def eth_tsname(self): return 'PrintableString' class TeletexStringType (RestrictedCharacterStringType): def eth_tsname(self): return 'TeletexString' class T61StringType (RestrictedCharacterStringType): def eth_tsname(self): return 'T61String' def GetTTag(self, ectx): return ('BER_CLASS_UNI', 'BER_UNI_TAG_TeletexString') class UniversalStringType (RestrictedCharacterStringType): def eth_tsname(self): return 'UniversalString' class UTF8StringType (RestrictedCharacterStringType): def eth_tsname(self): return 'UTF8String' class VideotexStringType (RestrictedCharacterStringType): def eth_tsname(self): return 'VideotexString' class VisibleStringType (RestrictedCharacterStringType): def eth_tsname(self): return 'VisibleString' 
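# --- Illustrative aside (not part of the original compiler) ------------------
# A minimal sketch of the '%(NAME)s' template mechanism used by the
# eth_type_default_body() methods above: eth_fn_call() emits a C call whose
# function name and arguments are templates, and the parameters collected by
# eth_type_default_pars() are substituted via ordinary Python '%' mapping.
# Values may themselves contain placeholders (e.g. TYPE_REF_FN), so the
# expansion is repeated until it reaches a fixed point.  The names
# _example_expand and _example_pars are hypothetical, for demonstration only.
def _example_expand(template, pars):
    prev = None
    while prev != template:
        prev = template
        template = template % pars   # one round of %(NAME)s substitution
    return template

_example_pars = {
    'ER' : 'ber',
    'TYPE_REF_PROTO' : 'x509af',
    'TYPE_REF_TNAME' : 'Certificate',
    'TYPE_REF_FN' : 'dissect_%(TYPE_REF_PROTO)s_%(TYPE_REF_TNAME)s',
}
# _example_expand('%(TYPE_REF_FN)s', _example_pars) == 'dissect_x509af_Certificate'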
class ISO646StringType (RestrictedCharacterStringType): def eth_tsname(self): return 'ISO646String' def GetTTag(self, ectx): return ('BER_CLASS_UNI', 'BER_UNI_TAG_VisibleString') class UnrestrictedCharacterStringType (CharacterStringType): def to_python (self, ctx): return 'asn1.UnrestrictedCharacterString' def eth_tsname(self): return 'CHARACTER_STRING' #--- UsefulType --------------------------------------------------------------- class GeneralizedTime (RestrictedCharacterStringType): def eth_tsname(self): return 'GeneralizedTime' def eth_type_default_body(self, ectx, tname): if (ectx.Ber()): body = ectx.eth_fn_call('dissect_%(ER)s_%(STRING_TYPE)s', ret='offset', par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s', '%(HF_INDEX)s'),)) return body else: return RestrictedCharacterStringType.eth_type_default_body(self, ectx, tname) class UTCTime (RestrictedCharacterStringType): def eth_tsname(self): return 'UTCTime' def eth_type_default_body(self, ectx, tname): if (ectx.Ber()): body = ectx.eth_fn_call('dissect_%(ER)s_%(STRING_TYPE)s', ret='offset', par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s', '%(HF_INDEX)s'),)) return body else: return RestrictedCharacterStringType.eth_type_default_body(self, ectx, tname) class ObjectDescriptor (RestrictedCharacterStringType): def eth_tsname(self): return 'ObjectDescriptor' def eth_type_default_body(self, ectx, tname): if (ectx.Ber()): body = RestrictedCharacterStringType.eth_type_default_body(self, ectx, tname) elif (ectx.Per()): body = ectx.eth_fn_call('dissect_%(ER)s_object_descriptor', ret='offset', par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s', '%(VAL_PTR)s',),)) else: body = '#error Can not decode %s' % (tname) return body #--- ObjectIdentifierType ----------------------------------------------------- class ObjectIdentifierType (Type): def to_python (self, ctx): return 'asn1.OBJECT_IDENTIFIER' def eth_tname(self): return 'OBJECT_IDENTIFIER' def eth_ftype(self, ectx): return ('FT_OID', 'BASE_NONE') def GetTTag(self, ectx): return ('BER_CLASS_UNI', 'BER_UNI_TAG_OID') def eth_type_default_pars(self, ectx, tname): pars = Type.eth_type_default_pars(self, ectx, tname) pars['FN_VARIANT'] = ectx.default_oid_variant return pars def eth_type_default_body(self, ectx, tname): if (ectx.Ber()): body = ectx.eth_fn_call('dissect_%(ER)s_object_identifier%(FN_VARIANT)s', ret='offset', par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s', '%(HF_INDEX)s', '%(VAL_PTR)s',),)) elif (ectx.Per()): body = ectx.eth_fn_call('dissect_%(ER)s_object_identifier%(FN_VARIANT)s', ret='offset', par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s', '%(VAL_PTR)s',),)) else: body = '#error Can not decode %s' % (tname) return body #--- ObjectIdentifierValue ---------------------------------------------------- class ObjectIdentifierValue (Value): def get_num(self, path, val): return str(oid_names.get(path + '/' + val, val)) def to_str(self, ectx): out = '' path = '' first = True sep = '' for v in self.comp_list: if isinstance(v, Node) and (v.type == 'name_and_number'): vstr = v.number elif v.isdigit(): vstr = v else: vstr = self.get_num(path, v) if not first and not vstr.isdigit(): vstr = ectx.value_get_val(vstr) if first: if vstr.isdigit(): out += '"' + vstr else: out += ectx.value_get_eth(vstr) + '"' else: out += sep + vstr path += sep + vstr first = False sep = '.' 
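# Illustration (hypothetical value): for { iso(1) member-body(2) 840 113549 }
# the loop above resolves named components through oid_names and value
# references and accumulates the quoted dotted form, yielding "1.2.840.113549"
# once the closing quote is appended below.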
out += '"' return out def get_dep(self): v = self.comp_list[0] if isinstance(v, Node) and (v.type == 'name_and_number'): return None elif v.isdigit(): return None else: vstr = self.get_num('', v) if vstr.isdigit(): return None else: return vstr class NamedNumber(Node): def to_python (self, ctx): return "('%s',%s)" % (self.ident, self.val) class NamedNumListBase(Node): def to_python (self, ctx): return "asn1.%s_class ([%s])" % (self.asn1_typ,",".join ( [x.to_python (ctx) for x in self.named_list])) #--- RelativeOIDType ---------------------------------------------------------- class RelativeOIDType (Type): def eth_tname(self): return 'RELATIVE_OID' def eth_ftype(self, ectx): return ('FT_REL_OID', 'BASE_NONE') def GetTTag(self, ectx): return ('BER_CLASS_UNI', 'BER_UNI_TAG_RELATIVE_OID') def eth_type_default_pars(self, ectx, tname): pars = Type.eth_type_default_pars(self, ectx, tname) pars['FN_VARIANT'] = ectx.default_oid_variant return pars def eth_type_default_body(self, ectx, tname): if (ectx.Ber()): body = ectx.eth_fn_call('dissect_%(ER)s_relative_oid%(FN_VARIANT)s', ret='offset', par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s', '%(HF_INDEX)s', '%(VAL_PTR)s',),)) elif (ectx.Per()): body = ectx.eth_fn_call('dissect_%(ER)s_relative_oid%(FN_VARIANT)s', ret='offset', par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s', '%(VAL_PTR)s',),)) else: body = '#error Can not decode %s' % (tname) return body #--- IntegerType -------------------------------------------------------------- class IntegerType (Type): def to_python (self, ctx): return "asn1.INTEGER_class ([%s])" % (",".join ( [x.to_python (ctx) for x in self.named_list])) def add_named_value(self, ident, val): e = NamedNumber(ident = ident, val = val) if not self.named_list: self.named_list = [] self.named_list.append(e) def eth_tname(self): if self.named_list: return Type.eth_tname(self) if not self.HasConstraint(): return 'INTEGER' elif self.constr.type == 'SingleValue' or self.constr.type == 'ValueRange': return 'INTEGER' + '_' + self.constr.eth_constrname() else: return 'INTEGER' + '_' + self.constr.eth_tname() def GetTTag(self, ectx): return ('BER_CLASS_UNI', 'BER_UNI_TAG_INTEGER') def eth_ftype(self, ectx): if self.HasConstraint(): if not self.constr.IsNegativ(): if self.constr.Needs64b(ectx): return ('FT_UINT64', 'BASE_DEC') else: return ('FT_UINT32', 'BASE_DEC') if self.constr.Needs64b(ectx): return ('FT_INT64', 'BASE_DEC') return ('FT_INT32', 'BASE_DEC') def eth_strings(self): if (self.named_list): return '$$' else: return 'NULL' def eth_has_vals(self): if (self.named_list): return True else: return False def get_vals(self, ectx): vals = [] for e in (self.named_list): vals.append((int(e.val), e.ident)) return vals def eth_type_vals(self, tname, ectx): if not self.eth_has_vals(): return '' out = '\n' vals = self.get_vals(ectx) out += ectx.eth_vals(tname, vals) return out def reg_enum_vals(self, tname, ectx): vals = self.get_vals(ectx) for (val, id) in vals: ectx.eth_reg_value(id, self, val, ethname=ectx.eth_enum_item(tname, id)) def eth_type_enum(self, tname, ectx): if not self.eth_has_enum(tname, ectx): return '' out = '\n' vals = self.get_vals(ectx) out += ectx.eth_enum(tname, vals) return out def eth_type_default_pars(self, ectx, tname): pars = Type.eth_type_default_pars(self, ectx, tname) if self.HasValueConstraint(): (pars['MIN_VAL'], pars['MAX_VAL'], pars['EXT']) = self.eth_get_value_constr(ectx) if (pars['FN_VARIANT'] == '') and self.constr.Needs64b(ectx): if ectx.Ber(): 
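# For constraints that need more than 32 bits, FN_VARIANT selects the wide
# helper: BER appends '64' (dissect_ber_integer64) while PER appends '_64b'
# (dissect_per_integer_64b); it stays empty for ordinary 32-bit integers.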
pars['FN_VARIANT'] = '64' else: pars['FN_VARIANT'] = '_64b' return pars def eth_type_default_body(self, ectx, tname): if (ectx.Ber()): if (ectx.constraints_check and self.HasValueConstraint()): body = ectx.eth_fn_call('dissect_%(ER)s_constrained_integer%(FN_VARIANT)s', ret='offset', par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s'), ('%(MIN_VAL)s', '%(MAX_VAL)s', '%(HF_INDEX)s', '%(VAL_PTR)s',),)) else: body = ectx.eth_fn_call('dissect_%(ER)s_integer%(FN_VARIANT)s', ret='offset', par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s', '%(HF_INDEX)s'), ('%(VAL_PTR)s',),)) elif (ectx.Per() and not self.HasValueConstraint()): body = ectx.eth_fn_call('dissect_%(ER)s_integer%(FN_VARIANT)s', ret='offset', par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s', '%(VAL_PTR)s'),)) elif (ectx.Per() and self.HasValueConstraint()): body = ectx.eth_fn_call('dissect_%(ER)s_constrained_integer%(FN_VARIANT)s', ret='offset', par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'), ('%(MIN_VAL)s', '%(MAX_VAL)s', '%(VAL_PTR)s', '%(EXT)s'),)) else: body = '#error Can not decode %s' % (tname) return body #--- BitStringType ------------------------------------------------------------ class BitStringType (Type): def to_python (self, ctx): return "asn1.BITSTRING_class ([%s])" % (",".join ( [x.to_python (ctx) for x in self.named_list])) def eth_tname(self): if self.named_list: return Type.eth_tname(self) elif not self.HasConstraint(): return 'BIT_STRING' elif self.constr.IsSize(): return 'BIT_STRING' + '_' + self.constr.eth_constrname() else: return '#' + self.type + '_' + str(id(self)) def GetTTag(self, ectx): return ('BER_CLASS_UNI', 'BER_UNI_TAG_BITSTRING') def eth_ftype(self, ectx): return ('FT_BYTES', 'BASE_NONE') def eth_need_tree(self): return self.named_list def eth_need_pdu(self, ectx): pdu = None if self.HasContentsConstraint(): t = self.constr.GetContents(ectx) if t and (ectx.default_containing_variant in ('_pdu', '_pdu_new')): pdu = { 'type' : t, 'new' : ectx.default_containing_variant == '_pdu_new' } return pdu def eth_named_bits(self): bits = [] if (self.named_list): for e in (self.named_list): bits.append((int(e.val), e.ident)) return bits def eth_type_default_pars(self, ectx, tname): pars = Type.eth_type_default_pars(self, ectx, tname) pars['LEN_PTR'] = 'NULL' (pars['MIN_VAL'], pars['MAX_VAL'], pars['EXT']) = self.eth_get_size_constr(ectx) if 'ETT_INDEX' not in pars: pars['ETT_INDEX'] = '-1' pars['TABLE'] = 'NULL' if self.eth_named_bits(): pars['TABLE'] = '%(PROTOP)s%(TNAME)s_bits' if self.HasContentsConstraint(): pars['FN_VARIANT'] = ectx.default_containing_variant t = self.constr.GetContents(ectx) if t: if pars['FN_VARIANT'] in ('_pdu', '_pdu_new'): t = ectx.field[t]['ethname'] pars['TYPE_REF_PROTO'] = '' pars['TYPE_REF_TNAME'] = t pars['TYPE_REF_FN'] = 'dissect_%(TYPE_REF_TNAME)s' else: t = ectx.type[t]['ethname'] pars['TYPE_REF_PROTO'] = ectx.eth_type[t]['proto'] pars['TYPE_REF_TNAME'] = t pars['TYPE_REF_FN'] = 'dissect_%(TYPE_REF_PROTO)s_%(TYPE_REF_TNAME)s' else: pars['TYPE_REF_FN'] = 'NULL' return pars def eth_type_default_table(self, ectx, tname): #print "eth_type_default_table(tname='%s')" % (tname) table = '' bits = self.eth_named_bits() if (bits and ectx.Ber()): table = ectx.eth_bits(tname, bits) return table def eth_type_default_body(self, ectx, tname): if (ectx.Ber()): if (ectx.constraints_check and self.HasSizeConstraint()): body = ectx.eth_fn_call('dissect_%(ER)s_constrained_bitstring', ret='offset', 
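# The nested tuples passed via 'par' below group the call's arguments; each
# inner tuple appears to be rendered as one line of the generated C call,
# which keeps the emitted dissector source readable.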
par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s'), ('%(MIN_VAL)s', '%(MAX_VAL)s', '%(TABLE)s', '%(HF_INDEX)s', '%(ETT_INDEX)s',), ('%(VAL_PTR)s',),)) else: body = ectx.eth_fn_call('dissect_%(ER)s_bitstring', ret='offset', par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s'), ('%(TABLE)s', '%(HF_INDEX)s', '%(ETT_INDEX)s',), ('%(VAL_PTR)s',),)) elif (ectx.Per()): if self.HasContentsConstraint(): body = ectx.eth_fn_call('dissect_%(ER)s_bit_string_containing%(FN_VARIANT)s', ret='offset', par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'), ('%(MIN_VAL)s', '%(MAX_VAL)s', '%(EXT)s', '%(TYPE_REF_FN)s'),)) else: body = ectx.eth_fn_call('dissect_%(ER)s_bit_string', ret='offset', par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'), ('%(MIN_VAL)s', '%(MAX_VAL)s', '%(EXT)s', '%(VAL_PTR)s', '%(LEN_PTR)s'),)) else: body = '#error Can not decode %s' % (tname) return body #--- BStringValue ------------------------------------------------------------ bstring_tab = { '0000' : '0', '0001' : '1', '0010' : '2', '0011' : '3', '0100' : '4', '0101' : '5', '0110' : '6', '0111' : '7', '1000' : '8', '1001' : '9', '1010' : 'A', '1011' : 'B', '1100' : 'C', '1101' : 'D', '1110' : 'E', '1111' : 'F', } class BStringValue (Value): def to_str(self, ectx): v = self.val[1:-2] if len(v) % 8: v += '0' * (8 - len(v) % 8) vv = '0x' for i in (list(range(0, len(v), 4))): vv += bstring_tab[v[i:i+4]] return vv #--- HStringValue ------------------------------------------------------------ class HStringValue (Value): def to_str(self, ectx): vv = '0x' vv += self.val[1:-2] return vv def __int__(self): return int(self.val[1:-2], 16) #--- FieldSpec ---------------------------------------------------------------- class FieldSpec (Node): def __init__(self,*args, **kw) : self.name = None Node.__init__ (self,*args, **kw) def SetName(self, name): self.name = name def get_repr(self): return ['#UNSUPPORTED_' + self.type] def fld_repr(self): repr = [self.name] repr.extend(self.get_repr()) return repr class TypeFieldSpec (FieldSpec): def get_repr(self): return [] class FixedTypeValueFieldSpec (FieldSpec): def get_repr(self): if isinstance(self.typ, Type_Ref): repr = ['TypeReference', self.typ.val] else: repr = [self.typ.type] return repr class VariableTypeValueFieldSpec (FieldSpec): def get_repr(self): return ['_' + self.type] class FixedTypeValueSetFieldSpec (FieldSpec): def get_repr(self): return ['_' + self.type] class ObjectFieldSpec (FieldSpec): def get_repr(self): return ['ClassReference', self.cls.val] class ObjectSetFieldSpec (FieldSpec): def get_repr(self): return ['ClassReference', self.cls.val] #============================================================================== def p_module_list_1 (t): 'module_list : module_list ModuleDefinition' t[0] = t[1] + [t[2]] def p_module_list_2 (t): 'module_list : ModuleDefinition' t[0] = [t[1]] #--- ITU-T Recommendation X.680 ----------------------------------------------- # 11 ASN.1 lexical items -------------------------------------------------------- # 11.2 Type references def p_type_ref (t): 'type_ref : UCASE_IDENT' t[0] = Type_Ref(val=t[1]) # 11.3 Identifiers def p_identifier (t): 'identifier : LCASE_IDENT' t[0] = t[1] # 11.4 Value references # cause reduce/reduce conflict #def p_valuereference (t): # 'valuereference : LCASE_IDENT' # t[0] = Value_Ref(val=t[1]) # 11.5 Module references def p_modulereference (t): 'modulereference : UCASE_IDENT' t[0] = t[1] # 12 Module definition 
-------------------------------------------------------- # 12.1 def p_ModuleDefinition (t): 'ModuleDefinition : ModuleIdentifier DEFINITIONS TagDefault ASSIGNMENT ModuleBegin BEGIN ModuleBody END' t[0] = Module (ident = t[1], tag_def = t[3], body = t[7]) def p_ModuleBegin (t): 'ModuleBegin : ' if t[-4].val == 'Remote-Operations-Information-Objects': x880_module_begin() def p_TagDefault_1 (t): '''TagDefault : EXPLICIT TAGS | IMPLICIT TAGS | AUTOMATIC TAGS ''' t[0] = Default_Tags (dfl_tag = t[1]) def p_TagDefault_2 (t): 'TagDefault : ' # 12.2 The "TagDefault" is taken as EXPLICIT TAGS if it is "empty". t[0] = Default_Tags (dfl_tag = 'EXPLICIT') def p_ModuleIdentifier_1 (t): 'ModuleIdentifier : modulereference DefinitiveIdentifier' # name, oid t [0] = Node('module_ident', val = t[1], ident = t[2]) def p_ModuleIdentifier_2 (t): 'ModuleIdentifier : modulereference' # name, oid t [0] = Node('module_ident', val = t[1], ident = None) def p_DefinitiveIdentifier (t): 'DefinitiveIdentifier : ObjectIdentifierValue' t[0] = t[1] #def p_module_ref (t): # 'module_ref : UCASE_IDENT' # t[0] = t[1] def p_ModuleBody_1 (t): 'ModuleBody : Exports Imports AssignmentList' t[0] = Module_Body (exports = t[1], imports = t[2], assign_list = t[3]) def p_ModuleBody_2 (t): 'ModuleBody : ' t[0] = Node ('module_body', exports = [], imports = [], assign_list = []) def p_Exports_1 (t): 'Exports : EXPORTS syms_exported SEMICOLON' t[0] = t[2] def p_Exports_2 (t): 'Exports : EXPORTS ALL SEMICOLON' t[0] = [ 'ALL' ] def p_Exports_3 (t): 'Exports : ' t[0] = [ 'ALL' ] def p_syms_exported_1 (t): 'syms_exported : exp_sym_list' t[0] = t[1] def p_syms_exported_2 (t): 'syms_exported : ' t[0] = [] def p_exp_sym_list_1 (t): 'exp_sym_list : Symbol' t[0] = [t[1]] def p_exp_sym_list_2 (t): 'exp_sym_list : exp_sym_list COMMA Symbol' t[0] = t[1] + [t[3]] def p_Imports_1 (t): 'Imports : importsbegin IMPORTS SymbolsImported SEMICOLON' t[0] = t[3] global lcase_ident_assigned lcase_ident_assigned = {} def p_importsbegin (t): 'importsbegin : ' global lcase_ident_assigned global g_conform lcase_ident_assigned = {} lcase_ident_assigned.update(g_conform.use_item('ASSIGNED_ID', 'OBJECT_IDENTIFIER')) def p_Imports_2 (t): 'Imports : ' t[0] = [] def p_SymbolsImported_1(t): 'SymbolsImported : ' t[0] = [] def p_SymbolsImported_2 (t): 'SymbolsImported : SymbolsFromModuleList' t[0] = t[1] def p_SymbolsFromModuleList_1 (t): 'SymbolsFromModuleList : SymbolsFromModuleList SymbolsFromModule' t[0] = t[1] + [t[2]] def p_SymbolsFromModuleList_2 (t): 'SymbolsFromModuleList : SymbolsFromModule' t[0] = [t[1]] def p_SymbolsFromModule (t): 'SymbolsFromModule : SymbolList FROM GlobalModuleReference' t[0] = Node ('SymbolList', symbol_list = t[1], module = t[3]) for s in (t[0].symbol_list): if (isinstance(s, Value_Ref)): lcase_ident_assigned[s.val] = t[3] import_symbols_from_module(t[0].module, t[0].symbol_list) def import_symbols_from_module(module, symbol_list): if module.val == 'Remote-Operations-Information-Objects': for i in range(len(symbol_list)): s = symbol_list[i] if isinstance(s, Type_Ref) or isinstance(s, Class_Ref): x880_import(s.val) if isinstance(s, Type_Ref) and is_class_ident(s.val): symbol_list[i] = Class_Ref (val = s.val) return for i in range(len(symbol_list)): s = symbol_list[i] if isinstance(s, Type_Ref) and is_class_ident("$%s$%s" % (module.val, s.val)): import_class_from_module(module.val, s.val) if isinstance(s, Type_Ref) and is_class_ident(s.val): symbol_list[i] = Class_Ref (val = s.val) def p_GlobalModuleReference (t): 'GlobalModuleReference : 
modulereference AssignedIdentifier' t [0] = Node('module_ident', val = t[1], ident = t[2]) def p_AssignedIdentifier_1 (t): 'AssignedIdentifier : ObjectIdentifierValue' t[0] = t[1] def p_AssignedIdentifier_2 (t): 'AssignedIdentifier : LCASE_IDENT_ASSIGNED' t[0] = t[1] def p_AssignedIdentifier_3 (t): 'AssignedIdentifier : ' pass def p_SymbolList_1 (t): 'SymbolList : Symbol' t[0] = [t[1]] def p_SymbolList_2 (t): 'SymbolList : SymbolList COMMA Symbol' t[0] = t[1] + [t[3]] def p_Symbol (t): '''Symbol : Reference | ParameterizedReference''' t[0] = t[1] def p_Reference_1 (t): '''Reference : type_ref | objectclassreference ''' t[0] = t[1] def p_Reference_2 (t): '''Reference : LCASE_IDENT_ASSIGNED | identifier ''' # instead of valuereference which causes reduce/reduce conflict t[0] = Value_Ref(val=t[1]) def p_AssignmentList_1 (t): 'AssignmentList : AssignmentList Assignment' t[0] = t[1] + [t[2]] def p_AssignmentList_2 (t): 'AssignmentList : Assignment SEMICOLON' t[0] = [t[1]] def p_AssignmentList_3 (t): 'AssignmentList : Assignment' t[0] = [t[1]] def p_Assignment (t): '''Assignment : TypeAssignment | ValueAssignment | ValueSetTypeAssignment | ObjectClassAssignment | ObjectAssignment | ObjectSetAssignment | ParameterizedAssignment | pyquote ''' t[0] = t[1] # 13 Referencing type and value definitions ----------------------------------- # 13.1 def p_DefinedType (t): '''DefinedType : ExternalTypeReference | type_ref | ParameterizedType''' t[0] = t[1] def p_DefinedValue_1(t): '''DefinedValue : ExternalValueReference''' t[0] = t[1] def p_DefinedValue_2(t): '''DefinedValue : identifier ''' # instead of valuereference which causes reduce/reduce conflict t[0] = Value_Ref(val=t[1]) # 13.6 def p_ExternalTypeReference (t): 'ExternalTypeReference : modulereference DOT type_ref' t[0] = Node ('ExternalTypeReference', module = t[1], typ = t[3]) def p_ExternalValueReference (t): 'ExternalValueReference : modulereference DOT identifier' t[0] = Node ('ExternalValueReference', module = t[1], ident = t[3]) # 15 Assigning types and values ----------------------------------------------- # 15.1 def p_TypeAssignment (t): 'TypeAssignment : UCASE_IDENT ASSIGNMENT Type' t[0] = t[3] t[0].SetName(t[1]) # 15.2 def p_ValueAssignment (t): 'ValueAssignment : LCASE_IDENT ValueType ASSIGNMENT Value' t[0] = ValueAssignment(ident = t[1], typ = t[2], val = t[4]) # only "simple" types are supported to simplify grammar def p_ValueType (t): '''ValueType : type_ref | BooleanType | IntegerType | ObjectIdentifierType | OctetStringType | RealType ''' t[0] = t[1] # 15.6 def p_ValueSetTypeAssignment (t): 'ValueSetTypeAssignment : UCASE_IDENT ValueType ASSIGNMENT ValueSet' t[0] = Node('ValueSetTypeAssignment', name=t[1], typ=t[2], val=t[4]) # 15.7 def p_ValueSet (t): 'ValueSet : lbraceignore rbraceignore' t[0] = None # 16 Definition of types and values ------------------------------------------- # 16.1 def p_Type (t): '''Type : BuiltinType | ReferencedType | ConstrainedType''' t[0] = t[1] # 16.2 def p_BuiltinType (t): '''BuiltinType : AnyType | BitStringType | BooleanType | CharacterStringType | ChoiceType | EmbeddedPDVType | EnumeratedType | ExternalType | InstanceOfType | IntegerType | NullType | ObjectClassFieldType | ObjectIdentifierType | OctetStringType | RealType | RelativeOIDType | SequenceType | SequenceOfType | SetType | SetOfType | TaggedType''' t[0] = t[1] # 16.3 def p_ReferencedType (t): '''ReferencedType : DefinedType | UsefulType | SelectionType''' t[0] = t[1] # 16.5 def p_NamedType (t): 'NamedType : identifier Type' t[0] = t[2]
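# PLY convention used by every p_* rule in this grammar: the docstring holds
# the BNF production and t[0] carries the semantic value.  Here the inner
# Type node is reused directly and merely tagged with the field name via
# SetName() below, e.g. 'version INTEGER' becomes an IntegerType named
# 'version'.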
t[0].SetName (t[1]) # 16.7 def p_Value (t): '''Value : BuiltinValue | ReferencedValue | ObjectClassFieldValue''' t[0] = t[1] # 16.9 def p_BuiltinValue (t): '''BuiltinValue : BooleanValue | ChoiceValue | IntegerValue | ObjectIdentifierValue | RealValue | SequenceValue | hex_string | binary_string | char_string''' # XXX we don't support {data} here t[0] = t[1] # 16.11 def p_ReferencedValue (t): '''ReferencedValue : DefinedValue | ValueFromObject''' t[0] = t[1] # 16.13 #def p_NamedValue (t): # 'NamedValue : identifier Value' # t[0] = Node ('NamedValue', ident = t[1], value = t[2]) # 17 Notation for the boolean type -------------------------------------------- # 17.1 def p_BooleanType (t): 'BooleanType : BOOLEAN' t[0] = BooleanType () # 17.2 def p_BooleanValue (t): '''BooleanValue : TRUE | FALSE''' t[0] = t[1] # 18 Notation for the integer type -------------------------------------------- # 18.1 def p_IntegerType_1 (t): 'IntegerType : INTEGER' t[0] = IntegerType (named_list = None) def p_IntegerType_2 (t): 'IntegerType : INTEGER LBRACE NamedNumberList RBRACE' t[0] = IntegerType(named_list = t[3]) def p_NamedNumberList_1 (t): 'NamedNumberList : NamedNumber' t[0] = [t[1]] def p_NamedNumberList_2 (t): 'NamedNumberList : NamedNumberList COMMA NamedNumber' t[0] = t[1] + [t[3]] def p_NamedNumber (t): '''NamedNumber : identifier LPAREN SignedNumber RPAREN | identifier LPAREN DefinedValue RPAREN''' t[0] = NamedNumber(ident = t[1], val = t[3]) def p_SignedNumber_1 (t): 'SignedNumber : NUMBER' t[0] = t [1] def p_SignedNumber_2 (t): 'SignedNumber : MINUS NUMBER' t[0] = '-' + t[2] # 18.9 def p_IntegerValue (t): 'IntegerValue : SignedNumber' t[0] = t [1] # 19 Notation for the enumerated type ----------------------------------------- # 19.1 def p_EnumeratedType (t): 'EnumeratedType : ENUMERATED LBRACE Enumerations RBRACE' t[0] = EnumeratedType (val = t[3]['val'], ext = t[3]['ext']) def p_Enumerations_1 (t): 'Enumerations : Enumeration' t[0] = { 'val' : t[1], 'ext' : None } def p_Enumerations_2 (t): 'Enumerations : Enumeration COMMA ELLIPSIS ExceptionSpec' t[0] = { 'val' : t[1], 'ext' : [] } def p_Enumerations_3 (t): 'Enumerations : Enumeration COMMA ELLIPSIS ExceptionSpec COMMA Enumeration' t[0] = { 'val' : t[1], 'ext' : t[6] } def p_Enumeration_1 (t): 'Enumeration : EnumerationItem' t[0] = [t[1]] def p_Enumeration_2 (t): 'Enumeration : Enumeration COMMA EnumerationItem' t[0] = t[1] + [t[3]] def p_EnumerationItem (t): '''EnumerationItem : Identifier | NamedNumber''' t[0] = t[1] def p_Identifier (t): 'Identifier : identifier' t[0] = Node ('Identifier', ident = t[1]) # 20 Notation for the real type ----------------------------------------------- # 20.1 def p_RealType (t): 'RealType : REAL' t[0] = RealType () # 20.6 def p_RealValue (t): '''RealValue : REAL_NUMBER | SpecialRealValue''' t[0] = t [1] def p_SpecialRealValue (t): '''SpecialRealValue : PLUS_INFINITY | MINUS_INFINITY''' t[0] = t[1] # 21 Notation for the bitstring type ------------------------------------------ # 21.1 def p_BitStringType_1 (t): 'BitStringType : BIT STRING' t[0] = BitStringType (named_list = None) def p_BitStringType_2 (t): 'BitStringType : BIT STRING LBRACE NamedBitList RBRACE' t[0] = BitStringType (named_list = t[4]) def p_NamedBitList_1 (t): 'NamedBitList : NamedBit' t[0] = [t[1]] def p_NamedBitList_2 (t): 'NamedBitList : NamedBitList COMMA NamedBit' t[0] = t[1] + [t[3]] def p_NamedBit (t): '''NamedBit : identifier LPAREN NUMBER RPAREN | identifier LPAREN DefinedValue RPAREN''' t[0] = NamedNumber (ident = t[1], val = t[3]) # 22 
Notation for the octetstring type ---------------------------------------- # 22.1 def p_OctetStringType (t): 'OctetStringType : OCTET STRING' t[0] = OctetStringType () # 23 Notation for the null type ----------------------------------------------- # 23.1 def p_NullType (t): 'NullType : NULL' t[0] = NullType () # 23.3 def p_NullValue (t): 'NullValue : NULL' t[0] = NullValue () # 24 Notation for sequence types ---------------------------------------------- # 24.1 def p_SequenceType_1 (t): 'SequenceType : SEQUENCE LBRACE RBRACE' t[0] = SequenceType (elt_list = []) def p_SequenceType_2 (t): 'SequenceType : SEQUENCE LBRACE ComponentTypeLists RBRACE' t[0] = SequenceType (elt_list = t[3]['elt_list']) if 'ext_list' in t[3]: t[0].ext_list = t[3]['ext_list'] if 'elt_list2' in t[3]: t[0].elt_list2 = t[3]['elt_list2'] def p_ExtensionAndException_1 (t): 'ExtensionAndException : ELLIPSIS' t[0] = [] def p_OptionalExtensionMarker_1 (t): 'OptionalExtensionMarker : COMMA ELLIPSIS' t[0] = True def p_OptionalExtensionMarker_2 (t): 'OptionalExtensionMarker : ' t[0] = False def p_ComponentTypeLists_1 (t): 'ComponentTypeLists : ComponentTypeList' t[0] = {'elt_list' : t[1]} def p_ComponentTypeLists_2 (t): 'ComponentTypeLists : ComponentTypeList COMMA ExtensionAndException OptionalExtensionMarker' t[0] = {'elt_list' : t[1], 'ext_list' : []} def p_ComponentTypeLists_3 (t): 'ComponentTypeLists : ComponentTypeList COMMA ExtensionAndException ExtensionAdditionList OptionalExtensionMarker' t[0] = {'elt_list' : t[1], 'ext_list' : t[4]} def p_ComponentTypeLists_4 (t): 'ComponentTypeLists : ComponentTypeList COMMA ExtensionAndException ExtensionEndMarker COMMA ComponentTypeList' t[0] = {'elt_list' : t[1], 'ext_list' : [], 'elt_list2' : t[6]} def p_ComponentTypeLists_5 (t): 'ComponentTypeLists : ComponentTypeList COMMA ExtensionAndException ExtensionAdditionList ExtensionEndMarker COMMA ComponentTypeList' t[0] = {'elt_list' : t[1], 'ext_list' : t[4], 'elt_list2' : t[7]} def p_ComponentTypeLists_6 (t): 'ComponentTypeLists : ExtensionAndException OptionalExtensionMarker' t[0] = {'elt_list' : [], 'ext_list' : []} def p_ComponentTypeLists_7 (t): 'ComponentTypeLists : ExtensionAndException ExtensionAdditionList OptionalExtensionMarker' t[0] = {'elt_list' : [], 'ext_list' : t[2]} def p_ExtensionEndMarker (t): 'ExtensionEndMarker : COMMA ELLIPSIS' pass def p_ExtensionAdditionList_1 (t): 'ExtensionAdditionList : COMMA ExtensionAddition' t[0] = [t[2]] def p_ExtensionAdditionList_2 (t): 'ExtensionAdditionList : ExtensionAdditionList COMMA ExtensionAddition' t[0] = t[1] + [t[3]] def p_ExtensionAddition_1 (t): 'ExtensionAddition : ExtensionAdditionGroup' t[0] = Node ('elt_type', val = t[1], optional = 0) def p_ExtensionAddition_2 (t): 'ExtensionAddition : ComponentType' t[0] = t[1] def p_ExtensionAdditionGroup (t): 'ExtensionAdditionGroup : LVERBRACK VersionNumber ComponentTypeList RVERBRACK' t[0] = ExtensionAdditionGroup (ver = t[2], elt_list = t[3]) def p_VersionNumber_1 (t): 'VersionNumber : ' def p_VersionNumber_2 (t): 'VersionNumber : NUMBER COLON' t[0] = t[1] def p_ComponentTypeList_1 (t): 'ComponentTypeList : ComponentType' t[0] = [t[1]] def p_ComponentTypeList_2 (t): 'ComponentTypeList : ComponentTypeList COMMA ComponentType' t[0] = t[1] + [t[3]] def p_ComponentType_1 (t): 'ComponentType : NamedType' t[0] = Node ('elt_type', val = t[1], optional = 0) def p_ComponentType_2 (t): 'ComponentType : NamedType OPTIONAL' t[0] = Node ('elt_type', val = t[1], optional = 1) def p_ComponentType_3 (t): 'ComponentType : NamedType DEFAULT 
DefaultValue' t[0] = Node ('elt_type', val = t[1], optional = 1, default = t[3]) def p_ComponentType_4 (t): 'ComponentType : COMPONENTS OF Type' t[0] = Node ('components_of', typ = t[3]) def p_DefaultValue_1 (t): '''DefaultValue : ReferencedValue | BooleanValue | ChoiceValue | IntegerValue | RealValue | hex_string | binary_string | char_string | ObjectClassFieldValue''' t[0] = t[1] def p_DefaultValue_2 (t): 'DefaultValue : lbraceignore rbraceignore' t[0] = '' # 24.17 def p_SequenceValue_1 (t): 'SequenceValue : LBRACE RBRACE' t[0] = [] #def p_SequenceValue_2 (t): # 'SequenceValue : LBRACE ComponentValueList RBRACE' # t[0] = t[2] #def p_ComponentValueList_1 (t): # 'ComponentValueList : NamedValue' # t[0] = [t[1]] #def p_ComponentValueList_2 (t): # 'ComponentValueList : ComponentValueList COMMA NamedValue' # t[0] = t[1] + [t[3]] # 25 Notation for sequence-of types ------------------------------------------- # 25.1 def p_SequenceOfType (t): '''SequenceOfType : SEQUENCE OF Type | SEQUENCE OF NamedType''' t[0] = SequenceOfType (val = t[3], size_constr = None) # 26 Notation for set types --------------------------------------------------- # 26.1 def p_SetType_1 (t): 'SetType : SET LBRACE RBRACE' t[0] = SetType (elt_list = []) def p_SetType_2 (t): 'SetType : SET LBRACE ComponentTypeLists RBRACE' t[0] = SetType (elt_list = t[3]['elt_list']) if 'ext_list' in t[3]: t[0].ext_list = t[3]['ext_list'] if 'elt_list2' in t[3]: t[0].elt_list2 = t[3]['elt_list2'] # 27 Notation for set-of types ------------------------------------------------ # 27.1 def p_SetOfType (t): '''SetOfType : SET OF Type | SET OF NamedType''' t[0] = SetOfType (val = t[3]) # 28 Notation for choice types ------------------------------------------------ # 28.1 def p_ChoiceType (t): 'ChoiceType : CHOICE LBRACE AlternativeTypeLists RBRACE' if 'ext_list' in t[3]: t[0] = ChoiceType (elt_list = t[3]['elt_list'], ext_list = t[3]['ext_list']) else: t[0] = ChoiceType (elt_list = t[3]['elt_list']) def p_AlternativeTypeLists_1 (t): 'AlternativeTypeLists : AlternativeTypeList' t[0] = {'elt_list' : t[1]} def p_AlternativeTypeLists_2 (t): 'AlternativeTypeLists : AlternativeTypeList COMMA ExtensionAndException ExtensionAdditionAlternatives OptionalExtensionMarker' t[0] = {'elt_list' : t[1], 'ext_list' : t[4]} def p_ExtensionAdditionAlternatives_1 (t): 'ExtensionAdditionAlternatives : ExtensionAdditionAlternativesList' t[0] = t[1] def p_ExtensionAdditionAlternatives_2 (t): 'ExtensionAdditionAlternatives : ' t[0] = [] def p_ExtensionAdditionAlternativesList_1 (t): 'ExtensionAdditionAlternativesList : COMMA ExtensionAdditionAlternative' t[0] = t[2] def p_ExtensionAdditionAlternativesList_2 (t): 'ExtensionAdditionAlternativesList : ExtensionAdditionAlternativesList COMMA ExtensionAdditionAlternative' t[0] = t[1] + t[3] def p_ExtensionAdditionAlternative_1 (t): 'ExtensionAdditionAlternative : NamedType' t[0] = [t[1]] def p_ExtensionAdditionAlternative_2 (t): 'ExtensionAdditionAlternative : ExtensionAdditionAlternativesGroup' t[0] = t[1] def p_ExtensionAdditionAlternativesGroup (t): 'ExtensionAdditionAlternativesGroup : LVERBRACK VersionNumber AlternativeTypeList RVERBRACK' t[0] = t[3] def p_AlternativeTypeList_1 (t): 'AlternativeTypeList : NamedType' t[0] = [t[1]] def p_AlternativeTypeList_2 (t): 'AlternativeTypeList : AlternativeTypeList COMMA NamedType' t[0] = t[1] + [t[3]] # 28.10 def p_ChoiceValue_1 (t): '''ChoiceValue : identifier COLON Value | identifier COLON NullValue ''' val = t[3] if not isinstance(val, Value): val = Value(val=val) t[0] = 
ChoiceValue (choice = t[1], val = val) # 29 Notation for selection types # 29.1 def p_SelectionType (t): # 'SelectionType : identifier LT Type' t[0] = SelectionType (typ = t[3], sel = t[1]) # 30 Notation for tagged types ------------------------------------------------ # 30.1 def p_TaggedType_1 (t): 'TaggedType : Tag Type' t[1].mode = 'default' t[0] = t[2] t[0].AddTag(t[1]) def p_TaggedType_2 (t): '''TaggedType : Tag IMPLICIT Type | Tag EXPLICIT Type''' t[1].mode = t[2] t[0] = t[3] t[0].AddTag(t[1]) def p_Tag (t): 'Tag : LBRACK Class ClassNumber RBRACK' t[0] = Tag(cls = t[2], num = t[3]) def p_ClassNumber_1 (t): 'ClassNumber : number' t[0] = t[1] def p_ClassNumber_2 (t): 'ClassNumber : DefinedValue' t[0] = t[1] def p_Class_1 (t): '''Class : UNIVERSAL | APPLICATION | PRIVATE''' t[0] = t[1] def p_Class_2 (t): 'Class :' t[0] = 'CONTEXT' # 31 Notation for the object identifier type ---------------------------------- # 31.1 def p_ObjectIdentifierType (t): 'ObjectIdentifierType : OBJECT IDENTIFIER' t[0] = ObjectIdentifierType() # 31.3 def p_ObjectIdentifierValue (t): 'ObjectIdentifierValue : LBRACE oid_comp_list RBRACE' t[0] = ObjectIdentifierValue (comp_list=t[2]) def p_oid_comp_list_1 (t): 'oid_comp_list : oid_comp_list ObjIdComponents' t[0] = t[1] + [t[2]] def p_oid_comp_list_2 (t): 'oid_comp_list : ObjIdComponents' t[0] = [t[1]] def p_ObjIdComponents (t): '''ObjIdComponents : NameForm | NumberForm | NameAndNumberForm''' t[0] = t[1] def p_NameForm (t): '''NameForm : LCASE_IDENT | LCASE_IDENT_ASSIGNED''' t [0] = t[1] def p_NumberForm (t): '''NumberForm : NUMBER''' # | DefinedValue''' t [0] = t[1] def p_NameAndNumberForm (t): '''NameAndNumberForm : LCASE_IDENT_ASSIGNED LPAREN NumberForm RPAREN | LCASE_IDENT LPAREN NumberForm RPAREN''' t[0] = Node('name_and_number', ident = t[1], number = t[3]) # 32 Notation for the relative object identifier type ------------------------- # 32.1 def p_RelativeOIDType (t): 'RelativeOIDType : RELATIVE_OID' t[0] = RelativeOIDType() # 33 Notation for the embedded-pdv type --------------------------------------- # 33.1 def p_EmbeddedPDVType (t): 'EmbeddedPDVType : EMBEDDED PDV' t[0] = EmbeddedPDVType() # 34 Notation for the external type ------------------------------------------- # 34.1 def p_ExternalType (t): 'ExternalType : EXTERNAL' t[0] = ExternalType() # 36 Notation for character string types -------------------------------------- # 36.1 def p_CharacterStringType (t): '''CharacterStringType : RestrictedCharacterStringType | UnrestrictedCharacterStringType''' t[0] = t[1] # 37 Definition of restricted character string types -------------------------- def p_RestrictedCharacterStringType_1 (t): 'RestrictedCharacterStringType : BMPString' t[0] = BMPStringType () def p_RestrictedCharacterStringType_2 (t): 'RestrictedCharacterStringType : GeneralString' t[0] = GeneralStringType () def p_RestrictedCharacterStringType_3 (t): 'RestrictedCharacterStringType : GraphicString' t[0] = GraphicStringType () def p_RestrictedCharacterStringType_4 (t): 'RestrictedCharacterStringType : IA5String' t[0] = IA5StringType () def p_RestrictedCharacterStringType_5 (t): 'RestrictedCharacterStringType : ISO646String' t[0] = ISO646StringType () def p_RestrictedCharacterStringType_6 (t): 'RestrictedCharacterStringType : NumericString' t[0] = NumericStringType () def p_RestrictedCharacterStringType_7 (t): 'RestrictedCharacterStringType : PrintableString' t[0] = PrintableStringType () def p_RestrictedCharacterStringType_8 (t): 'RestrictedCharacterStringType : TeletexString' t[0] = 
TeletexStringType () def p_RestrictedCharacterStringType_9 (t): 'RestrictedCharacterStringType : T61String' t[0] = T61StringType () def p_RestrictedCharacterStringType_10 (t): 'RestrictedCharacterStringType : UniversalString' t[0] = UniversalStringType () def p_RestrictedCharacterStringType_11 (t): 'RestrictedCharacterStringType : UTF8String' t[0] = UTF8StringType () def p_RestrictedCharacterStringType_12 (t): 'RestrictedCharacterStringType : VideotexString' t[0] = VideotexStringType () def p_RestrictedCharacterStringType_13 (t): 'RestrictedCharacterStringType : VisibleString' t[0] = VisibleStringType () # 40 Definition of unrestricted character string types ------------------------ # 40.1 def p_UnrestrictedCharacterStringType (t): 'UnrestrictedCharacterStringType : CHARACTER STRING' t[0] = UnrestrictedCharacterStringType () # 41 Notation for types defined in clauses 42 to 44 --------------------------- # 42 Generalized time --------------------------------------------------------- def p_UsefulType_1 (t): 'UsefulType : GeneralizedTime' t[0] = GeneralizedTime() # 43 Universal time ----------------------------------------------------------- def p_UsefulType_2 (t): 'UsefulType : UTCTime' t[0] = UTCTime() # 44 The object descriptor type ----------------------------------------------- def p_UsefulType_3 (t): 'UsefulType : ObjectDescriptor' t[0] = ObjectDescriptor() # 45 Constrained types -------------------------------------------------------- # 45.1 def p_ConstrainedType_1 (t): 'ConstrainedType : Type Constraint' t[0] = t[1] t[0].AddConstraint(t[2]) def p_ConstrainedType_2 (t): 'ConstrainedType : TypeWithConstraint' t[0] = t[1] # 45.5 def p_TypeWithConstraint_1 (t): '''TypeWithConstraint : SET Constraint OF Type | SET SizeConstraint OF Type''' t[0] = SetOfType (val = t[4], constr = t[2]) def p_TypeWithConstraint_2 (t): '''TypeWithConstraint : SEQUENCE Constraint OF Type | SEQUENCE SizeConstraint OF Type''' t[0] = SequenceOfType (val = t[4], constr = t[2]) def p_TypeWithConstraint_3 (t): '''TypeWithConstraint : SET Constraint OF NamedType | SET SizeConstraint OF NamedType''' t[0] = SetOfType (val = t[4], constr = t[2]) def p_TypeWithConstraint_4 (t): '''TypeWithConstraint : SEQUENCE Constraint OF NamedType | SEQUENCE SizeConstraint OF NamedType''' t[0] = SequenceOfType (val = t[4], constr = t[2]) # 45.6 # 45.7 def p_Constraint (t): 'Constraint : LPAREN ConstraintSpec ExceptionSpec RPAREN' t[0] = t[2] def p_ConstraintSpec (t): '''ConstraintSpec : ElementSetSpecs | GeneralConstraint''' t[0] = t[1] # 46 Element set specification ------------------------------------------------ # 46.1 def p_ElementSetSpecs_1 (t): 'ElementSetSpecs : RootElementSetSpec' t[0] = t[1] def p_ElementSetSpecs_2 (t): 'ElementSetSpecs : RootElementSetSpec COMMA ELLIPSIS' t[0] = t[1] t[0].ext = True def p_ElementSetSpecs_3 (t): 'ElementSetSpecs : RootElementSetSpec COMMA ELLIPSIS COMMA AdditionalElementSetSpec' t[0] = t[1] t[0].ext = True def p_RootElementSetSpec (t): 'RootElementSetSpec : ElementSetSpec' t[0] = t[1] def p_AdditionalElementSetSpec (t): 'AdditionalElementSetSpec : ElementSetSpec' t[0] = t[1] def p_ElementSetSpec (t): 'ElementSetSpec : Unions' t[0] = t[1] def p_Unions_1 (t): 'Unions : Intersections' t[0] = t[1] def p_Unions_2 (t): 'Unions : UElems UnionMark Intersections' t[0] = Constraint(type = 'Union', subtype = [t[1], t[3]]) def p_UElems (t): 'UElems : Unions' t[0] = t[1] def p_Intersections_1 (t): 'Intersections : IntersectionElements' t[0] = t[1] def p_Intersections_2 (t): 'Intersections : IElems 
IntersectionMark IntersectionElements' t[0] = Constraint(type = 'Intersection', subtype = [t[1], t[3]]) def p_IElems (t): 'IElems : Intersections' t[0] = t[1] def p_IntersectionElements (t): 'IntersectionElements : Elements' t[0] = t[1] def p_UnionMark (t): '''UnionMark : BAR | UNION''' def p_IntersectionMark (t): '''IntersectionMark : CIRCUMFLEX | INTERSECTION''' # 46.5 def p_Elements_1 (t): 'Elements : SubtypeElements' t[0] = t[1] def p_Elements_2 (t): 'Elements : LPAREN ElementSetSpec RPAREN' t[0] = t[2] # 47 Subtype elements --------------------------------------------------------- # 47.1 General def p_SubtypeElements (t): '''SubtypeElements : SingleValue | ContainedSubtype | ValueRange | PermittedAlphabet | SizeConstraint | TypeConstraint | InnerTypeConstraints | PatternConstraint''' t[0] = t[1] # 47.2 Single value # 47.2.1 def p_SingleValue (t): 'SingleValue : Value' t[0] = Constraint(type = 'SingleValue', subtype = t[1]) # 47.3 Contained subtype # 47.3.1 def p_ContainedSubtype (t): 'ContainedSubtype : Includes Type' t[0] = Constraint(type = 'ContainedSubtype', subtype = t[2]) def p_Includes (t): '''Includes : INCLUDES | ''' # 47.4 Value range # 47.4.1 def p_ValueRange (t): 'ValueRange : LowerEndpoint RANGE UpperEndpoint' t[0] = Constraint(type = 'ValueRange', subtype = [t[1], t[3]]) # 47.4.3 def p_LowerEndpoint_1 (t): 'LowerEndpoint : LowerEndValue' t[0] = t[1] def p_LowerEndpoint_2 (t): 'LowerEndpoint : LowerEndValue LT' t[0] = t[1] # but not inclusive range def p_UpperEndpoint_1 (t): 'UpperEndpoint : UpperEndValue' t[0] = t[1] def p_UpperEndpoint_2 (t): 'UpperEndpoint : LT UpperEndValue' t[0] = t[1] # but not inclusive range # 47.4.4 def p_LowerEndValue (t): '''LowerEndValue : Value | MIN''' t[0] = t[1] # XXX def p_UpperEndValue (t): '''UpperEndValue : Value | MAX''' t[0] = t[1] # 47.5 Size constraint # 47.5.1 def p_SizeConstraint (t): 'SizeConstraint : SIZE Constraint' t[0] = Constraint (type = 'Size', subtype = t[2]) # 47.6 Type constraint # 47.6.1 def p_TypeConstraint (t): 'TypeConstraint : Type' t[0] = Constraint (type = 'Type', subtype = t[1]) # 47.7 Permitted alphabet # 47.7.1 def p_PermittedAlphabet (t): 'PermittedAlphabet : FROM Constraint' t[0] = Constraint (type = 'From', subtype = t[2]) # 47.8 Inner subtyping # 47.8.1 def p_InnerTypeConstraints (t): '''InnerTypeConstraints : WITH COMPONENT SingleTypeConstraint | WITH COMPONENTS MultipleTypeConstraints''' pass # ignore PER invisible constraint # 47.8.3 def p_SingleTypeConstraint (t): 'SingleTypeConstraint : Constraint' t[0] = t[1] # 47.8.4 def p_MultipleTypeConstraints (t): '''MultipleTypeConstraints : FullSpecification | PartialSpecification''' t[0] = t[1] def p_FullSpecification (t): 'FullSpecification : LBRACE TypeConstraints RBRACE' t[0] = t[2] def p_PartialSpecification (t): 'PartialSpecification : LBRACE ELLIPSIS COMMA TypeConstraints RBRACE' t[0] = t[4] def p_TypeConstraints_1 (t): 'TypeConstraints : named_constraint' t [0] = [t[1]] def p_TypeConstraints_2 (t): 'TypeConstraints : TypeConstraints COMMA named_constraint' t[0] = t[1] + [t[3]] def p_named_constraint_1 (t): 'named_constraint : identifier constraint' return Node ('named_constraint', ident = t[1], constr = t[2]) def p_named_constraint_2 (t): 'named_constraint : constraint' return Node ('named_constraint', constr = t[1]) def p_constraint (t): 'constraint : value_constraint presence_constraint' t[0] = Node ('constraint', value = t[1], presence = t[2]) def p_value_constraint_1 (t): 'value_constraint : Constraint' t[0] = t[1] def p_value_constraint_2 (t): 
'value_constraint : ' pass def p_presence_constraint_1 (t): '''presence_constraint : PRESENT | ABSENT | OPTIONAL''' t[0] = t[1] def p_presence_constraint_2 (t): '''presence_constraint : ''' pass # 47.9 Pattern constraint # 47.9.1 def p_PatternConstraint (t): 'PatternConstraint : PATTERN Value' t[0] = Constraint (type = 'Pattern', subtype = t[2]) # 49 The exception identifier # 49.4 def p_ExceptionSpec_1 (t): 'ExceptionSpec : EXCLAMATION ExceptionIdentification' pass def p_ExceptionSpec_2 (t): 'ExceptionSpec : ' pass def p_ExceptionIdentification (t): '''ExceptionIdentification : SignedNumber | DefinedValue | Type COLON Value ''' pass # /*-----------------------------------------------------------------------*/ # /* Value Notation Productions */ # /*-----------------------------------------------------------------------*/ def p_binary_string (t): 'binary_string : BSTRING' t[0] = BStringValue(val = t[1]) def p_hex_string (t): 'hex_string : HSTRING' t[0] = HStringValue(val = t[1]) def p_char_string (t): 'char_string : QSTRING' t[0] = t[1] def p_number (t): 'number : NUMBER' t[0] = t[1] #--- ITU-T Recommendation X.208 ----------------------------------------------- # 27 Notation for the any type ------------------------------------------------ # 27.1 def p_AnyType (t): '''AnyType : ANY | ANY DEFINED BY identifier''' t[0] = AnyType() #--- ITU-T Recommendation X.681 ----------------------------------------------- # 7 ASN.1 lexical items ------------------------------------------------------- # 7.1 Information object class references def p_objectclassreference (t): 'objectclassreference : CLASS_IDENT' t[0] = Class_Ref(val=t[1]) # 7.2 Information object references def p_objectreference (t): 'objectreference : LCASE_IDENT' t[0] = t[1] # 7.3 Information object set references #def p_objectsetreference (t): # 'objectsetreference : UCASE_IDENT' # t[0] = t[1] # 7.4 Type field references # ucasefieldreference # 7.5 Value field references # lcasefieldreference # 7.6 Value set field references # ucasefieldreference # 7.7 Object field references # lcasefieldreference # 7.8 Object set field references # ucasefieldreference def p_ucasefieldreference (t): 'ucasefieldreference : AMPERSAND UCASE_IDENT' t[0] = '&' + t[2] def p_lcasefieldreference (t): 'lcasefieldreference : AMPERSAND LCASE_IDENT' t[0] = '&' + t[2] # 8 Referencing definitions # 8.1 def p_DefinedObjectClass (t): '''DefinedObjectClass : objectclassreference | UsefulObjectClassReference''' t[0] = t[1] global obj_class obj_class = t[0].val def p_DefinedObject (t): '''DefinedObject : objectreference''' t[0] = t[1] # 8.4 def p_UsefulObjectClassReference (t): '''UsefulObjectClassReference : TYPE_IDENTIFIER | ABSTRACT_SYNTAX''' t[0] = Class_Ref(val=t[1]) # 9 Information object class definition and assignment # 9.1 def p_ObjectClassAssignment (t): '''ObjectClassAssignment : CLASS_IDENT ASSIGNMENT ObjectClass | UCASE_IDENT ASSIGNMENT ObjectClass''' t[0] = t[3] t[0].SetName(t[1]) if isinstance(t[0], ObjectClassDefn): t[0].reg_types() # 9.2 def p_ObjectClass (t): '''ObjectClass : DefinedObjectClass | ObjectClassDefn | ParameterizedObjectClass ''' t[0] = t[1] # 9.3 def p_ObjectClassDefn (t): '''ObjectClassDefn : CLASS LBRACE FieldSpecs RBRACE | CLASS LBRACE FieldSpecs RBRACE WithSyntaxSpec''' t[0] = ObjectClassDefn(fields = t[3]) def p_FieldSpecs_1 (t): 'FieldSpecs : FieldSpec' t[0] = [t[1]] def p_FieldSpecs_2 (t): 'FieldSpecs : FieldSpecs COMMA FieldSpec' t[0] = t[1] + [t[3]] def p_WithSyntaxSpec (t): 'WithSyntaxSpec : WITH SYNTAX lbraceignore rbraceignore' 
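# lbraceignore/rbraceignore are pseudo-productions that flip the lexer into a
# brace-counting 'braceignore' state, so constructs this compiler does not
# model (WITH SYNTAX blocks, value sets, parameter lists, complex DEFAULT
# values) are consumed and discarded wholesale.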
t[0] = None # 9.4 def p_FieldSpec (t): '''FieldSpec : TypeFieldSpec | FixedTypeValueFieldSpec | VariableTypeValueFieldSpec | FixedTypeValueSetFieldSpec | ObjectFieldSpec | ObjectSetFieldSpec ''' t[0] = t[1] # 9.5 def p_TypeFieldSpec (t): '''TypeFieldSpec : ucasefieldreference | ucasefieldreference TypeOptionalitySpec ''' t[0] = TypeFieldSpec() t[0].SetName(t[1]) def p_TypeOptionalitySpec_1 (t): 'TypeOptionalitySpec ::= OPTIONAL' pass def p_TypeOptionalitySpec_2 (t): 'TypeOptionalitySpec ::= DEFAULT Type' pass # 9.6 def p_FixedTypeValueFieldSpec (t): '''FixedTypeValueFieldSpec : lcasefieldreference Type | lcasefieldreference Type UNIQUE | lcasefieldreference Type ValueOptionalitySpec | lcasefieldreference Type UNIQUE ValueOptionalitySpec ''' t[0] = FixedTypeValueFieldSpec(typ = t[2]) t[0].SetName(t[1]) def p_ValueOptionalitySpec_1 (t): 'ValueOptionalitySpec ::= OPTIONAL' pass def p_ValueOptionalitySpec_2 (t): 'ValueOptionalitySpec ::= DEFAULT Value' pass # 9.8 def p_VariableTypeValueFieldSpec (t): '''VariableTypeValueFieldSpec : lcasefieldreference FieldName | lcasefieldreference FieldName ValueOptionalitySpec ''' t[0] = VariableTypeValueFieldSpec() t[0].SetName(t[1]) # 9.9 def p_FixedTypeValueSetFieldSpec (t): '''FixedTypeValueSetFieldSpec : ucasefieldreference Type | ucasefieldreference Type ValueSetOptionalitySpec ''' t[0] = FixedTypeValueSetFieldSpec() t[0].SetName(t[1]) def p_ValueSetOptionalitySpec_1 (t): 'ValueSetOptionalitySpec ::= OPTIONAL' pass def p_ValueSetOptionalitySpec_2 (t): 'ValueSetOptionalitySpec ::= DEFAULT ValueSet' pass # 9.11 def p_ObjectFieldSpec (t): '''ObjectFieldSpec : lcasefieldreference DefinedObjectClass | lcasefieldreference DefinedObjectClass ObjectOptionalitySpec ''' t[0] = ObjectFieldSpec(cls=t[2]) t[0].SetName(t[1]) global obj_class obj_class = None def p_ObjectOptionalitySpec_1 (t): 'ObjectOptionalitySpec ::= OPTIONAL' pass def p_ObjectOptionalitySpec_2 (t): 'ObjectOptionalitySpec ::= DEFAULT Object' pass # 9.12 def p_ObjectSetFieldSpec (t): '''ObjectSetFieldSpec : ucasefieldreference DefinedObjectClass | ucasefieldreference DefinedObjectClass ObjectSetOptionalitySpec ''' t[0] = ObjectSetFieldSpec(cls=t[2]) t[0].SetName(t[1]) def p_ObjectSetOptionalitySpec_1 (t): 'ObjectSetOptionalitySpec ::= OPTIONAL' pass def p_ObjectSetOptionalitySpec_2 (t): 'ObjectSetOptionalitySpec ::= DEFAULT ObjectSet' pass # 9.13 def p_PrimitiveFieldName (t): '''PrimitiveFieldName : ucasefieldreference | lcasefieldreference ''' t[0] = t[1] # 9.13 def p_FieldName_1 (t): 'FieldName : PrimitiveFieldName' t[0] = t[1] def p_FieldName_2 (t): 'FieldName : FieldName DOT PrimitiveFieldName' t[0] = t[1] + '.' 
+ t[3] # 11 Information object definition and assignment # 11.1 def p_ObjectAssignment (t): 'ObjectAssignment : objectreference DefinedObjectClass ASSIGNMENT Object' t[0] = ObjectAssignment (ident = t[1], cls=t[2].val, val=t[4]) global obj_class obj_class = None # 11.3 def p_Object (t): '''Object : DefinedObject | ObjectDefn | ParameterizedObject''' t[0] = t[1] # 11.4 def p_ObjectDefn (t): 'ObjectDefn : lbraceobject bodyobject rbraceobject' t[0] = t[2] # {...} block of object definition def p_lbraceobject(t): 'lbraceobject : braceobjectbegin LBRACE' t[0] = t[1] def p_braceobjectbegin(t): 'braceobjectbegin : ' global lexer global obj_class if set_class_syntax(obj_class): state = 'INITIAL' else: lexer.level = 1 state = 'braceignore' lexer.push_state(state) def p_rbraceobject(t): 'rbraceobject : braceobjectend RBRACE' t[0] = t[2] def p_braceobjectend(t): 'braceobjectend : ' global lexer lexer.pop_state() set_class_syntax(None) def p_bodyobject_1 (t): 'bodyobject : ' t[0] = { } def p_bodyobject_2 (t): 'bodyobject : cls_syntax_list' t[0] = t[1] def p_cls_syntax_list_1 (t): 'cls_syntax_list : cls_syntax_list cls_syntax' t[0] = t[1] t[0].update(t[2]) def p_cls_syntax_list_2 (t): 'cls_syntax_list : cls_syntax' t[0] = t[1] # X.681 def p_cls_syntax_1 (t): 'cls_syntax : Type IDENTIFIED BY Value' t[0] = { get_class_fieled(' ') : t[1], get_class_fieled(' '.join((t[2], t[3]))) : t[4] } def p_cls_syntax_2 (t): 'cls_syntax : HAS PROPERTY Value' t[0] = { get_class_fieled(' '.join(t[1:-1])) : t[-1:][0] } # X.880 def p_cls_syntax_3 (t): '''cls_syntax : ERRORS ObjectSet | LINKED ObjectSet | RETURN RESULT BooleanValue | SYNCHRONOUS BooleanValue | INVOKE PRIORITY Value | RESULT_PRIORITY Value | PRIORITY Value | ALWAYS RESPONDS BooleanValue | IDEMPOTENT BooleanValue ''' t[0] = { get_class_fieled(' '.join(t[1:-1])) : t[-1:][0] } def p_cls_syntax_4 (t): '''cls_syntax : ARGUMENT Type | RESULT Type | PARAMETER Type ''' t[0] = { get_class_fieled(t[1]) : t[2] } def p_cls_syntax_5 (t): 'cls_syntax : CODE Value' fld = get_class_fieled(t[1]); t[0] = { fld : t[2] } if isinstance(t[2], ChoiceValue): fldt = fld + '.' + t[2].choice t[0][fldt] = t[2] def p_cls_syntax_6 (t): '''cls_syntax : ARGUMENT Type OPTIONAL BooleanValue | RESULT Type OPTIONAL BooleanValue | PARAMETER Type OPTIONAL BooleanValue ''' t[0] = { get_class_fieled(t[1]) : t[2], get_class_fieled(' '.join((t[1], t[3]))) : t[4] } # 12 Information object set definition and assignment # 12.1 def p_ObjectSetAssignment (t): 'ObjectSetAssignment : UCASE_IDENT CLASS_IDENT ASSIGNMENT ObjectSet' t[0] = Node('ObjectSetAssignment', name=t[1], cls=t[2], val=t[4]) # 12.3 def p_ObjectSet (t): 'ObjectSet : lbraceignore rbraceignore' t[0] = None # 14 Notation for the object class field type --------------------------------- # 14.1 def p_ObjectClassFieldType (t): 'ObjectClassFieldType : DefinedObjectClass DOT FieldName' t[0] = get_type_from_class(t[1], t[3]) # 14.6 def p_ObjectClassFieldValue (t): '''ObjectClassFieldValue : OpenTypeFieldVal''' t[0] = t[1] def p_OpenTypeFieldVal (t): '''OpenTypeFieldVal : Type COLON Value | NullType COLON NullValue''' t[0] = t[3] # 15 Information from objects ------------------------------------------------- # 15.1 def p_ValueFromObject (t): 'ValueFromObject : LCASE_IDENT DOT FieldName' t[0] = t[1] + '.' 
+ t[3] # Annex C - The instance-of type ---------------------------------------------- # C.2 def p_InstanceOfType (t): 'InstanceOfType : INSTANCE OF DefinedObjectClass' t[0] = InstanceOfType() # --- tables --- useful_object_class_types = { # Annex A 'TYPE-IDENTIFIER.&id' : lambda : ObjectIdentifierType(), 'TYPE-IDENTIFIER.&Type' : lambda : OpenType(), # Annex B 'ABSTRACT-SYNTAX.&id' : lambda : ObjectIdentifierType(), 'ABSTRACT-SYNTAX.&Type' : lambda : OpenType(), 'ABSTRACT-SYNTAX.&property' : lambda : BitStringType(), } object_class_types = { } object_class_typerefs = { } object_class_classrefs = { } # dummy types class _VariableTypeValueFieldSpec (AnyType): pass class _FixedTypeValueSetFieldSpec (AnyType): pass class_types_creator = { 'BooleanType' : lambda : BooleanType(), 'IntegerType' : lambda : IntegerType(), 'ObjectIdentifierType' : lambda : ObjectIdentifierType(), 'OpenType' : lambda : OpenType(), # dummy types '_VariableTypeValueFieldSpec' : lambda : _VariableTypeValueFieldSpec(), '_FixedTypeValueSetFieldSpec' : lambda : _FixedTypeValueSetFieldSpec(), } class_names = { } x681_syntaxes = { 'TYPE-IDENTIFIER' : { ' ' : '&Type', 'IDENTIFIED' : 'IDENTIFIED', #'BY' : 'BY', 'IDENTIFIED BY' : '&id', }, 'ABSTRACT-SYNTAX' : { ' ' : '&Type', 'IDENTIFIED' : 'IDENTIFIED', #'BY' : 'BY', 'IDENTIFIED BY' : '&id', 'HAS' : 'HAS', 'PROPERTY' : 'PROPERTY', 'HAS PROPERTY' : '&property', }, } class_syntaxes_enabled = { 'TYPE-IDENTIFIER' : True, 'ABSTRACT-SYNTAX' : True, } class_syntaxes = { 'TYPE-IDENTIFIER' : x681_syntaxes['TYPE-IDENTIFIER'], 'ABSTRACT-SYNTAX' : x681_syntaxes['ABSTRACT-SYNTAX'], } class_current_syntax = None def get_syntax_tokens(syntaxes): tokens = { } for s in (syntaxes): for k in (list(syntaxes[s].keys())): if k.find(' ') < 0: tokens[k] = k tokens[k] = tokens[k].replace('-', '_') return list(tokens.values()) tokens = tokens + get_syntax_tokens(x681_syntaxes) def set_class_syntax(syntax): global class_syntaxes_enabled global class_current_syntax #print "set_class_syntax", syntax, class_current_syntax if class_syntaxes_enabled.get(syntax, False): class_current_syntax = syntax return True else: class_current_syntax = None return False def is_class_syntax(name): global class_syntaxes global class_current_syntax #print "is_class_syntax", name, class_current_syntax if not class_current_syntax: return False return name in class_syntaxes[class_current_syntax] def get_class_fieled(name): if not class_current_syntax: return None return class_syntaxes[class_current_syntax][name] def is_class_ident(name): return name in class_names def add_class_ident(name): #print "add_class_ident", name class_names[name] = name def get_type_from_class(cls, fld): flds = fld.split('.') if (isinstance(cls, Class_Ref)): key = cls.val + '.' + flds[0] else: key = cls + '.' + flds[0] if key in object_class_classrefs: return get_type_from_class(object_class_classrefs[key], '.'.join(flds[1:])) if key in object_class_typerefs: return Type_Ref(val=object_class_typerefs[key]) creator = lambda : AnyType() creator = useful_object_class_types.get(key, creator) creator = object_class_types.get(key, creator) return creator() def set_type_to_class(cls, fld, pars): #print "set_type_to_class", cls, fld, pars key = cls + '.' 
+ fld typename = 'OpenType' if (len(pars) > 0): typename = pars[0] else: pars.append(typename) typeref = None if (len(pars) > 1): if (isinstance(pars[1], Class_Ref)): pars[1] = pars[1].val typeref = pars[1] msg = None if key in object_class_types: msg = object_class_types[key]().type if key in object_class_typerefs: msg = "TypeReference " + object_class_typerefs[key] if key in object_class_classrefs: msg = "ClassReference " + object_class_classrefs[key] if msg == ' '.join(pars): msg = None if msg: msg0 = "Can not define CLASS field %s as '%s'\n" % (key, ' '.join(pars)) msg1 = "Already defined as '%s'" % (msg) raise CompError(msg0 + msg1) if (typename == 'ClassReference'): if not typeref: return False object_class_classrefs[key] = typeref return True if (typename == 'TypeReference'): if not typeref: return False object_class_typerefs[key] = typeref return True creator = class_types_creator.get(typename) if creator: object_class_types[key] = creator return True else: return False def import_class_from_module(mod, cls): add_class_ident(cls) mcls = "$%s$%s" % (mod, cls) for k in list(object_class_classrefs.keys()): kk = k.split('.', 1) if kk[0] == mcls: object_class_classrefs[cls + '.' + kk[1]] = object_class_classrefs[k] for k in list(object_class_typerefs.keys()): kk = k.split('.', 1) if kk[0] == mcls: object_class_typerefs[cls + '.' + kk[1]] = object_class_typerefs[k] for k in list(object_class_types.keys()): kk = k.split('.', 1) if kk[0] == mcls: object_class_types[cls + '.' + kk[1]] = object_class_types[k] #--- ITU-T Recommendation X.682 ----------------------------------------------- # 8 General constraint specification ------------------------------------------ # 8.1 def p_GeneralConstraint (t): '''GeneralConstraint : UserDefinedConstraint | TableConstraint | ContentsConstraint''' t[0] = t[1] # 9 User-defined constraints -------------------------------------------------- # 9.1 def p_UserDefinedConstraint (t): 'UserDefinedConstraint : CONSTRAINED BY LBRACE UserDefinedConstraintParameterList RBRACE' t[0] = Constraint(type = 'UserDefined', subtype = t[4]) def p_UserDefinedConstraintParameterList_1 (t): 'UserDefinedConstraintParameterList : ' t[0] = [] def p_UserDefinedConstraintParameterList_2 (t): 'UserDefinedConstraintParameterList : UserDefinedConstraintParameter' t[0] = [t[1]] def p_UserDefinedConstraintParameterList_3 (t): 'UserDefinedConstraintParameterList : UserDefinedConstraintParameterList COMMA UserDefinedConstraintParameter' t[0] = t[1] + [t[3]] # 9.3 def p_UserDefinedConstraintParameter (t): 'UserDefinedConstraintParameter : Type' t[0] = t[1] # 10 Table constraints, including component relation constraints -------------- # 10.3 def p_TableConstraint (t): '''TableConstraint : SimpleTableConstraint | ComponentRelationConstraint''' t[0] = Constraint(type = 'Table', subtype = t[1]) def p_SimpleTableConstraint (t): 'SimpleTableConstraint : LBRACE UCASE_IDENT RBRACE' t[0] = t[2] # 10.7 def p_ComponentRelationConstraint (t): 'ComponentRelationConstraint : LBRACE UCASE_IDENT RBRACE LBRACE AtNotations RBRACE' t[0] = t[2] + str(t[5]) def p_AtNotations_1 (t): 'AtNotations : AtNotation' t[0] = [t[1]] def p_AtNotations_2 (t): 'AtNotations : AtNotations COMMA AtNotation' t[0] = t[1] + [t[3]] def p_AtNotation_1 (t): 'AtNotation : AT ComponentIdList' t[0] = '@' + t[2] def p_AtNotation_2 (t): 'AtNotation : AT DOT Level ComponentIdList' t[0] = '@.' + t[3] + t[4] def p_Level_1 (t): 'Level : DOT Level' t[0] = '.'
+ t[2] def p_Level_2 (t): 'Level : ' t[0] = '' def p_ComponentIdList_1 (t): 'ComponentIdList : LCASE_IDENT' t[0] = t[1] def p_ComponentIdList_2 (t): 'ComponentIdList : ComponentIdList DOT LCASE_IDENT' t[0] = t[1] + '.' + t[3] # 11 Contents constraints ----------------------------------------------------- # 11.1 def p_ContentsConstraint (t): 'ContentsConstraint : CONTAINING type_ref' t[0] = Constraint(type = 'Contents', subtype = t[2]) #--- ITU-T Recommendation X.683 ----------------------------------------------- # 8 Parameterized assignments ------------------------------------------------- # 8.1 def p_ParameterizedAssignment (t): '''ParameterizedAssignment : ParameterizedTypeAssignment | ParameterizedObjectClassAssignment | ParameterizedObjectAssignment | ParameterizedObjectSetAssignment''' t[0] = t[1] # 8.2 def p_ParameterizedTypeAssignment (t): 'ParameterizedTypeAssignment : UCASE_IDENT ParameterList ASSIGNMENT Type' t[0] = t[4] t[0].SetName(t[1]) # t[0].SetName(t[1] + 'xxx') def p_ParameterizedObjectClassAssignment (t): '''ParameterizedObjectClassAssignment : CLASS_IDENT ParameterList ASSIGNMENT ObjectClass | UCASE_IDENT ParameterList ASSIGNMENT ObjectClass''' t[0] = t[4] t[0].SetName(t[1]) if isinstance(t[0], ObjectClassDefn): t[0].reg_types() def p_ParameterizedObjectAssignment (t): 'ParameterizedObjectAssignment : objectreference ParameterList DefinedObjectClass ASSIGNMENT Object' t[0] = ObjectAssignment (ident = t[1], cls=t[3].val, val=t[5]) global obj_class obj_class = None def p_ParameterizedObjectSetAssignment (t): 'ParameterizedObjectSetAssignment : UCASE_IDENT ParameterList DefinedObjectClass ASSIGNMENT ObjectSet' t[0] = Node('ObjectSetAssignment', name=t[1], cls=t[3].val, val=t[5]) # 8.3 def p_ParameterList (t): 'ParameterList : lbraceignore rbraceignore' #def p_ParameterList (t): # 'ParameterList : LBRACE Parameters RBRACE' # t[0] = t[2] #def p_Parameters_1 (t): # 'Parameters : Parameter' # t[0] = [t[1]] #def p_Parameters_2 (t): # 'Parameters : Parameters COMMA Parameter' # t[0] = t[1] + [t[3]] #def p_Parameter_1 (t): # 'Parameter : Type COLON Reference' # t[0] = [t[1], t[3]] #def p_Parameter_2 (t): # 'Parameter : Reference' # t[0] = t[1] # 9 Referencing parameterized definitions ------------------------------------- # 9.1 def p_ParameterizedReference (t): 'ParameterizedReference : Reference LBRACE RBRACE' t[0] = t[1] #t[0].val += 'xxx' # 9.2 def p_ParameterizedType (t): 'ParameterizedType : type_ref ActualParameterList' t[0] = t[1] #t[0].val += 'xxx' def p_ParameterizedObjectClass (t): 'ParameterizedObjectClass : DefinedObjectClass ActualParameterList' t[0] = t[1] #t[0].val += 'xxx' def p_ParameterizedObject (t): 'ParameterizedObject : DefinedObject ActualParameterList' t[0] = t[1] #t[0].val += 'xxx' # 9.5 def p_ActualParameterList (t): 'ActualParameterList : lbraceignore rbraceignore' #def p_ActualParameterList (t): # 'ActualParameterList : LBRACE ActualParameters RBRACE' # t[0] = t[2] #def p_ActualParameters_1 (t): # 'ActualParameters : ActualParameter' # t[0] = [t[1]] #def p_ActualParameters_2 (t): # 'ActualParameters : ActualParameters COMMA ActualParameter' # t[0] = t[1] + [t[3]] #def p_ActualParameter (t): # '''ActualParameter : Type # | Value''' # t[0] = t[1] #--- ITU-T Recommendation X.880 ----------------------------------------------- x880_classes = { 'OPERATION' : { '&ArgumentType' : [], '&argumentTypeOptional' : [ 'BooleanType' ], '&returnResult' : [ 'BooleanType' ], '&ResultType' : [], '&resultTypeOptional' : [ 'BooleanType' ], '&Errors' : [ 'ClassReference', 
'ERROR' ], '&Linked' : [ 'ClassReference', 'OPERATION' ], '&synchronous' : [ 'BooleanType' ], '&idempotent' : [ 'BooleanType' ], '&alwaysReturns' : [ 'BooleanType' ], '&InvokePriority' : [ '_FixedTypeValueSetFieldSpec' ], '&ResultPriority' : [ '_FixedTypeValueSetFieldSpec' ], '&operationCode' : [ 'TypeReference', 'Code' ], }, 'ERROR' : { '&ParameterType' : [], '&parameterTypeOptional' : [ 'BooleanType' ], '&ErrorPriority' : [ '_FixedTypeValueSetFieldSpec' ], '&errorCode' : [ 'TypeReference', 'Code' ], }, 'OPERATION-PACKAGE' : { '&Both' : [ 'ClassReference', 'OPERATION' ], '&Consumer' : [ 'ClassReference', 'OPERATION' ], '&Supplier' : [ 'ClassReference', 'OPERATION' ], '&id' : [ 'ObjectIdentifierType' ], }, 'CONNECTION-PACKAGE' : { '&bind' : [ 'ClassReference', 'OPERATION' ], '&unbind' : [ 'ClassReference', 'OPERATION' ], '&responderCanUnbind' : [ 'BooleanType' ], '&unbindCanFail' : [ 'BooleanType' ], '&id' : [ 'ObjectIdentifierType' ], }, 'CONTRACT' : { '&connection' : [ 'ClassReference', 'CONNECTION-PACKAGE' ], '&OperationsOf' : [ 'ClassReference', 'OPERATION-PACKAGE' ], '&InitiatorConsumerOf' : [ 'ClassReference', 'OPERATION-PACKAGE' ], '&InitiatorSupplierOf' : [ 'ClassReference', 'OPERATION-PACKAGE' ], '&id' : [ 'ObjectIdentifierType' ], }, 'ROS-OBJECT-CLASS' : { '&Is' : [ 'ClassReference', 'ROS-OBJECT-CLASS' ], '&Initiates' : [ 'ClassReference', 'CONTRACT' ], '&Responds' : [ 'ClassReference', 'CONTRACT' ], '&InitiatesAndResponds' : [ 'ClassReference', 'CONTRACT' ], '&id' : [ 'ObjectIdentifierType' ], }, } x880_syntaxes = { 'OPERATION' : { 'ARGUMENT' : '&ArgumentType', 'ARGUMENT OPTIONAL' : '&argumentTypeOptional', 'RESULT' : '&ResultType', 'RESULT OPTIONAL' : '&resultTypeOptional', 'RETURN' : 'RETURN', 'RETURN RESULT' : '&returnResult', 'ERRORS' : '&Errors', 'LINKED' : '&Linked', 'SYNCHRONOUS' : '&synchronous', 'IDEMPOTENT' : '&idempotent', 'ALWAYS' : 'ALWAYS', 'RESPONDS' : 'RESPONDS', 'ALWAYS RESPONDS' : '&alwaysReturns', 'INVOKE' : 'INVOKE', 'PRIORITY' : 'PRIORITY', 'INVOKE PRIORITY' : '&InvokePriority', 'RESULT-PRIORITY': '&ResultPriority', 'CODE' : '&operationCode', }, 'ERROR' : { 'PARAMETER' : '&ParameterType', 'PARAMETER OPTIONAL' : '&parameterTypeOptional', 'PRIORITY' : '&ErrorPriority', 'CODE' : '&errorCode', }, # 'OPERATION-PACKAGE' : { # }, # 'CONNECTION-PACKAGE' : { # }, # 'CONTRACT' : { # }, # 'ROS-OBJECT-CLASS' : { # }, } def x880_module_begin(): #print "x880_module_begin()" for name in list(x880_classes.keys()): add_class_ident(name) def x880_import(name): if name in x880_syntaxes: class_syntaxes_enabled[name] = True class_syntaxes[name] = x880_syntaxes[name] if name in x880_classes: add_class_ident(name) for f in (list(x880_classes[name].keys())): set_type_to_class(name, f, x880_classes[name][f]) tokens = tokens + get_syntax_tokens(x880_syntaxes) # {...} OID value #def p_lbrace_oid(t): # 'lbrace_oid : brace_oid_begin LBRACE' # t[0] = t[1] #def p_brace_oid_begin(t): # 'brace_oid_begin : ' # global in_oid # in_oid = True #def p_rbrace_oid(t): # 'rbrace_oid : brace_oid_end RBRACE' # t[0] = t[2] #def p_brace_oid_end(t): # 'brace_oid_end : ' # global in_oid # in_oid = False # {...} block to be ignored def p_lbraceignore(t): 'lbraceignore : braceignorebegin LBRACE' t[0] = t[1] def p_braceignorebegin(t): 'braceignorebegin : ' global lexer lexer.level = 1 lexer.push_state('braceignore') def p_rbraceignore(t): 'rbraceignore : braceignoreend RBRACE' t[0] = t[2] def p_braceignoreend(t): 'braceignoreend : ' global lexer lexer.pop_state() def p_error(t): global input_file raise 
ParseError(t, input_file) def p_pyquote (t): '''pyquote : PYQUOTE''' t[0] = PyQuote (val = t[1]) def testlex (s): lexer.input (s) while True: token = lexer.token () if not token: break print(token) def do_module (ast, defined_dict): assert (ast.type == 'Module') ctx = Ctx (defined_dict) print(ast.to_python (ctx)) print(ctx.output_assignments ()) print(ctx.output_pyquotes ()) def eth_do_module (ast, ectx): assert (ast.type == 'Module') if ectx.dbg('s'): print(ast.str_depth(0)) ast.to_eth(ectx) def testyacc(s, fn, defined_dict): ast = yacc.parse(s, debug=0) time_str = time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime()) print("""#!/usr/bin/env python # Auto-generated from %s at %s from PyZ3950 import asn1""" % (fn, time_str)) for module in ast: eth_do_module (module, defined_dict) # Wireshark compiler def eth_usage(): print(""" asn2wrs [-h|?] [-d dbg] [-b] [-p proto] [-c cnf_file] [-e] input_file(s) ... -h|? : Usage -b : BER (default is PER) -u : Unaligned (default is aligned) -p proto : Protocol name (implies -S). Default is module-name from input_file (renamed by #.MODULE if present) -o name : Output files name core (default is <proto>) -O dir : Output directory for dissector -c cnf_file : Conformance file -I path : Path for conformance file includes -e : Create conformance file for exported types -E : Just create conformance file for exported types -S : Single output for multiple modules -s template : Single file output (template is input file without .c/.h extension) -k : Keep intermediate files though single file output is used -L : Suppress #line directive from .cnf file -D dir : Directory for input_file(s) (default: '.') -C : Add check for SIZE constraints -r prefix : Remove the prefix from type names input_file(s) : Input ASN.1 file(s) -d dbg : Debug output, dbg = [l][y][p][s][a][t][c][m][o] l - lex y - yacc p - parsing s - internal ASN.1 structure a - list of assignments t - tables c - conformance values m - list of compiled modules with dependency o - list of output files """) def eth_main(): global input_file global g_conform global lexer print("ASN.1 to Wireshark dissector compiler"); try: opts, args = getopt.getopt(sys.argv[1:], "h?d:D:buXp:FTo:O:c:I:eESs:kLCr:"); except getopt.GetoptError: eth_usage(); sys.exit(2) if len(args) < 1: eth_usage(); sys.exit(2) conform = EthCnf() conf_to_read = None output = EthOut() ectx = EthCtx(conform, output) ectx.encoding = 'per' ectx.proto_opt = None ectx.fld_opt = {} ectx.tag_opt = False ectx.outnm_opt = None ectx.aligned = True ectx.dbgopt = '' ectx.new = True ectx.expcnf = False ectx.justexpcnf = False ectx.merge_modules = False ectx.group_by_prot = False ectx.conform.last_group = 0 ectx.conform.suppress_line = False; ectx.output.outnm = None ectx.output.single_file = None ectx.constraints_check = False; for o, a in opts: if o in ("-h", "-?"): eth_usage(); sys.exit(2) if o in ("-c",): conf_to_read = a if o in ("-I",): ectx.conform.include_path.append(a) if o in ("-E",): ectx.expcnf = True ectx.justexpcnf = True if o in ("-D",): ectx.srcdir = a if o in ("-C",): ectx.constraints_check = True if o in ("-X",): warnings.warn("Command line option -X is obsolete and can be removed") if o in ("-T",): warnings.warn("Command line option -T is obsolete and can be removed") if conf_to_read: ectx.conform.read(conf_to_read) for o, a in opts: if o in ("-h", "-?", "-c", "-I", "-E", "-D", "-C", "-X", "-T"): pass # already processed else: par = [] if a: par.append(a) ectx.conform.set_opt(o, par, "commandline", 0) (ld, yd, pd) = (0, 0, 0); if 
ectx.dbg('l'): ld = 1 if ectx.dbg('y'): yd = 1 if ectx.dbg('p'): pd = 2 lexer = lex.lex(debug=ld) yacc.yacc(method='LALR', debug=yd) g_conform = ectx.conform ast = [] for fn in args: input_file = fn lexer.lineno = 1 if (ectx.srcdir): fn = ectx.srcdir + '/' + fn # Read ASN.1 definition, trying one of the common encodings. data = open(fn, "rb").read() for encoding in ('utf-8', 'windows-1252'): try: data = data.decode(encoding) break except: warnings.warn_explicit("Decoding %s as %s failed, trying next." % (fn, encoding), UserWarning, '', 0) # Py2 compat, name.translate in eth_output_hf_arr fails with unicode if not isinstance(data, str): data = data.encode('utf-8') ast.extend(yacc.parse(data, lexer=lexer, debug=pd)) ectx.eth_clean() if (ectx.merge_modules): # common output for all module ectx.eth_clean() for module in ast: eth_do_module(module, ectx) ectx.eth_prepare() ectx.eth_do_output() elif (ectx.groups()): # group by protocols/group groups = [] pr2gr = {} if (ectx.group_by_prot): # group by protocols for module in ast: prot = module.get_proto(ectx) if prot not in pr2gr: pr2gr[prot] = len(groups) groups.append([]) groups[pr2gr[prot]].append(module) else: # group by groups pass for gm in (groups): ectx.eth_clean() for module in gm: eth_do_module(module, ectx) ectx.eth_prepare() ectx.eth_do_output() else: # output for each module for module in ast: ectx.eth_clean() eth_do_module(module, ectx) ectx.eth_prepare() ectx.eth_do_output() if ectx.dbg('m'): ectx.dbg_modules() if ectx.dbg('c'): ectx.conform.dbg_print() if not ectx.justexpcnf: ectx.conform.unused_report() if ectx.dbg('o'): ectx.output.dbg_print() ectx.output.make_single_file() # Python compiler def main(): testfn = testyacc if len (sys.argv) == 1: while True: s = input ('Query: ') if len (s) == 0: break testfn (s, 'console', {}) else: defined_dict = {} for fn in sys.argv [1:]: f = open (fn, "r") testfn (f.read (), fn, defined_dict) f.close () lexer.lineno = 1 #--- BODY --------------------------------------------------------------------- if __name__ == '__main__': if (os.path.splitext(os.path.basename(sys.argv[0]))[0].lower() in ('asn2wrs', 'asn2eth')): eth_main() else: main() #------------------------------------------------------------------------------ # # Editor modelines - http://www.wireshark.org/tools/modelines.html # # c-basic-offset: 4; tab-width: 8; indent-tabs-mode: nil # vi: set shiftwidth=4 tabstop=8 expandtab: # :indentSize=4:tabSize=8:noTabs=true:<|fim▁end|>
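The asn2wrs row above leans on PLY's convention that each `p_*` function carries its grammar production in its docstring, plus lexer-state push/pop (the `braceignore` trick) to skip `{...}` blocks it does not want to parse. A minimal, self-contained sketch of the docstring-grammar pattern; the token set and rules here are invented for illustration, not the real ASN.1 grammar:

```python
# Minimal PLY sketch of the docstring-grammar convention used above.
import ply.lex as lex
import ply.yacc as yacc

tokens = ('IDENT', 'LBRACE', 'RBRACE')

t_IDENT = r'[A-Za-z][A-Za-z0-9]*'
t_LBRACE = r'\{'
t_RBRACE = r'\}'
t_ignore = ' \t\n'

def t_error(t):
    t.lexer.skip(1)

def p_block(t):
    'block : LBRACE items RBRACE'
    t[0] = t[2]              # the production lives in the docstring; t is the symbol stack

def p_items_multi(t):
    'items : items IDENT'
    t[0] = t[1] + [t[2]]

def p_items_single(t):
    'items : IDENT'
    t[0] = [t[1]]

def p_error(t):
    raise SyntaxError(t)

lexer = lex.lex()
parser = yacc.yacc(write_tables=False)
print(parser.parse('{ a b c }', lexer=lexer))   # ['a', 'b', 'c']
```

The same mechanism scales: asn2wrs simply defines hundreds of such `p_*` rules at module level and lets `yacc.yacc()` collect them from the module namespace.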
<|file_name|>ScriptingFileSystemObject.py<|end_file_name|><|fim▁begin|>import os import string import random import logging from thug.ActiveX.modules import WScriptShell from thug.ActiveX.modules import TextStream from thug.ActiveX.modules import File from thug.ActiveX.modules import Folder from thug.OS.Windows import win32_files from thug.OS.Windows import win32_folders log = logging.getLogger("Thug") def BuildPath(self, arg0, arg1): # pylint:disable=unused-argument log.ThugLogging.add_behavior_warn(f'[Scripting.FileSystemObject ActiveX] BuildPath("{arg0}", "{arg1}")') return f"{arg0}\\{arg1}" def CopyFile(self, source, destination, overwritefiles = False): # pylint:disable=unused-argument log.ThugLogging.add_behavior_warn(f'[Scripting.FileSystemObject ActiveX] CopyFile("{source}", "{destination}")') log.TextFiles[destination] = log.TextFiles[source] def DeleteFile(self, filespec, force = False): # pylint:disable=unused-argument log.ThugLogging.add_behavior_warn(f'[Scripting.FileSystemObject ActiveX] DeleteFile("{filespec}", {force})') def CreateTextFile(self, filename, overwrite = False, _unicode = False): # pylint:disable=unused-argument log.ThugLogging.add_behavior_warn(f'[Scripting.FileSystemObject ActiveX] CreateTextFile("{filename}", ' f'"{overwrite}", ' f'"{_unicode}")') stream = TextStream.TextStream() stream._filename = filename return stream def CreateFolder(self, path): # pylint:disable=unused-argument log.ThugLogging.add_behavior_warn(f'[Scripting.FileSystemObject ActiveX] CreateFolder("{path}")') return Folder.Folder(path) def FileExists(self, filespec): # pylint:disable=unused-argument log.ThugLogging.add_behavior_warn(f'[Scripting.FileSystemObject ActiveX] FileExists("{filespec}")') if not filespec: return True if filespec.lower() in win32_files: return True if getattr(log, "TextFiles", None) and filespec in log.TextFiles: return True return False def FolderExists(self, folder): # pylint:disable=unused-argument log.ThugLogging.add_behavior_warn(f'[Scripting.FileSystemObject ActiveX] FolderExists("{folder}")') return str(folder).lower() in win32_folders def GetExtensionName(self, path): # pylint:disable=unused-argument log.ThugLogging.add_behavior_warn(f'[Scripting.FileSystemObject ActiveX] GetExtensionName("{path}")') ext = os.path.splitext(path)[1] return ext if ext else "" <|fim▁hole|> def GetSpecialFolder(self, arg): log.ThugLogging.add_behavior_warn(f'[Scripting.FileSystemObject ActiveX] GetSpecialFolder("{arg}")') arg = int(arg) folder = '' if arg == 0: folder = WScriptShell.ExpandEnvironmentStrings(self, "%windir%") elif arg == 1: folder = WScriptShell.ExpandEnvironmentStrings(self, "%SystemRoot%\\system32") elif arg == 2: folder = WScriptShell.ExpandEnvironmentStrings(self, "%TEMP%") log.ThugLogging.add_behavior_warn(f'[Scripting.FileSystemObject ActiveX] Returning {folder} for GetSpecialFolder("{arg}")') return folder def GetTempName(self): # pylint:disable=unused-argument log.ThugLogging.add_behavior_warn('[Scripting.FileSystemObject ActiveX] GetTempName()') return ''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(8)) def MoveFile(self, source, destination): # pylint:disable=unused-argument log.ThugLogging.add_behavior_warn(f'[Scripting.FileSystemObject ActiveX] MoveFile("{source}", "{destination}")') log.TextFiles[destination] = log.TextFiles[source] del log.TextFiles[source] def OpenTextFile(self, sFilePathAndName, ForWriting = True, flag = True): log.ThugLogging.add_behavior_warn(f'[Scripting.FileSystemObject ActiveX] 
OpenTextFile("{sFilePathAndName}", ' f'"{ForWriting}" ,' f'"{flag}")') log.ThugLogging.log_exploit_event(self._window.url, "Scripting.FileSystemObject ActiveX", "OpenTextFile", data = { "filename" : sFilePathAndName, "ForWriting": ForWriting, "flag" : flag }, forward = False) if getattr(log, 'TextFiles', None) is None: log.TextFiles = {} if sFilePathAndName in log.TextFiles: return log.TextFiles[sFilePathAndName] stream = TextStream.TextStream() stream._filename = sFilePathAndName if log.ThugOpts.local and sFilePathAndName in (log.ThugLogging.url, ): # pragma: no cover with open(sFilePathAndName, encoding = 'utf-8', mode = 'r') as fd: data = fd.read() stream.Write(data) log.TextFiles[sFilePathAndName] = stream return stream<|fim▁end|>
def GetFile(self, filespec): # pylint:disable=unused-argument log.ThugLogging.add_behavior_warn(f'[Scripting.FileSystemObject ActiveX] GetFile("{filespec}")') return File.File(filespec)
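Every emulated method in the thug row above follows one shape: log the call for the analyst, then hand back a harmless in-memory stub (a `TextStream` cached in a shared `log.TextFiles` dict) instead of touching the real filesystem. A hedged sketch of that emulate-log-and-stub pattern; the class and function names below are invented for illustration:

```python
# Sketch of the emulate-log-and-stub pattern; not thug's actual API.
import logging

log = logging.getLogger("sketch")

class EmulatedTextStream:
    def __init__(self, filename):
        self.filename = filename
        self._buf = []

    def Write(self, data):
        self._buf.append(data)   # capture, never write to disk

_open_files = {}                 # stands in for log.TextFiles in the real module

def OpenTextFile(name, for_writing=True):
    # 1) record the behaviour for later analysis, 2) return a benign stub
    log.warning('[Sketch ActiveX] OpenTextFile(%r, %r)', name, for_writing)
    if name not in _open_files:
        _open_files[name] = EmulatedTextStream(name)
    return _open_files[name]

stream = OpenTextFile("C:\\temp\\payload.vbs")
stream.Write("...")
```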
<|file_name|>listconfig.py<|end_file_name|><|fim▁begin|>from evostream.default import api from evostream.management.base import BaseEvoStreamCommand class Command(BaseEvoStreamCommand): help = 'Returns a list with all push/pull configurations.' <|fim▁hole|> def get_results(self, *args, **options): return api.list_config()<|fim▁end|>
requires_system_checks = False
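The listconfig row is a thin wrapper: `BaseEvoStreamCommand` presumably routes the `get_results()` return value through shared output handling. The underlying building block is Django's stock management-command protocol, sketched here with illustrative names:

```python
# Plain Django management-command shape that the evostream base class wraps.
from django.core.management.base import BaseCommand

class Command(BaseCommand):
    help = 'Example: print a configuration listing.'

    def handle(self, *args, **options):
        # the evostream base class formats get_results() output centrally;
        # a plain command just writes to self.stdout
        self.stdout.write(str({'push': [], 'pull': []}))
```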
<|file_name|>admonitions.py<|end_file_name|><|fim▁begin|># $Id: admonitions.py 7681 2013-07-12 07:52:27Z milde $ # Author: David Goodger <[email protected]> # Copyright: This module has been placed in the public domain. """ Admonition directives. """ __docformat__ = 'reStructuredText' from docutils.parsers.rst import Directive from docutils.parsers.rst import states, directives from docutils.parsers.rst.roles import set_classes from docutils import nodes <|fim▁hole|>class BaseAdmonition(Directive): final_argument_whitespace = True option_spec = {'class': directives.class_option, 'name': directives.unchanged} has_content = True node_class = None """Subclasses must set this to the appropriate admonition node class.""" def run(self): set_classes(self.options) self.assert_has_content() text = '\n'.join(self.content) admonition_node = self.node_class(text, **self.options) self.add_name(admonition_node) if self.node_class is nodes.admonition: title_text = self.arguments[0] textnodes, messages = self.state.inline_text(title_text, self.lineno) title = nodes.title(title_text, '', *textnodes) title.source, title.line = ( self.state_machine.get_source_and_line(self.lineno)) admonition_node += title admonition_node += messages if not 'classes' in self.options: admonition_node['classes'] += ['admonition-' + nodes.make_id(title_text)] self.state.nested_parse(self.content, self.content_offset, admonition_node) return [admonition_node] class Admonition(BaseAdmonition): required_arguments = 1 node_class = nodes.admonition class Attention(BaseAdmonition): node_class = nodes.attention class Caution(BaseAdmonition): node_class = nodes.caution class Danger(BaseAdmonition): node_class = nodes.danger class Error(BaseAdmonition): node_class = nodes.error class Hint(BaseAdmonition): node_class = nodes.hint class Important(BaseAdmonition): node_class = nodes.important class Note(BaseAdmonition): node_class = nodes.note class Tip(BaseAdmonition): node_class = nodes.tip class Warning(BaseAdmonition): node_class = nodes.warning<|fim▁end|>
<|file_name|>index.js<|end_file_name|><|fim▁begin|><|fim▁hole|>const assert = require('assert')<|fim▁end|>
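The admonitions row shows behaviour-by-class-attribute: every concrete directive is just `BaseAdmonition` plus a `node_class`. Registering a new admonition from outside docutils takes one subclass and one call; in this sketch the directive name and the reuse of `nodes.warning` are assumptions for illustration:

```python
# Hedged sketch: a custom admonition reusing the BaseAdmonition machinery.
from docutils import nodes
from docutils.parsers.rst import directives
from docutils.parsers.rst.directives.admonitions import BaseAdmonition

class Security(BaseAdmonition):
    # all parsing behaviour is inherited; only the node class changes
    node_class = nodes.warning

directives.register_directive('security', Security)
```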
<|file_name|>datatablecolresizedemo.ts<|end_file_name|><|fim▁begin|>import {Component,OnInit} from '@angular/core'; import {ROUTER_DIRECTIVES} from '@angular/router'; import {HTTP_PROVIDERS} from '@angular/http'; import {DataTable} from '../../../components/datatable/datatable'; import {CodeHighlighter} from '../../../components/codehighlighter/codehighlighter'; import {TabView} from '../../../components/tabview/tabview'; import {TabPanel} from '../../../components/tabview/tabpanel'; import {Car} from '../domain/car'; import {Column} from '../../../components/column/column'; import {DataTableSubmenu} from './datatablesubmenu.component'; import {CarService} from '../service/carservice'; @Component({ templateUrl: 'showcase/demo/datatable/datatablecolresizedemo.html', directives: [DataTable,Column,DataTableSubmenu,TabPanel,TabView,CodeHighlighter,ROUTER_DIRECTIVES], providers: [HTTP_PROVIDERS,CarService] }) export class DataTableColResizeDemo implements OnInit { cars: Car[]; constructor(private carService: CarService) { } ngOnInit() {<|fim▁hole|> } }<|fim▁end|>
this.carService.getCarsSmall().then(cars => this.cars = cars);
<|file_name|>0004_rmpub.py<|end_file_name|><|fim▁begin|>from south.db import db from django.db import models from askmeanything.models import * class Migration: def forwards(self, orm): "Write your forwards migration here" def backwards(self, orm): "Write your backwards migration here" models = { 'askmeanything.poll': { 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polls'", 'to': "orm['auth.User']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'open': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}), 'question': ('django.db.models.fields.CharField', [], {'max_length': '200'}) }, 'askmeanything.response': { 'answer': ('django.db.models.fields.CharField', [], {'max_length': '200'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'poll': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'responses'", 'to': "orm['askmeanything.Poll']"}), 'votes': ('django.db.models.fields.IntegerField', [], {'default': '0'}) }, 'auth.group': { 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'unique_together': "(('content_type', 'codename'),)"}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) },<|fim▁hole|> 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'contenttypes.contenttype': { 'Meta': {'unique_together': "(('app_label', 'model'),)", 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) } } complete_apps = ['askmeanything']<|fim▁end|>
'auth.user': { 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
<|file_name|>testkeys.rs<|end_file_name|><|fim▁begin|>extern crate safex; extern crate rustc_serialize; use safex::genesis::key_generation::KeyPair; fn main() { let our_key = KeyPair::create().ok().expect("error"); let the_secret = KeyPair::private_key_tobase64(our_key.secret); print!("your base64 private key {:?} \n", the_secret); let the_string = KeyPair::address_base58(&our_key.public); print!("your Hash160 Public Key: {:?} \n", the_string); let the_keys = KeyPair::from_secret(our_key.secret).unwrap(); let the_secret = KeyPair::private_key_tobase64(the_keys.secret); print!("your base64 private key {:?} \n", the_secret); let the_string = KeyPair::address_base58(&the_keys.public); print!("your Hash160 Public Key: {:?} \n", the_string); let the_string = KeyPair::scriptaddress_base58(&the_keys.public); print!("your ScriptHash Hash160 Public Key: {:?} \n", the_string); let the_keys = KeyPair::from_secret(our_key.secret).unwrap(); let mut this_vec: Vec<u8> = Vec::new(); this_vec.push(099999); let our_signature = KeyPair::sign(&the_keys.secret, this_vec); let mut this_vec: Vec<u8> = Vec::new(); this_vec.push(099999); let extract_pub = KeyPair::recover(our_signature, this_vec); let the_string = KeyPair::address_base58(&extract_pub); print!("your Hash160 Public Key: {:?} \n", the_string); let mut this_vec: Vec<u8> = Vec::new(); this_vec.push(099999); let our_signature = KeyPair::sign(&the_keys.secret, this_vec); let mut this_vec: Vec<u8> = Vec::new(); this_vec.push(099999); let verified = KeyPair::verify(&extract_pub, our_signature,this_vec); print!("Verification status: {:?}\n", verified); let our_key = KeyPair::create().ok().expect("error"); let the_secret = KeyPair::private_key_tobase64(our_key.secret); print!("your base64 private key {:?} \n", the_secret); let the_string = KeyPair::address_base58(&our_key.public); print!("your Hash160 Public Key: {:?} \n", the_string); let the_newkeys = KeyPair::keypair_frombase64(the_secret); let the_string = KeyPair::address_base58(&the_newkeys.public); print!("your Hash160 Public Key: {:?} \n", the_string); let our_key = KeyPair::create().ok().expect("error"); let the_secret = KeyPair::private_key_towif(our_key.secret); print!("your wif private key {:?} \n", the_secret); <|fim▁hole|>}<|fim▁end|>
let our_key = KeyPair::create().ok().expect("error"); println!("{:?}", our_key.secret);
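The Rust test above walks a create, sign, recover, verify roundtrip on secp256k1-style keys. A rough Python analogue of just the sign/verify property, using Ed25519 from the `cryptography` package; note that Ed25519 offers no public-key recovery, so the `recover` step has no counterpart here:

```python
# Sign/verify roundtrip sketch; a different curve than safex uses.
from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey

secret = Ed25519PrivateKey.generate()
public = secret.public_key()

message = bytes([99, 99, 99])
signature = secret.sign(message)

public.verify(signature, message)   # raises InvalidSignature on mismatch
print("verification ok")
```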
<|file_name|>ps1a.py<|end_file_name|><|fim▁begin|><|fim▁hole|>f = 3<|fim▁end|>
d = 1 e = 2
<|file_name|>collection.d.ts<|end_file_name|><|fim▁begin|>export declare var Map: MapConstructor; export declare var Set: SetConstructor; export declare class MapWrapper { static clone<K, V>(m: Map<K, V>): Map<K, V>; static createFromStringMap<T>(stringMap: { [key: string]: T; }): Map<string, T>; static toStringMap<T>(m: Map<string, T>): { [key: string]: T; }; static createFromPairs(pairs: any[]): Map<any, any>; static clearValues(m: Map<any, any>): void; static iterable<T>(m: T): T; static keys<K>(m: Map<K, any>): K[]; static values<V>(m: Map<any, V>): V[]; } /** * Wraps Javascript Objects */ export declare class StringMapWrapper { static create(): { [k: string]: any; }; static contains(map: { [key: string]: any; }, key: string): boolean; static get<V>(map: { [key: string]: V; }, key: string): V; static set<V>(map: { [key: string]: V; }, key: string, value: V): void; static keys(map: { [key: string]: any; }): string[]; static isEmpty(map: { [key: string]: any; }): boolean; static delete(map: { [key: string]: any; }, key: string): void; static forEach<K, V>(map: { [key: string]: V; }, callback: Function): void; static merge<V>(m1: { [key: string]: V; }, m2: { [key: string]: V; }): { [key: string]: V; }; static equals<V>(m1: { [key: string]: V; }, m2: { [key: string]: V; }): boolean; } export interface Predicate<T> { (value: T, index?: number, array?: T[]): boolean;<|fim▁hole|>} export declare class ListWrapper { static createFixedSize(size: number): any[]; static createGrowableSize(size: number): any[]; static clone<T>(array: T[]): T[]; static forEachWithIndex<T>(array: T[], fn: (t: T, n: number) => void): void; static first<T>(array: T[]): T; static last<T>(array: T[]): T; static indexOf<T>(array: T[], value: T, startIndex?: number): number; static contains<T>(list: T[], el: T): boolean; static reversed<T>(array: T[]): T[]; static concat(a: any[], b: any[]): any[]; static insert<T>(list: T[], index: number, value: T): void; static removeAt<T>(list: T[], index: number): T; static removeAll<T>(list: T[], items: T[]): void; static remove<T>(list: T[], el: T): boolean; static clear(list: any[]): void; static isEmpty(list: any[]): boolean; static fill(list: any[], value: any, start?: number, end?: number): void; static equals(a: any[], b: any[]): boolean; static slice<T>(l: T[], from?: number, to?: number): T[]; static splice<T>(l: T[], from: number, length: number): T[]; static sort<T>(l: T[], compareFn?: (a: T, b: T) => number): void; static toString<T>(l: T[]): string; static toJSON<T>(l: T[]): string; static maximum<T>(list: T[], predicate: (t: T) => number): T; } export declare function isListLikeIterable(obj: any): boolean; export declare function iterateListLike(obj: any, fn: Function): void; export declare class SetWrapper { static createFromList<T>(lst: T[]): Set<T>; static has<T>(s: Set<T>, key: T): boolean; static delete<K>(m: Set<K>, k: K): void; }<|fim▁end|>
<|file_name|>RenderingHintTest.java<|end_file_name|><|fim▁begin|>package uk.co.bluegecko.core.swing.table.rendering; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import java.awt.Color; import java.awt.Font; import org.junit.Before; import org.junit.Test; public class RenderingHintTest { private Font font; private Color color; @Before public final void setUp() { font = Font.decode( "Monospaced-12" ); color = new Color( 0x808080 ); } @Test public final void testWeightExceeds() { final FontHint min = new FontHint( HintWeight.MIN_WEIGHT ); final FontHint low = new FontHint( HintWeight.LOW_WEIGHT ); final FontHint def = new FontHint( HintWeight.DEFAULT_WEIGHT ); final FontHint selected = new FontHint( HintWeight.SELECTED_WEIGHT ); final FontHint high = new FontHint( HintWeight.HIGH_WEIGHT ); final FontHint focused = new FontHint( HintWeight.FOCUSED_WEIGHT ); final FontHint max = new FontHint( HintWeight.MAX_WEIGHT ); assertFalse( "min-min", min.exceeds( min ) ); assertFalse( "min-low", min.exceeds( low ) ); assertTrue( "low-min", low.exceeds( min ) ); assertTrue( "default-low", def.exceeds( low ) ); assertTrue( "selected-default", selected.exceeds( def ) ); assertTrue( "high-selected", high.exceeds( selected ) ); assertTrue( "focused-high", focused.exceeds( high ) ); assertTrue( "max-focused", max.exceeds( focused ) ); } @Test public final void testGetValueNone() { assertEquals( font, new FontHint( HintWeight.MAX_WEIGHT ).getValue( font ) ); assertNull( new FontHint( HintWeight.MAX_WEIGHT ).getValue() ); } @Test public final void testGetValueNonDerived() { final Font value = Font.decode( "Monospaced-BOLD-14" ); assertEquals( value, new FontHint( HintWeight.MAX_WEIGHT, value ).getValue( font ) ); assertEquals( value, new FontHint( HintWeight.MAX_WEIGHT, value ).getValue() ); } @Test public final void testGetValueDerived() { final Font value = Font.decode( "Monospaced-14" ); final FontHint fontHint = new FontHint( HintWeight.MAX_WEIGHT ) { private static final long serialVersionUID = 1L; @Override protected Font derive( final Font original ) { return original.deriveFont( 14.0f ); } }; assertEquals( value, fontHint.getValue( font ) ); assertNull( fontHint.getValue() ); } @Test public final void testFontHintSize() { final Font value = Font.decode( "Monospaced-14" ); assertEquals( value, FontHint.size( HintWeight.MAX_WEIGHT, 14 ) .getValue( font ) ); } @Test public final void testFontHintLarger() { final Font value = Font.decode( "Monospaced-14" ); assertEquals( value, FontHint.larger( HintWeight.MAX_WEIGHT, 2 ) .getValue( font ) ); } @Test public final void testFontHintSmaller() { final Font value = Font.decode( "Monospaced-10" ); assertEquals( value, FontHint.smaller( HintWeight.MAX_WEIGHT, 2 ) .getValue( font ) ); } @Test public final void testFontHintScaled() { final Font value = Font.decode( "Monospaced-6" ); assertEquals( value, FontHint.scaled( HintWeight.MAX_WEIGHT, 0.5f ) .getValue( font ) ); } @Test public final void testFontHintStyle() { final Font value = Font.decode( "Monospaced-BOLD-12" ); assertEquals( value, FontHint.style( HintWeight.MAX_WEIGHT, Font.BOLD ) .getValue( font ) ); } @Test public final void testFontHintStyleAndSize() { final Font value = Font.decode( "Monospaced-BOLD-14" ); assertEquals( value, FontHint.style( HintWeight.MAX_WEIGHT, Font.BOLD, 14 ) .getValue( font ) ); } <|fim▁hole|> public final void testForegroundHintDarker() { final Color 
value = new Color( 0x595959 ); assertEquals( value, ForegroundHint.darker( HintWeight.MAX_WEIGHT ) .getValue( color ) ); } @Test public final void testForegroundHintBrighter() { final Color value = new Color( 0xB6B6B6 ); assertEquals( value, ForegroundHint.brighter( HintWeight.MAX_WEIGHT ) .getValue( color ) ); } @Test public final void testBackgroundHintDarker() { final Color value = new Color( 0x595959 ); assertEquals( value, BackgroundHint.darker( HintWeight.MAX_WEIGHT ) .getValue( color ) ); } @Test public final void testBackgroundHintBrighter() { final Color value = new Color( 0xB6B6B6 ); assertEquals( value, BackgroundHint.brighter( HintWeight.MAX_WEIGHT ) .getValue( color ) ); } }<|fim▁end|>
@Test
<|file_name|>floats.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ use crate::block::FormattingContextType; use crate::flow::{Flow, FlowFlags, GetBaseFlow, ImmutableFlowUtils}; use crate::persistent_list::PersistentList; use app_units::{Au, MAX_AU}; use std::cmp::{max, min}; use std::fmt; use style::computed_values::float::T as StyleFloat; use style::logical_geometry::{LogicalRect, LogicalSize, WritingMode}; use style::values::computed::Size; /// The kind of float: left or right. #[derive(Clone, Copy, Debug, Serialize)] pub enum FloatKind { Left, Right, } impl FloatKind { pub fn from_property(property: StyleFloat) -> Option<FloatKind> { match property { StyleFloat::None => None, StyleFloat::Left => Some(FloatKind::Left), StyleFloat::Right => Some(FloatKind::Right), } } } /// The kind of clearance: left, right, or both. #[derive(Clone, Copy)] pub enum ClearType { Left, Right, Both, } /// Information about a single float. #[derive(Clone, Copy)] struct Float { /// The boundaries of this float. bounds: LogicalRect<Au>, /// The kind of float: left or right. kind: FloatKind, } impl fmt::Debug for Float { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "bounds={:?} kind={:?}", self.bounds, self.kind) } } /// Information about the floats next to a flow. #[derive(Clone)] struct FloatList { /// Information about each of the floats here. floats: PersistentList<Float>, /// Cached copy of the maximum block-start offset of the float. max_block_start: Option<Au>, } impl FloatList { fn new() -> FloatList { FloatList { floats: PersistentList::new(), max_block_start: None, } } /// Returns true if the list is allocated and false otherwise. If false, there are guaranteed /// not to be any floats. fn is_present(&self) -> bool { self.floats.len() > 0 } } impl fmt::Debug for FloatList { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "max_block_start={:?} floats={}", self.max_block_start, self.floats.len() )?; for float in self.floats.iter() { write!(f, " {:?}", float)?; } Ok(()) } } /// All the information necessary to place a float. pub struct PlacementInfo { /// The dimensions of the float. pub size: LogicalSize<Au>, /// The minimum block-start of the float, as determined by earlier elements. pub ceiling: Au, /// The maximum inline-end position of the float, generally determined by the containing block. pub max_inline_size: Au, /// The kind of float. pub kind: FloatKind, } impl fmt::Debug for PlacementInfo { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "size={:?} ceiling={:?} max_inline_size={:?} kind={:?}", self.size, self.ceiling, self.max_inline_size, self.kind ) } } fn range_intersect( block_start_1: Au, block_end_1: Au, block_start_2: Au, block_end_2: Au, ) -> (Au, Au) { ( max(block_start_1, block_start_2), min(block_end_1, block_end_2), ) } /// Encapsulates information about floats. This is optimized to avoid allocation if there are /// no floats, and to avoid copying when translating the list of floats downward. #[derive(Clone)] pub struct Floats { /// The list of floats. list: FloatList, /// The offset of the flow relative to the first float. offset: LogicalSize<Au>, /// The writing mode of these floats. 
pub writing_mode: WritingMode, } impl fmt::Debug for Floats { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if !self.list.is_present() { write!(f, "[empty]") } else { write!(f, "offset={:?} floats={:?}", self.offset, self.list) } } } impl Floats { /// Creates a new `Floats` object. pub fn new(writing_mode: WritingMode) -> Floats { Floats { list: FloatList::new(), offset: LogicalSize::zero(writing_mode), writing_mode: writing_mode, } } /// Adjusts the recorded offset of the flow relative to the first float. pub fn translate(&mut self, delta: LogicalSize<Au>) { self.offset = self.offset + delta } /// Returns the position of the last float in flow coordinates. pub fn last_float_pos(&self) -> Option<LogicalRect<Au>> { match self.list.floats.front() { None => None, Some(float) => Some(float.bounds.translate_by_size(self.offset)), } } /// Returns a rectangle that encloses the region from block-start to block-start + block-size, /// with inline-size small enough that it doesn't collide with any floats. max_x is the /// inline-size beyond which floats have no effect. (Generally this is the containing block /// inline-size.) pub fn available_rect( &self, block_start: Au, block_size: Au, max_x: Au, ) -> Option<LogicalRect<Au>> { let list = &self.list; let block_start = block_start - self.offset.block; debug!("available_rect: trying to find space at {:?}", block_start); // Relevant dimensions for the inline-end-most inline-start float let mut max_inline_start = Au(0) - self.offset.inline; let mut l_block_start = None; let mut l_block_end = None; // Relevant dimensions for the inline-start-most inline-end float let mut min_inline_end = max_x - self.offset.inline; let mut r_block_start = None; let mut r_block_end = None; // Find the float collisions for the given range in the block direction. for float in list.floats.iter() { debug!("available_rect: Checking for collision against float"); let float_pos = float.bounds.start; let float_size = float.bounds.size; debug!("float_pos: {:?}, float_size: {:?}", float_pos, float_size); match float.kind { FloatKind::Left if float_pos.i + float_size.inline > max_inline_start && float_pos.b + float_size.block > block_start && float_pos.b < block_start + block_size => { max_inline_start = float_pos.i + float_size.inline; l_block_start = Some(float_pos.b); l_block_end = Some(float_pos.b + float_size.block); debug!( "available_rect: collision with inline_start float: new \ max_inline_start is {:?}", max_inline_start ); }, FloatKind::Right if float_pos.i < min_inline_end && float_pos.b + float_size.block > block_start && float_pos.b < block_start + block_size => { min_inline_end = float_pos.i; r_block_start = Some(float_pos.b); r_block_end = Some(float_pos.b + float_size.block); debug!( "available_rect: collision with inline_end float: new min_inline_end \ is {:?}", min_inline_end ); }, FloatKind::Left | FloatKind::Right => {}, } } // Extend the vertical range of the rectangle to the closest floats. // If there are floats on both sides, take the intersection of the // two areas. Also make sure we never return a block-start smaller than the // given upper bound. 
let (block_start, block_end) = match (r_block_start, r_block_end, l_block_start, l_block_end) { ( Some(r_block_start), Some(r_block_end), Some(l_block_start), Some(l_block_end), ) => range_intersect( max(block_start, r_block_start), r_block_end, max(block_start, l_block_start), l_block_end, ), (None, None, Some(l_block_start), Some(l_block_end)) => { (max(block_start, l_block_start), l_block_end) }, (Some(r_block_start), Some(r_block_end), None, None) => { (max(block_start, r_block_start), r_block_end) }, (None, None, None, None) => return None, _ => panic!("Reached unreachable state when computing float area"), }; // FIXME(eatkinson): This assertion is too strong and fails in some cases. It is OK to // return negative inline-sizes since we check against that inline-end away, but we should // still understand why they occur and add a stronger assertion here. // assert!(max_inline-start < min_inline-end); assert!(block_start <= block_end, "Float position error"); Some(LogicalRect::new( self.writing_mode, max_inline_start + self.offset.inline, block_start + self.offset.block, min_inline_end - max_inline_start, block_end - block_start, )) } /// Adds a new float to the list. pub fn add_float(&mut self, info: &PlacementInfo) { let new_info = PlacementInfo { size: info.size, ceiling: match self.list.max_block_start { None => info.ceiling, Some(max_block_start) => max(info.ceiling, max_block_start + self.offset.block), }, max_inline_size: info.max_inline_size, kind: info.kind, }; debug!("add_float: added float with info {:?}", new_info); let new_float = Float { bounds: LogicalRect::from_point_size( self.writing_mode, self.place_between_floats(&new_info).start - self.offset, info.size, ), kind: info.kind, }; self.list.floats = self.list.floats.prepend_elem(new_float); self.list.max_block_start = match self.list.max_block_start { None => Some(new_float.bounds.start.b), Some(max_block_start) => Some(max(max_block_start, new_float.bounds.start.b)), } } /// Given the three sides of the bounding rectangle in the block-start direction, finds the /// largest block-size that will result in the rectangle not colliding with any floats. Returns /// `None` if that block-size is infinite. fn max_block_size_for_bounds( &self, inline_start: Au, block_start: Au, inline_size: Au, ) -> Option<Au> { let list = &self.list; let block_start = block_start - self.offset.block; let inline_start = inline_start - self.offset.inline; let mut max_block_size = None; for float in list.floats.iter() { if float.bounds.start.b + float.bounds.size.block > block_start && float.bounds.start.i + float.bounds.size.inline > inline_start && float.bounds.start.i < inline_start + inline_size { let new_y = float.bounds.start.b; max_block_size = Some(min(max_block_size.unwrap_or(new_y), new_y)); } } max_block_size.map(|h| h + self.offset.block) } /// Given placement information, finds the closest place a fragment can be positioned without /// colliding with any floats. pub fn place_between_floats(&self, info: &PlacementInfo) -> LogicalRect<Au> { debug!("place_between_floats: Placing object with {:?}", info.size); // If no floats, use this fast path. 
if !self.list.is_present() { match info.kind { FloatKind::Left => { return LogicalRect::new( self.writing_mode, Au(0), info.ceiling, info.max_inline_size, MAX_AU, ); }, FloatKind::Right => { return LogicalRect::new( self.writing_mode, info.max_inline_size - info.size.inline, info.ceiling, info.max_inline_size, MAX_AU, ); }, } } // Can't go any higher than previous floats or previous elements in the document. let mut float_b = info.ceiling; loop { let maybe_location = self.available_rect(float_b, info.size.block, info.max_inline_size); debug!( "place_float: got available rect: {:?} for block-pos: {:?}", maybe_location, float_b ); match maybe_location { // If there are no floats blocking us, return the current location // TODO(eatkinson): integrate with overflow None => { return match info.kind { FloatKind::Left => LogicalRect::new( self.writing_mode, Au(0), float_b, info.max_inline_size, MAX_AU, ), FloatKind::Right => LogicalRect::new( self.writing_mode, info.max_inline_size - info.size.inline, float_b, info.max_inline_size, MAX_AU, ), }; }, Some(rect) => { assert_ne!( rect.start.b + rect.size.block, float_b, "Non-terminating float placement" ); // Place here if there is enough room if rect.size.inline >= info.size.inline { let block_size = self.max_block_size_for_bounds( rect.start.i, rect.start.b, rect.size.inline, ); let block_size = block_size.unwrap_or(MAX_AU); return match info.kind { FloatKind::Left => LogicalRect::new( self.writing_mode, rect.start.i, float_b, rect.size.inline, block_size, ), FloatKind::Right => LogicalRect::new( self.writing_mode, rect.start.i + rect.size.inline - info.size.inline, float_b, rect.size.inline, block_size, ), }; } // Try to place at the next-lowest location. // Need to be careful of fencepost errors. float_b = rect.start.b + rect.size.block; }, } } } pub fn clearance(&self, clear: ClearType) -> Au { let list = &self.list; let mut clearance = Au(0); for float in list.floats.iter() { match (clear, float.kind) { (ClearType::Left, FloatKind::Left) | (ClearType::Right, FloatKind::Right) | (ClearType::Both, _) => { let b = self.offset.block + float.bounds.start.b + float.bounds.size.block; clearance = max(clearance, b); }, _ => {}, } } clearance } pub fn is_present(&self) -> bool { self.list.is_present() } } /// The speculated inline sizes of floats flowing through or around a flow (depending on whether /// the flow is a block formatting context). These speculations are always *upper bounds*; the /// actual inline sizes might be less. Note that this implies that a speculated value of zero is a /// guarantee that there will be no floats on that side. /// /// This is used for two purposes: (a) determining whether we can lay out blocks in parallel; (b) /// guessing the inline-sizes of block formatting contexts in an effort to lay them out in /// parallel. #[derive(Clone, Copy)] pub struct SpeculatedFloatPlacement { /// The estimated inline size (an upper bound) of the left floats flowing through this flow. pub left: Au, /// The estimated inline size (an upper bound) of the right floats flowing through this flow. pub right: Au, } impl fmt::Debug for SpeculatedFloatPlacement { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "L {:?} R {:?}", self.left, self.right) } } impl SpeculatedFloatPlacement { /// Returns a `SpeculatedFloatPlacement` objects with both left and right speculated inline /// sizes initialized to zero. 
pub fn zero() -> SpeculatedFloatPlacement { SpeculatedFloatPlacement { left: Au(0), right: Au(0), } } /// Given the speculated inline size of the floats out for the inorder predecessor of this /// flow, computes the speculated inline size of the floats flowing in. pub fn compute_floats_in(&mut self, flow: &mut dyn Flow) { let base_flow = flow.base(); if base_flow.flags.contains(FlowFlags::CLEARS_LEFT) { self.left = Au(0) } if base_flow.flags.contains(FlowFlags::CLEARS_RIGHT) { self.right = Au(0) } } /// Given the speculated inline size of the floats out for this flow's last child, computes the /// speculated inline size of the floats out for this flow. pub fn compute_floats_out(&mut self, flow: &mut dyn Flow) { if flow.is_block_like() { let block_flow = flow.as_block(); if block_flow.formatting_context_type() != FormattingContextType::None { *self = block_flow.base.speculated_float_placement_in; } else { if self.left > Au(0) || self.right > Au(0) { let speculated_inline_content_edge_offsets = block_flow.fragment.guess_inline_content_edge_offsets(); if self.left > Au(0) && speculated_inline_content_edge_offsets.start > Au(0) { self.left = self.left + speculated_inline_content_edge_offsets.start } if self.right > Au(0) && speculated_inline_content_edge_offsets.end > Au(0) { self.right = self.right + speculated_inline_content_edge_offsets.end<|fim▁hole|> self.left, block_flow.base.speculated_float_placement_in.left, ); self.right = max( self.right, block_flow.base.speculated_float_placement_in.right, ); } } let base_flow = flow.base(); if !base_flow.flags.is_float() { return; } let mut float_inline_size = base_flow.intrinsic_inline_sizes.preferred_inline_size; if float_inline_size == Au(0) { if flow.is_block_like() { // Hack: If the size of the float is not fixed, then there's no // way we can guess at its size now. So just pick an arbitrary // nonzero value (in this case, 1px) so that the layout // traversal logic will know that objects later in the document // might flow around this float. let inline_size = flow.as_block().fragment.style.content_inline_size(); let fixed = match inline_size { Size::Auto => false, Size::LengthPercentage(ref lp) => { lp.0.is_definitely_zero() || lp.0.maybe_to_used_value(None).is_some() }, }; if !fixed { float_inline_size = Au::from_px(1) } } } match base_flow.flags.float_kind() { StyleFloat::None => {}, StyleFloat::Left => self.left = self.left + float_inline_size, StyleFloat::Right => self.right = self.right + float_inline_size, } } /// Given a flow, computes the speculated inline size of the floats in of its first child. 
pub fn compute_floats_in_for_first_child( parent_flow: &mut dyn Flow, ) -> SpeculatedFloatPlacement { if !parent_flow.is_block_like() { return parent_flow.base().speculated_float_placement_in; } let parent_block_flow = parent_flow.as_block(); if parent_block_flow.formatting_context_type() != FormattingContextType::None { return SpeculatedFloatPlacement::zero(); } let mut placement = parent_block_flow.base.speculated_float_placement_in; let speculated_inline_content_edge_offsets = parent_block_flow .fragment .guess_inline_content_edge_offsets(); if speculated_inline_content_edge_offsets.start > Au(0) { placement.left = if placement.left > speculated_inline_content_edge_offsets.start { placement.left - speculated_inline_content_edge_offsets.start } else { Au(0) } } if speculated_inline_content_edge_offsets.end > Au(0) { placement.right = if placement.right > speculated_inline_content_edge_offsets.end { placement.right - speculated_inline_content_edge_offsets.end } else { Au(0) } } placement } }<|fim▁end|>
} } self.left = max(
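`place_between_floats` in the floats.rs row descends through candidate block positions, asking `available_rect` for the usable inline span at each stop until the new float fits. A deliberately simplified 1-D sketch of that descent; the field names and numbers are illustrative, and the real code additionally tracks ceilings, right floats, and writing modes:

```python
# Simplified float-placement descent (inline = x, block = y).
from dataclasses import dataclass

@dataclass
class Float:
    y: int        # block-start
    h: int        # block-size
    x: int        # inline-start
    w: int        # inline-size

def place_left_float(floats, width, ceiling, max_inline):
    # sketch assumes width <= max_inline so the descent terminates
    y = ceiling
    while True:
        left = 0
        next_y = None
        for f in floats:
            if f.y <= y < f.y + f.h:          # float overlaps this band
                left = max(left, f.x + f.w)
                next_y = f.y + f.h if next_y is None else min(next_y, f.y + f.h)
        if max_inline - left >= width:
            return (left, y)
        y = next_y                            # drop below the blocking float, retry

print(place_left_float([Float(0, 10, 0, 30)], width=80, ceiling=0, max_inline=100))
# -> (0, 10): does not fit beside the float, so it is placed below it
```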
<|file_name|>prefs.js<|end_file_name|><|fim▁begin|>pref('devcache.debug', false); <|fim▁hole|><|fim▁end|>
pref('devcache.enabled', true); pref('devcache.patterns', '');
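The prefs.js row declares defaults that a user profile overlays at runtime. A toy model of that two-layer lookup, with illustrative names:

```python
# Default prefs with per-user overrides layered on top.
DEFAULTS = {
    'devcache.debug': False,
    'devcache.enabled': True,
    'devcache.patterns': '',
}

def get_pref(user_prefs, key):
    return user_prefs.get(key, DEFAULTS[key])

print(get_pref({'devcache.debug': True}, 'devcache.debug'))  # True (override)
print(get_pref({}, 'devcache.enabled'))                      # True (default)
```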
<|file_name|>page-not-found.component.ts<|end_file_name|><|fim▁begin|>import { Component } from '@angular/core'; import { I18n } from '../i18n/i18n'; @Component({<|fim▁hole|> </div> ` }) export class PageNotFoundComponent { i18n = I18n; }<|fim▁end|>
template: ` <div> <h1>{{i18n.t.pageNotFound.title}}</h1> <p>{{i18n.t.pageNotFound.description}}</p>
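The component above pulls its strings from a shared `I18n` table through dotted paths like `pageNotFound.title`. A toy version of that lookup (table contents assumed):

```python
# Dotted-path lookup into a shared translation table.
I18N = {'pageNotFound': {'title': 'Page not found',
                         'description': 'The page you requested does not exist.'}}

def t(path):
    node = I18N
    for part in path.split('.'):
        node = node[part]
    return node

print(t('pageNotFound.title'))   # Page not found
```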
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>extern crate gcc; use std::env; fn main() { let mut cfg = gcc::Config::new(); if env::var("TARGET").unwrap().contains("windows") { cfg.define("_WIN32", None); cfg.define("BZ_EXPORT", None); } cfg.include("bzip2-1.0.6") .define("BZ_NO_STDIO", None) .file("bzip2-1.0.6/blocksort.c") .file("bzip2-1.0.6/huffman.c") .file("bzip2-1.0.6/crctable.c") .file("bzip2-1.0.6/randtable.c") .file("bzip2-1.0.6/compress.c") .file("bzip2-1.0.6/decompress.c") .file("bzip2-1.0.6/bzlib.c")<|fim▁hole|><|fim▁end|>
.compile("libbz2.a"); }
<|file_name|>ICrudAction.ts<|end_file_name|><|fim▁begin|>export interface ICrudAction {<|fim▁hole|> value: string; }<|fim▁end|>
CANCEL: string; FAIL: string; FINISH: string; REQUEST: string;
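`ICrudAction` above is the usual async-action shape: one string constant per lifecycle step plus a payload field. A small Python generator for the same shape, with an assumed `namespace/STEP` naming scheme:

```python
# Generate the CRUD lifecycle constants for a given action namespace.
def crud_actions(namespace):
    steps = ('CANCEL', 'FAIL', 'FINISH', 'REQUEST')
    return {step: f'{namespace}/{step}' for step in steps}

print(crud_actions('users'))
# {'CANCEL': 'users/CANCEL', 'FAIL': 'users/FAIL',
#  'FINISH': 'users/FINISH', 'REQUEST': 'users/REQUEST'}
```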
<|file_name|>bitcoin_lv_LV.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="lv_LV" version="2.1"> <context> <name>AboutDialog</name> <message> <location filename="../forms/aboutdialog.ui" line="+14"/> <source>About Loco</source> <translation type="unfinished"/> </message> <message> <location line="+39"/> <source>&lt;b&gt;Loco&lt;/b&gt; version</source> <translation type="unfinished"/> </message> <message> <location line="+41"/> <source>Copyright © 2009-2014 The Bitcoin developers Copyright © 2012-2014 The NovaCoin developers Copyright © 2016 The Loco developers</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or &lt;a href=&quot;http://www.opensource.org/licenses/mit-license.php&quot;&gt;http://www.opensource.org/licenses/mit-license.php&lt;/a&gt;. This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (&lt;a href=&quot;https://www.openssl.org/&quot;&gt;https://www.openssl.org/&lt;/a&gt;) and cryptographic software written by Eric Young (&lt;a href=&quot;mailto:[email protected]&quot;&gt;[email protected]&lt;/a&gt;) and UPnP software written by Thomas Bernard.</source> <translation type="unfinished"/> </message> </context> <context> <name>AddressBookPage</name> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>Address Book</source> <translation type="unfinished"/> </message> <message> <location line="+22"/> <source>Double-click to edit address or label</source> <translation>Adresi vai nosaukumu rediģē ar dubultklikšķi</translation> </message> <message> <location line="+24"/> <source>Create a new address</source> <translation>Izveidot jaunu adresi</translation> </message> <message> <location line="+10"/> <source>Copy the currently selected address to the system clipboard</source> <translation>Kopēt iezīmēto adresi uz starpliktuvi</translation> </message> <message> <location line="-7"/> <source>&amp;New Address</source> <translation type="unfinished"/> </message> <message> <location line="-43"/> <source>These are your Loco addresses for receiving payments. 
You may want to give a different one to each sender so you can keep track of who is paying you.</source> <translation type="unfinished"/> </message> <message> <location line="+53"/> <source>&amp;Copy Address</source> <translation>&amp;Kopēt adresi</translation> </message> <message> <location line="+7"/> <source>Show &amp;QR Code</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Sign a message to prove you own a Loco address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <source>Delete the currently selected address from the list</source> <translation type="unfinished"/> </message> <message> <location line="-10"/> <source>Verify a message to ensure it was signed with a specified Loco address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Verify Message</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>&amp;Delete</source> <translation>&amp;Dzēst</translation> </message> <message> <location filename="../addressbookpage.cpp" line="+65"/> <source>Copy &amp;Label</source> <translation>Kopēt &amp;Nosaukumu</translation> </message> <message> <location line="+2"/> <source>&amp;Edit</source> <translation>&amp;Rediģēt</translation> </message> <message> <location line="+250"/> <source>Export Address Book Data</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Fails ar komatu kā atdalītāju (*.csv)</translation> </message> <message> <location line="+13"/> <source>Error exporting</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation type="unfinished"/> </message> </context> <context> <name>AddressTableModel</name> <message> <location filename="../addresstablemodel.cpp" line="+145"/> <source>Label</source> <translation>Nosaukums</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Adrese</translation> </message> <message> <location line="+36"/> <source>(no label)</source> <translation>(bez nosaukuma)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <location filename="../forms/askpassphrasedialog.ui" line="+26"/> <source>Passphrase Dialog</source> <translation>Paroles dialogs</translation> </message> <message> <location line="+21"/> <source>Enter passphrase</source> <translation>Ierakstiet paroli</translation> </message> <message> <location line="+14"/> <source>New passphrase</source> <translation>Jauna parole</translation> </message> <message> <location line="+14"/> <source>Repeat new passphrase</source> <translation>Jaunā parole vēlreiz</translation> </message> <message> <location line="+33"/> <source>Serves to disable the trivial sendmoney when OS account compromised. 
Provides no real security.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>For staking only</source> <translation type="unfinished"/> </message> <message> <location filename="../askpassphrasedialog.cpp" line="+38"/> <source>Encrypt wallet</source> <translation>Šifrēt maciņu</translation> </message> <message> <location line="+7"/> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>Lai veikto šo darbību, maciņš jāatslēdz ar paroli.</translation> </message> <message> <location line="+5"/> <source>Unlock wallet</source> <translation>Atslēgt maciņu</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>Šai darbībai maciņš jāatšifrē ar maciņa paroli.</translation> </message> <message> <location line="+5"/> <source>Decrypt wallet</source> <translation>Atšifrēt maciņu</translation> </message> <message> <location line="+3"/> <source>Change passphrase</source> <translation>Mainīt paroli</translation> </message> <message> <location line="+1"/> <source>Enter the old and new passphrase to the wallet.</source> <translation>Ierakstiet maciņa veco un jauno paroli.</translation> </message> <message> <location line="+45"/> <source>Confirm wallet encryption</source> <translation>Apstiprināt maciņa šifrēšanu</translation> </message> <message> <location line="+1"/> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR Loco's&lt;/b&gt;!</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Are you sure you wish to encrypt your wallet?</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation type="unfinished"/> </message> <message> <location line="+103"/> <location line="+24"/> <source>Warning: The Caps Lock key is on!</source> <translation type="unfinished"/> </message> <message> <location line="-133"/> <location line="+60"/> <source>Wallet encrypted</source> <translation>Maciņš nošifrēts</translation> </message> <message> <location line="-140"/> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;ten or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation type="unfinished"/> </message> <message> <location line="+82"/> <source>Loco will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your Loco's from being stolen by malware infecting your computer.</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <location line="+7"/> <location line="+44"/> <location line="+6"/> <source>Wallet encryption failed</source> <translation>Maciņa šifrēšana neizdevās</translation> </message> <message> <location line="-56"/> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>Maciņa šifrēšana neizdevās programmas kļūdas dēļ. 
Jūsu maciņš netika šifrēts.</translation> </message> <message> <location line="+7"/> <location line="+50"/> <source>The supplied passphrases do not match.</source> <translation>Ievadītās paroles nav vienādas.</translation> </message> <message> <location line="-38"/> <source>Wallet unlock failed</source> <translation>Maciņu atšifrēt neizdevās</translation> </message> <message> <location line="+1"/> <location line="+12"/> <location line="+19"/> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>Maciņa atšifrēšanai ievadītā parole nav pareiza.</translation> </message> <message> <location line="-20"/> <source>Wallet decryption failed</source> <translation>Maciņu neizdevās atšifrēt</translation> </message> <message> <location line="+14"/> <source>Wallet passphrase was successfully changed.</source> <translation type="unfinished"/> </message> </context> <context> <name>BitcoinGUI</name> <message> <location filename="../bitcoingui.cpp" line="+297"/> <source>Sign &amp;message...</source> <translation>Parakstīt &amp;ziņojumu...</translation> </message> <message> <location line="-64"/> <source>Show general overview of wallet</source> <translation>Rādīt vispārēju maciņa pārskatu</translation> </message> <message> <location line="+17"/> <source>&amp;Transactions</source> <translation>&amp;Transakcijas</translation> </message> <message> <location line="+1"/> <source>Browse transaction history</source> <translation>Skatīt transakciju vēsturi</translation> </message> <message> <location line="+5"/> <source>&amp;Address Book</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Edit the list of stored addresses and labels</source> <translation type="unfinished"/> </message> <message> <location line="-18"/> <source>Show the list of addresses for receiving payments</source> <translation type="unfinished"/> </message> <message> <location line="+34"/> <source>E&amp;xit</source> <translation>&amp;Iziet</translation> </message> <message> <location line="+1"/> <source>Quit application</source> <translation>Aizvērt programmu</translation> </message> <message> <location line="+4"/> <source>Show information about Loco</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>About &amp;Qt</source> <translation>Par &amp;Qt</translation> </message> <message> <location line="+1"/> <source>Show information about Qt</source> <translation>Parādīt informāciju par Qt</translation> </message> <message> <location line="+2"/> <source>&amp;Options...</source> <translation>&amp;Iespējas</translation> </message> <message> <location line="+4"/> <source>&amp;Encrypt Wallet...</source> <translation>Š&amp;ifrēt maciņu...</translation> </message> <message> <location line="+2"/> <source>&amp;Backup Wallet...</source> <translation>&amp;Izveidot maciņa rezerves kopiju</translation> </message> <message> <location line="+2"/> <source>&amp;Change Passphrase...</source> <translation>&amp;Mainīt paroli</translation> </message> <message> <location line="+9"/> <source>&amp;Export...</source> <translation type="unfinished"/> </message> <message> <location line="-55"/> <source>Send Loco's to a Loco address</source> <translation type="unfinished"/> </message> <message> <location line="+39"/> <source>Modify configuration options for Loco</source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <source>Export the data in the current tab to a file</source> <translation type="unfinished"/> </message> 
<message> <location line="-13"/> <source>Encrypt or decrypt wallet</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Backup wallet to another location</source> <translation>Izveidot maciņa rezerves kopiju citur</translation> </message> <message> <location line="+2"/> <source>Change the passphrase used for wallet encryption</source> <translation>Mainīt maciņa šifrēšanas paroli</translation> </message> <message> <location line="+10"/> <source>&amp;Debug window</source> <translation>&amp;Debug logs</translation> </message> <message> <location line="+1"/> <source>Open debugging and diagnostic console</source> <translation>Atvērt atkļūdošanas un diagnostikas konsoli</translation> </message> <message> <location line="-5"/> <source>&amp;Verify message...</source> <translation>&amp;Pārbaudīt ziņojumu...</translation> </message> <message> <location line="-214"/> <location line="+551"/> <source>Loco</source> <translation type="unfinished"/> </message> <message> <location line="-551"/> <source>Wallet</source> <translation>Maciņš</translation> </message> <message> <location line="+193"/> <source>&amp;About Loco</source> <translation type="unfinished"/> </message> <message> <location line="+9"/> <source>&amp;Show / Hide</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Unlock wallet</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>&amp;Lock Wallet</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Lock wallet</source> <translation type="unfinished"/> </message> <message> <location line="+28"/> <source>&amp;File</source> <translation>&amp;Fails</translation> </message> <message> <location line="+8"/> <source>&amp;Settings</source> <translation>&amp;Uzstādījumi</translation> </message> <message> <location line="+8"/> <source>&amp;Help</source> <translation>&amp;Palīdzība</translation> </message> <message> <location line="+17"/> <source>Tabs toolbar</source> <translation>Ciļņu rīkjosla</translation> </message> <message> <location line="+46"/> <location line="+9"/> <source>[testnet]</source> <translation>[testnet]</translation> </message> <message> <location line="+0"/> <location line="+58"/> <source>Loco client</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="+70"/> <source>%n active connection(s) to Loco network</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+488"/> <source>Staking.&lt;br&gt;Your weight is %1&lt;br&gt;Network weight is %2&lt;br&gt;Expected time to earn reward is %3</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Not staking because wallet is locked</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Not staking because wallet is offline</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Not staking because wallet is syncing</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Not staking because you don&apos;t have mature Loco's</source> <translation type="unfinished"/> </message> <message> <location line="-808"/> <source>&amp;Dashboard</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>&amp;Receive</source> <translation type="unfinished"/> 
</message> <message> <location line="+6"/> <source>&amp;Send</source> <translation type="unfinished"/> </message> <message> <location line="+49"/> <source>&amp;Unlock Wallet...</source> <translation type="unfinished"/> </message> <message> <location line="+273"/> <source>Up to date</source> <translation>Sinhronizēts</translation> </message> <message> <location line="+43"/> <source>Catching up...</source> <translation>Sinhronizējos...</translation> </message> <message> <location line="+113"/> <source>Confirm transaction fee</source> <translation type="unfinished"/> </message> <message> <location line="+27"/> <source>Sent transaction</source> <translation>Transakcija nosūtīta</translation> </message> <message> <location line="+1"/> <source>Incoming transaction</source> <translation>Ienākoša transakcija</translation> </message> <message> <location line="+1"/> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation>Datums: %1 Daudzums: %2 Tips: %3 Adrese: %4 </translation> </message> <message> <location line="+100"/> <location line="+15"/> <source>URI handling</source> <translation type="unfinished"/> </message> <message> <location line="-15"/> <location line="+15"/> <source>URI can not be parsed! This can be caused by an invalid Loco address or malformed URI parameters.</source> <translation type="unfinished"/> </message> <message> <location line="+9"/> <source>Wallet is &lt;b&gt;not encrypted&lt;/b&gt;</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>Maciņš ir &lt;b&gt;šifrēts&lt;/b&gt; un pašlaik &lt;b&gt;atslēgts&lt;/b&gt;</translation> </message> <message> <location line="+8"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>Maciņš ir &lt;b&gt;šifrēts&lt;/b&gt; un pašlaik &lt;b&gt;slēgts&lt;/b&gt;</translation> </message> <message> <location line="+24"/> <source>Backup Wallet</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Wallet Data (*.dat)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Backup Failed</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>There was an error trying to save the wallet data to the new location.</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="+91"/> <source>%n second(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n minute(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message numerus="yes"> <location line="-429"/> <location line="+433"/> <source>%n hour(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="-456"/> <source>Processed %1 blocks of transaction history.</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="+27"/> <location line="+433"/> <source>%n day(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message numerus="yes"> <location 
line="-429"/> <location line="+6"/> <source>%n week(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+0"/> <source>%1 and %2</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="+0"/> <source>%n year(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+5"/> <source>%1 behind</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>Last received block was generated %1 ago.</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Transactions after this will not yet be visible.</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Error</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Information</source> <translation type="unfinished"/> </message> <message> <location line="+69"/> <source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source> <translation type="unfinished"/> </message> <message> <location line="+324"/> <source>Not staking</source> <translation type="unfinished"/> </message> <message> <location filename="../bitcoin.cpp" line="+104"/> <source>A fatal error occurred. Loco can no longer continue safely and will quit.</source> <translation type="unfinished"/> </message> </context> <context> <name>ClientModel</name> <message> <location filename="../clientmodel.cpp" line="+110"/> <source>Network Alert</source> <translation>Tīkla brīdinājums</translation> </message> </context> <context> <name>CoinControlDialog</name> <message> <location filename="../forms/coincontroldialog.ui" line="+14"/> <source>Coin Control</source> <translation type="unfinished"/> </message> <message> <location line="+31"/> <source>Quantity:</source> <translation type="unfinished"/> </message> <message> <location line="+32"/> <source>Bytes:</source> <translation type="unfinished"/> </message> <message> <location line="+48"/> <source>Amount:</source> <translation>Daudzums:</translation> </message> <message> <location line="+32"/> <source>Priority:</source> <translation type="unfinished"/> </message> <message> <location line="+48"/> <source>Fee:</source> <translation type="unfinished"/> </message> <message> <location line="+35"/> <source>Low Output:</source> <translation type="unfinished"/> </message> <message> <location filename="../coincontroldialog.cpp" line="+552"/> <source>no</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/coincontroldialog.ui" line="+51"/> <source>After Fee:</source> <translation type="unfinished"/> </message> <message> <location line="+35"/> <source>Change:</source> <translation type="unfinished"/> </message> <message> <location line="+69"/> <source>(un)select all</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Tree mode</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>List mode</source> <translation type="unfinished"/> </message> <message> <location line="+45"/> 
<source>Amount</source> <translation>Daudzums</translation> </message> <message> <location line="+5"/> <source>Label</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Address</source> <translation>Adrese</translation> </message> <message> <location line="+5"/> <source>Date</source> <translation>Datums</translation> </message> <message> <location line="+5"/> <source>Confirmations</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Confirmed</source> <translation>Apstiprināts</translation> </message> <message> <location line="+5"/> <source>Priority</source> <translation type="unfinished"/> </message> <message> <location filename="../coincontroldialog.cpp" line="-515"/> <source>Copy address</source> <translation>Kopēt adresi</translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>Kopēt nosaukumu</translation> </message> <message> <location line="+1"/> <location line="+26"/> <source>Copy amount</source> <translation>Kopēt daudzumu</translation> </message> <message> <location line="-25"/> <source>Copy transaction ID</source> <translation type="unfinished"/> </message> <message> <location line="+24"/> <source>Copy quantity</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Copy fee</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Copy after fee</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Copy bytes</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Copy priority</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Copy low output</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Copy change</source> <translation type="unfinished"/> </message> <message> <location line="+317"/> <source>highest</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>high</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>medium-high</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>medium</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>low-medium</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>low</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>lowest</source> <translation type="unfinished"/> </message> <message> <location line="+155"/> <source>DUST</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>yes</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>This label turns red, if the transaction size is bigger than 10000 bytes. This means a fee of at least %1 per kb is required. Can vary +/- 1 Byte per input.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Transactions with higher priority get more likely into a block. This label turns red, if the priority is smaller than &quot;medium&quot;. This means a fee of at least %1 per kb is required.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>This label turns red, if any recipient receives an amount smaller than %1. This means a fee of at least %2 is required. 
Amounts below 0.546 times the minimum relay fee are shown as DUST.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>This label turns red, if the change is smaller than %1. This means a fee of at least %2 is required.</source> <translation type="unfinished"/> </message> <message> <location line="+36"/> <location line="+66"/> <source>(no label)</source> <translation>(bez nosaukuma)</translation> </message> <message> <location line="-9"/> <source>change from %1 (%2)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>(change)</source> <translation type="unfinished"/> </message> </context> <context> <name>EditAddressDialog</name> <message> <location filename="../forms/editaddressdialog.ui" line="+14"/> <source>Edit Address</source> <translation>Mainīt adrese</translation> </message> <message> <location line="+11"/> <source>&amp;Label</source> <translation>&amp;Nosaukums</translation> </message> <message> <location line="+10"/> <source>The label associated with this address book entry</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>&amp;Address</source> <translation>&amp;Adrese</translation> </message> <message> <location line="+10"/> <source>The address associated with this address book entry. This can only be modified for sending addresses.</source> <translation type="unfinished"/> </message> <message> <location filename="../editaddressdialog.cpp" line="+21"/> <source>New receiving address</source> <translation>Jauna saņemšanas adrese</translation> </message> <message> <location line="+4"/> <source>New sending address</source> <translation>Jauna nosūtīšanas adrese</translation> </message> <message> <location line="+3"/> <source>Edit receiving address</source> <translation>Mainīt saņemšanas adresi</translation> </message> <message> <location line="+4"/> <source>Edit sending address</source> <translation>Mainīt nosūtīšanas adresi</translation> </message> <message> <location line="+76"/> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation>Nupat ierakstītā adrese &quot;%1&quot; jau atrodas adrešu grāmatā.</translation> </message> <message> <location line="-5"/> <source>The entered address &quot;%1&quot; is not a valid Loco address.</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>Could not unlock wallet.</source> <translation>Nav iespējams atslēgt maciņu.</translation> </message> <message> <location line="+5"/> <source>New key generation failed.</source> <translation>Neizdevās ģenerēt jaunu atslēgu.</translation> </message> </context> <context> <name>GUIUtil::HelpMessageBox</name> <message> <location filename="../guiutil.cpp" line="+426"/> <location line="+12"/> <source>Loco-Qt</source> <translation type="unfinished"/> </message> <message> <location line="-12"/> <source>version</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Usage:</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>command-line options</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>UI options</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Set language, for example &quot;de_DE&quot; (default: system locale)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Start minimized</source> <translation 
type="unfinished"/> </message> <message> <location line="+1"/> <source>Show splash screen on startup (default: 1)</source> <translation type="unfinished"/> </message> </context> <context> <name>OptionsDialog</name> <message> <location filename="../forms/optionsdialog.ui" line="+14"/> <source>Options</source> <translation>Iespējas</translation> </message> <message> <location line="+16"/> <source>&amp;Main</source> <translation>&amp;Galvenais</translation> </message> <message> <location line="+6"/> <source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>Pay transaction &amp;fee</source> <translation>&amp;Maksāt par transakciju</translation> </message> <message> <location line="+31"/> <source>Reserved amount does not participate in staking and is therefore spendable at any time.</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>Reserve</source> <translation type="unfinished"/> </message> <message> <location line="+31"/> <source>Automatically start Loco after logging in to the system.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Start Loco on system login</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>&amp;Network</source> <translation>&amp;Tīkls</translation> </message> <message> <location line="+6"/> <source>Automatically open the Loco client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Map port using &amp;UPnP</source> <translation>Kartēt portu, izmantojot &amp;UPnP</translation> </message> <message> <location line="+7"/> <source>Connect to the Loco network through a SOCKS proxy (e.g. when connecting through Tor).</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Connect through SOCKS proxy:</source> <translation type="unfinished"/> </message> <message> <location line="+9"/> <source>Proxy &amp;IP:</source> <translation>Proxy &amp;IP:</translation> </message> <message> <location line="+19"/> <source>IP address of the proxy (e.g. 127.0.0.1)</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>&amp;Port:</source> <translation>&amp;Ports:</translation> </message> <message> <location line="+19"/> <source>Port of the proxy (e.g. 9050)</source> <translation>Proxy ports (piem. 9050)</translation> </message> <message> <location line="+7"/> <source>SOCKS &amp;Version:</source> <translation>SOCKS &amp;Versija:</translation> </message> <message> <location line="+13"/> <source>SOCKS version of the proxy (e.g. 5)</source> <translation>proxy SOCKS versija (piem. 
5)</translation> </message> <message> <location line="+36"/> <source>&amp;Window</source> <translation>&amp;Logs</translation> </message> <message> <location line="+6"/> <source>Show only a tray icon after minimizing the window.</source> <translation>Pēc loga minimizācijas rādīt tikai ikonu sistēmas teknē.</translation> </message> <message> <location line="+3"/> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>&amp;Minimizēt uz sistēmas tekni, nevis rīkjoslu</translation> </message> <message> <location line="+7"/> <source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation>Logu aizverot, minimizēt, nevis beigt darbu. Kad šī izvēlne iespējota, programma aizvērsies tikai pēc Beigt komandas izvēlnē.</translation> </message> <message> <location line="+3"/> <source>M&amp;inimize on close</source> <translation>M&amp;inimizēt aizverot</translation> </message> <message> <location line="+21"/> <source>&amp;Display</source> <translation>&amp;Izskats</translation> </message> <message> <location line="+8"/> <source>User Interface &amp;language:</source> <translation>Lietotāja interfeiss un &amp;valoda:</translation> </message> <message> <location line="+13"/> <source>The user interface language can be set here. This setting will take effect after restarting Loco.</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>&amp;Unit to show amounts in:</source> <translation>&amp;Vienības, kurās attēlot daudzumus:</translation> </message> <message> <location line="+13"/> <source>Choose the default subdivision unit to show in the interface and when sending Loco's.</source> <translation>Izvēlēties dalījuma vienību pēc noklusēšanas, ko izmantot interfeisā un nosūtot bitkoinus.</translation> </message> <message> <location line="+9"/> <source>Whether to show coin control features or not.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Display coin &amp;control features (experts only!)</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Whether to select the coin outputs randomly or with minimal coin age.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Minimize weight consumption (experimental)</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Use black visual theme (requires restart)</source> <translation type="unfinished"/> </message> <message> <location line="+71"/> <source>&amp;OK</source> <translation>&amp;OK</translation> </message> <message> <location line="+7"/> <source>&amp;Cancel</source> <translation>&amp;Atcelt</translation> </message> <message> <location line="+10"/> <source>&amp;Apply</source> <translation type="unfinished"/> </message> <message> <location filename="../optionsdialog.cpp" line="+53"/> <source>default</source> <translation>pēc noklusēšanas</translation> </message> <message> <location line="+149"/> <location line="+9"/> <source>Warning</source> <translation type="unfinished"/> </message> <message> <location line="-9"/> <location line="+9"/> <source>This setting will take effect after restarting Loco.</source> <translation type="unfinished"/> </message> <message> <location line="+29"/> <source>The supplied proxy address is invalid.</source> <translation>Norādītā proxy adrese nav derīga.</translation> </message> 
</context> <context> <name>OverviewPage</name> <message> <location filename="../forms/overviewpage.ui" line="+14"/> <source>Form</source> <translation>Forma</translation> </message> <message> <location line="+46"/> <location line="+247"/> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the Loco network after a connection is established, but this process has not completed yet.</source> <translation type="unfinished"/> </message> <message> <location line="-173"/> <source>Stake:</source> <translation type="unfinished"/> </message> <message> <location line="+32"/> <source>Unconfirmed:</source> <translation type="unfinished"/> </message> <message> <location line="-113"/> <source>Wallet</source> <translation>Maciņš</translation> </message> <message> <location line="+49"/> <source>Spendable:</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>Your current spendable balance</source> <translation type="unfinished"/> </message> <message> <location line="+80"/> <source>Immature:</source> <translation>Nenobriedušu:</translation> </message> <message> <location line="+13"/> <source>Mined balance that has not yet matured</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Total:</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>Your current total balance</source> <translation type="unfinished"/> </message> <message> <location line="+50"/> <source>&lt;b&gt;Recent transactions&lt;/b&gt;</source> <translation>&lt;b&gt;Pēdējās transakcijas&lt;/b&gt;</translation> </message> <message> <location line="-118"/> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source> <translation type="unfinished"/> </message> <message> <location line="-32"/> <source>Total of Loco's that was staked, and do not yet count toward the current balance</source> <translation type="unfinished"/> </message> <message> <location filename="../overviewpage.cpp" line="+116"/> <location line="+1"/> <source>out of sync</source> <translation>nav sinhronizēts</translation> </message> </context> <context> <name>PaymentServer</name> <message> <location filename="../paymentserver.cpp" line="+107"/> <source>Cannot start loco: click-to-pay handler</source> <translation type="unfinished"/> </message> </context> <context> <name>QRCodeDialog</name> <message> <location filename="../forms/qrcodedialog.ui" line="+14"/> <source>QR Code Dialog</source> <translation type="unfinished"/> </message> <message> <location line="+59"/> <source>Request Payment</source> <translation type="unfinished"/> </message> <message> <location line="+56"/> <source>Amount:</source> <translation type="unfinished"/> </message> <message> <location line="-44"/> <source>Label:</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>Message:</source> <translation type="unfinished"/> </message> <message> <location line="+71"/><|fim▁hole|> <message> <location filename="../qrcodedialog.cpp" line="+62"/> <source>Error encoding URI into QR Code.</source> <translation type="unfinished"/> </message> <message> <location line="+40"/> <source>The entered amount is invalid, please check.</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation type="unfinished"/> </message> <message> <location 
line="+25"/> <source>Save QR Code</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>PNG Images (*.png)</source> <translation type="unfinished"/> </message> </context> <context> <name>RPCConsole</name> <message> <location filename="../forms/rpcconsole.ui" line="+46"/> <source>Client name</source> <translation>Klienta vārds</translation> </message> <message> <location line="+10"/> <location line="+23"/> <location line="+26"/> <location line="+23"/> <location line="+23"/> <location line="+36"/> <location line="+53"/> <location line="+23"/> <source>N/A</source> <translation>N/A</translation> </message> <message> <location line="-194"/> <source>Client version</source> <translation>Klienta versija</translation> </message> <message> <location line="-45"/> <source>&amp;Information</source> <translation>&amp;Informācija</translation> </message> <message> <location line="+68"/> <source>Using OpenSSL version</source> <translation type="unfinished"/> </message> <message> <location line="+49"/> <source>Startup time</source> <translation>Sākuma laiks</translation> </message> <message> <location line="+29"/> <source>Network</source> <translation>Tīkls</translation> </message> <message> <location line="+7"/> <source>Number of connections</source> <translation>Savienojumu skaits</translation> </message> <message> <location line="+23"/> <source>On testnet</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Block chain</source> <translation>Bloku virkne</translation> </message> <message> <location line="+7"/> <source>Current number of blocks</source> <translation>Pašreizējais bloku skaits</translation> </message> <message> <location line="+197"/> <source>&amp;Network Traffic</source> <translation type="unfinished"/> </message> <message> <location line="+52"/> <source>&amp;Clear</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Totals</source> <translation type="unfinished"/> </message> <message> <location line="+64"/> <source>In:</source> <translation type="unfinished"/> </message> <message> <location line="+80"/> <source>Out:</source> <translation type="unfinished"/> </message> <message> <location line="-383"/> <source>Last block time</source> <translation>Pēdējā bloka laiks</translation> </message> <message> <location line="+52"/> <source>&amp;Open</source> <translation>&amp;Atvērt</translation> </message> <message> <location line="+16"/> <source>Command-line options</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Show the Loco-Qt help message to get a list with possible Loco command-line options.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Show</source> <translation type="unfinished"/> </message> <message> <location line="+24"/> <source>&amp;Console</source> <translation>&amp;Konsole</translation> </message> <message> <location line="-237"/> <source>Build date</source> <translation>Kompilācijas datums</translation> </message> <message> <location line="-104"/> <source>Loco - Debug window</source> <translation type="unfinished"/> </message> <message> <location line="+25"/> <source>Loco Core</source> <translation type="unfinished"/> </message> <message> <location line="+256"/> <source>Debug log file</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Open the Loco debug log file from the current data directory. 
This can take a few seconds for large log files.</source> <translation type="unfinished"/> </message> <message> <location line="+102"/> <source>Clear console</source> <translation>Notīrīt konsoli</translation> </message> <message> <location filename="../rpcconsole.cpp" line="+325"/> <source>Welcome to the Loco RPC console.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation>Izmantojiet bultiņas uz augšu un leju, lai pārvietotos pa vēsturi, un &lt;b&gt;Ctrl-L&lt;/b&gt; ekrāna notīrīšanai.</translation> </message> <message> <location line="+1"/> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation>Ierakstiet &lt;b&gt;help&lt;/b&gt; lai iegūtu pieejamo komandu sarakstu.</translation> </message> <message> <location line="+127"/> <source>%1 B</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>%1 KB</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>%1 MB</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>%1 GB</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>%1 m</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>%1 h</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>%1 h %2 m</source> <translation type="unfinished"/> </message> </context> <context> <name>SendLoco'sDialog</name> <message> <location filename="../forms/sendLoco'sdialog.ui" line="+14"/> <location filename="../sendLoco'sdialog.cpp" line="+181"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+6"/> <location line="+5"/> <location line="+5"/> <source>Send Loco's</source> <translation>Sūtīt bitkoinus</translation> </message> <message> <location line="+76"/> <source>Coin Control Features</source> <translation type="unfinished"/> </message> <message> <location line="+20"/> <source>Inputs...</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>automatically selected</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>Insufficient funds!</source> <translation type="unfinished"/> </message> <message> <location line="+77"/> <source>Quantity:</source> <translation type="unfinished"/> </message> <message> <location line="+22"/> <location line="+35"/> <source>0</source> <translation type="unfinished"/> </message> <message> <location line="-19"/> <source>Bytes:</source> <translation type="unfinished"/> </message> <message> <location line="+51"/> <source>Amount:</source> <translation>Daudzums:</translation> </message> <message> <location line="+22"/> <location line="+86"/> <location line="+86"/> <location line="+32"/> <source>0.00 LOC2</source> <translation type="unfinished"/> </message> <message> <location line="-191"/> <source>Priority:</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>medium</source> <translation type="unfinished"/> </message> <message> <location line="+32"/> <source>Fee:</source> <translation type="unfinished"/> </message> <message> <location line="+35"/> <source>Low Output:</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>no</source> <translation type="unfinished"/> </message> 
<message> <location line="+32"/> <source>After Fee:</source> <translation type="unfinished"/> </message> <message> <location line="+35"/> <source>Change</source> <translation type="unfinished"/> </message> <message> <location line="+50"/> <source>custom change address</source> <translation type="unfinished"/> </message> <message> <location line="+106"/> <source>Send to multiple recipients at once</source> <translation>Sūtīt vairākiem saņēmējiem uzreiz</translation> </message> <message> <location line="+3"/> <source>Add &amp;Recipient</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>Remove all transaction fields</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Clear &amp;All</source> <translation>&amp;Notīrīt visu</translation> </message> <message> <location line="+24"/> <source>Balance:</source> <translation>Bilance:</translation> </message> <message> <location line="+16"/> <source>123.456 LOC2</source> <translation type="unfinished"/> </message> <message> <location line="+31"/> <source>Confirm the send action</source> <translation>Apstiprināt nosūtīšanu</translation> </message> <message> <location line="+3"/> <source>S&amp;end</source> <translation type="unfinished"/> </message> <message> <location filename="../sendLoco'sdialog.cpp" line="-173"/> <source>Enter a Loco address (e.g. 6834Nm8iKGE18CT1jtM85zqS19ycjzMJ2D)</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>Copy quantity</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>Kopēt daudzumu</translation> </message> <message> <location line="+1"/> <source>Copy fee</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Copy after fee</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Copy bytes</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Copy priority</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Copy low output</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Copy change</source> <translation type="unfinished"/> </message> <message> <location line="+86"/> <source>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Confirm send Loco's</source> <translation>Apstiprināt bitkoinu sūtīšanu</translation> </message> <message> <location line="+1"/> <source>Are you sure you want to send %1?</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source> and </source> <translation type="unfinished"/> </message> <message> <location line="+29"/> <source>The recipient address is not valid, please recheck.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>The amount to pay must be larger than 0.</source> <translation>Nosūtāmajai summai jābūt lielākai par 0.</translation> </message> <message> <location line="+5"/> <source>The amount exceeds your balance.</source> <translation>Daudzums pārsniedz pieejamo.</translation> </message> <message> <location line="+5"/> <source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation>Kopsumma pārsniedz pieejamo, ja pieskaitīta %1 transakcijas maksa.</translation> </message> <message> <location line="+6"/> 
<source>Duplicate address found, can only send to each address once per send operation.</source> <translation>Atrastas divas vienādas adreses, vienā nosūtīšanas reizē uz katru adresi var sūtīt tikai vienreiz.</translation> </message> <message> <location line="+5"/> <source>Error: Transaction creation failed!</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Error: The transaction was rejected. This might happen if some of the Loco's in your wallet were already spent, such as if you used a copy of wallet.dat and Loco's were spent in the copy but not marked as spent here.</source> <translation type="unfinished"/> </message> <message> <location line="+247"/> <source>WARNING: Invalid Loco address</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>(no label)</source> <translation>(bez nosaukuma)</translation> </message> <message> <location line="+4"/> <source>WARNING: unknown change address</source> <translation type="unfinished"/> </message> </context> <context> <name>SendLoco'sEntry</name> <message> <location filename="../forms/sendLoco'sentry.ui" line="+14"/> <source>Form</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>A&amp;mount:</source> <translation>Apjo&amp;ms</translation> </message> <message> <location line="+13"/> <source>Pay &amp;To:</source> <translation>&amp;Saņēmējs:</translation> </message> <message> <location line="+34"/> <source>The address to send the payment to (e.g. 6834Nm8iKGE18CT1jtM85zqS19ycjzMJ2D)</source> <translation type="unfinished"/> </message> <message> <location line="+60"/> <location filename="../sendLoco'sentry.cpp" line="+26"/> <source>Enter a label for this address to add it to your address book</source> <translation>Lai pievienotu adresi adrešu grāmatai, tai jādod nosaukums</translation> </message> <message> <location line="-78"/> <source>&amp;Label:</source> <translation>&amp;Nosaukums:</translation> </message> <message> <location line="+28"/> <source>Choose address from address book</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="+7"/> <source>Paste address from clipboard</source> <translation>ielīmēt adresi no starpliktuves</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+7"/> <source>Remove this recipient</source> <translation type="unfinished"/> </message> <message> <location filename="../sendLoco'sentry.cpp" line="+1"/> <source>Enter a Loco address (e.g. 6834Nm8iKGE18CT1jtM85zqS19ycjzMJ2D)</source> <translation type="unfinished"/> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <location filename="../forms/signverifymessagedialog.ui" line="+14"/> <source>Signatures - Sign / Verify a Message</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <location line="+124"/> <source>&amp;Sign Message</source> <translation type="unfinished"/> </message> <message> <location line="-118"/> <source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. 
Only sign fully-detailed statements you agree to.</source> <translation type="unfinished"/> </message> <message> <location line="+18"/> <source>The address to sign the message with (e.g. 6834Nm8iKGE18CT1jtM85zqS19ycjzMJ2D)</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <location line="+203"/> <source>Choose an address from the address book</source> <translation type="unfinished"/> </message> <message> <location line="-193"/> <location line="+203"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="-193"/> <source>Paste address from clipboard</source> <translation>ielīmēt adresi no starpliktuves</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+12"/> <source>Enter the message you want to sign here</source> <translation type="unfinished"/> </message> <message> <location line="+24"/> <source>Copy the current signature to the system clipboard</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>Sign the message to prove you own this Loco address</source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <source>Reset all sign message fields</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <location line="+146"/> <source>Clear &amp;All</source> <translation>&amp;Notīrīt visu</translation> </message> <message> <location line="-87"/> <location line="+70"/> <source>&amp;Verify Message</source> <translation type="unfinished"/> </message> <message> <location line="-64"/> <source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>The address the message was signed with (e.g. 6834Nm8iKGE18CT1jtM85zqS19ycjzMJ2D)</source> <translation type="unfinished"/> </message> <message> <location line="+40"/> <source>Verify the message to ensure it was signed with the specified Loco address</source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <source>Reset all verify message fields</source> <translation type="unfinished"/> </message> <message> <location filename="../signverifymessagedialog.cpp" line="+27"/> <location line="+3"/> <source>Enter a Loco address (e.g. 
6834Nm8iKGE18CT1jtM85zqS19ycjzMJ2D)</source> <translation type="unfinished"/> </message> <message> <location line="-2"/> <source>Click &quot;Sign Message&quot; to generate signature</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Enter Loco signature</source> <translation type="unfinished"/> </message> <message> <location line="+82"/> <location line="+81"/> <source>The entered address is invalid.</source> <translation type="unfinished"/> </message> <message> <location line="-81"/> <location line="+8"/> <location line="+73"/> <location line="+8"/> <source>Please check the address and try again.</source> <translation type="unfinished"/> </message> <message> <location line="-81"/> <location line="+81"/> <source>The entered address does not refer to a key.</source> <translation type="unfinished"/> </message> <message> <location line="-73"/> <source>Wallet unlock was cancelled.</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Private key for the entered address is not available.</source> <translation type="unfinished"/> </message> <message> <location line="+12"/> <source>Message signing failed.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Message signed.</source> <translation type="unfinished"/> </message> <message> <location line="+59"/> <source>The signature could not be decoded.</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <location line="+13"/> <source>Please check the signature and try again.</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>The signature did not match the message digest.</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Message verification failed.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Message verified.</source> <translation type="unfinished"/> </message> </context> <context> <name>TrafficGraphWidget</name> <message> <location filename="../trafficgraphwidget.cpp" line="+75"/> <source>KB/s</source> <translation type="unfinished"/> </message> </context> <context> <name>TransactionDesc</name> <message> <location filename="../transactiondesc.cpp" line="+25"/> <source>Open until %1</source> <translation>Atvērts līdz %1</translation> </message> <message> <location line="+6"/> <source>conflicted</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>%1/offline</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>%1/unconfirmed</source> <translation>%1/neapstiprinātas</translation> </message> <message> <location line="+2"/> <source>%1 confirmations</source> <translation>%1 apstiprinājumu</translation> </message> <message> <location line="+17"/> <source>Status</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="+7"/> <source>, broadcast through %n node(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+4"/> <source>Date</source> <translation>Datums</translation> </message> <message> <location line="+7"/> <source>Source</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Generated</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <location line="+17"/> 
<source>From</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <location line="+22"/> <location line="+58"/> <source>To</source> <translation type="unfinished"/> </message> <message> <location line="-77"/> <location line="+2"/> <source>own address</source> <translation type="unfinished"/> </message> <message> <location line="-2"/> <source>label</source> <translation type="unfinished"/> </message> <message> <location line="+37"/> <location line="+12"/> <location line="+45"/> <location line="+17"/> <location line="+30"/> <source>Credit</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="-102"/> <source>matures in %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+2"/> <source>not accepted</source> <translation type="unfinished"/> </message> <message> <location line="+44"/> <location line="+8"/> <location line="+15"/> <location line="+30"/> <source>Debit</source> <translation type="unfinished"/> </message> <message> <location line="-39"/> <source>Transaction fee</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>Net amount</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Message</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Comment</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Transaction ID</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Generated Loco's must mature 510 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. 
This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Debug information</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Transaction</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Inputs</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>Amount</source> <translation>Daudzums</translation> </message> <message> <location line="+1"/> <source>true</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>false</source> <translation type="unfinished"/> </message> <message> <location line="-209"/> <source>, has not been successfully broadcast yet</source> <translation>, vēl nav veiksmīgi izziņots</translation> </message> <message numerus="yes"> <location line="-36"/> <source>Open for %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+71"/> <source>unknown</source> <translation>nav zināms</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <location filename="../forms/transactiondescdialog.ui" line="+14"/> <source>Transaction details</source> <translation>Transakcijas detaļas</translation> </message> <message> <location line="+6"/> <source>This pane shows a detailed description of the transaction</source> <translation>Šis panelis parāda transakcijas detaļas</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <location filename="../transactiontablemodel.cpp" line="+231"/> <source>Date</source> <translation>Datums</translation> </message> <message> <location line="+0"/> <source>Type</source> <translation>Tips</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Adrese</translation> </message> <message> <location line="+0"/> <source>Amount</source> <translation>Daudzums</translation> </message> <message> <location line="+52"/> <source>Open until %1</source> <translation>Atvērts līdz %1</translation> </message> <message> <location line="+12"/> <source>Confirmed (%1 confirmations)</source> <translation>Apstiprināts (%1 apstiprinājumu)</translation> </message> <message numerus="yes"> <location line="-15"/> <source>Open for %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+6"/> <source>Offline</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Unconfirmed</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Confirming (%1 of %2 recommended confirmations)</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Conflicted</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Immature (%1 confirmations, will be available after %2)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>Neviens cits mezgls šo bloku nav saņēmis un droši vien netiks akceptēts!</translation> </message> <message> <location line="+3"/> 
<source>Generated but not accepted</source> <translation>Ģenerēts, taču nav akceptēts</translation> </message> <message> <location line="+42"/> <source>Received with</source> <translation>Saņemts ar</translation> </message> <message> <location line="+2"/> <source>Received from</source> <translation>Saņemts no</translation> </message> <message> <location line="+3"/> <source>Sent to</source> <translation>Nosūtīts</translation> </message> <message> <location line="+2"/> <source>Payment to yourself</source> <translation>Maksājums sev</translation> </message> <message> <location line="+2"/> <source>Mined</source> <translation>Atrasts</translation> </message> <message> <location line="+38"/> <source>(n/a)</source> <translation>(nav pieejams)</translation> </message> <message> <location line="+194"/> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>Transakcijas statuss. Turiet peli virs šī lauka, lai redzētu apstiprinājumu skaitu.</translation> </message> <message> <location line="+2"/> <source>Date and time that the transaction was received.</source> <translation>Transakcijas saņemšanas datums un laiks.</translation> </message> <message> <location line="+2"/> <source>Type of transaction.</source> <translation>Transakcijas tips.</translation> </message> <message> <location line="+2"/> <source>Destination address of transaction.</source> <translation>Transakcijas mērķa adrese.</translation> </message> <message> <location line="+2"/> <source>Amount removed from or added to balance.</source> <translation>Bilancei pievienotais vai atņemtais daudzums.</translation> </message> </context> <context> <name>TransactionView</name> <message> <location filename="../transactionview.cpp" line="+54"/> <location line="+17"/> <source>All</source> <translation>Visi</translation> </message> <message> <location line="-16"/> <source>Today</source> <translation>Šodien</translation> </message> <message> <location line="+1"/> <source>This week</source> <translation>Šonedēļ</translation> </message> <message> <location line="+1"/> <source>This month</source> <translation>Šomēnes</translation> </message> <message> <location line="+1"/> <source>Last month</source> <translation>Pēdējais mēnesis</translation> </message> <message> <location line="+1"/> <source>This year</source> <translation>Šogad</translation> </message> <message> <location line="+1"/> <source>Range...</source> <translation>Diapazons...</translation> </message> <message> <location line="+12"/> <source>Received with</source> <translation>Saņemts ar</translation> </message> <message> <location line="+2"/> <source>Sent to</source> <translation>Nosūtīts</translation> </message> <message> <location line="+2"/> <source>To yourself</source> <translation>Sev</translation> </message> <message> <location line="+1"/> <source>Mined</source> <translation>Atrasts</translation> </message> <message> <location line="+1"/> <source>Other</source> <translation>Cits</translation> </message> <message> <location line="+7"/> <source>Enter address or label to search</source> <translation>Ierakstiet meklējamo nosaukumu vai adresi</translation> </message> <message> <location line="+7"/> <source>Min amount</source> <translation>Minimālais daudzums</translation> </message> <message> <location line="+34"/> <source>Copy address</source> <translation>Kopēt adresi</translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>Kopēt nosaukumu</translation> </message> <message> <location line="+1"/> 
<source>Copy amount</source> <translation>Kopēt daudzumu</translation> </message> <message> <location line="+1"/> <source>Copy transaction ID</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Edit label</source> <translation>Mainīt nosaukumu</translation> </message> <message> <location line="+1"/> <source>Show transaction details</source> <translation>Rādīt transakcijas detaļas</translation> </message> <message> <location line="+138"/> <source>Export Transaction Data</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Fails ar komatu kā atdalītāju (*.csv)</translation> </message> <message> <location line="+8"/> <source>Confirmed</source> <translation>Apstiprināts</translation> </message> <message> <location line="+1"/> <source>Date</source> <translation>Datums</translation> </message> <message> <location line="+1"/> <source>Type</source> <translation>Tips</translation> </message> <message> <location line="+1"/> <source>Label</source> <translation>Nosaukums</translation> </message> <message> <location line="+1"/> <source>Address</source> <translation>Adrese</translation> </message> <message> <location line="+1"/> <source>Amount</source> <translation>Daudzums</translation> </message> <message> <location line="+1"/> <source>ID</source> <translation>ID</translation> </message> <message> <location line="+4"/> <source>Error exporting</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation type="unfinished"/> </message> <message> <location line="+100"/> <source>Range:</source> <translation>Diapazons:</translation> </message> <message> <location line="+8"/> <source>to</source> <translation>uz</translation> </message> </context> <context> <name>WalletModel</name> <message> <location filename="../walletmodel.cpp" line="+208"/> <source>Sending...</source> <translation type="unfinished"/> </message> </context> <context> <name>bitcoin-core</name> <message> <location filename="../bitLoco'strings.cpp" line="+173"/> <source>Loco version</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Usage:</source> <translation>Lietojums:</translation> </message> <message> <location line="+1"/> <source>Send command to -server or locod</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>List commands</source> <translation>Komandu saraksts</translation> </message> <message> <location line="+1"/> <source>Get help for a command</source> <translation>Palīdzība par komandu</translation> </message> <message> <location line="-147"/> <source>Options:</source> <translation>Iespējas:</translation> </message> <message> <location line="+2"/> <source>Specify configuration file (default: loco.conf)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Specify pid file (default: locod.pid)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Specify wallet file (within data directory)</source> <translation type="unfinished"/> </message> <message> <location line="-1"/> <source>Specify data directory</source> <translation>Norādiet datu direktoriju</translation> </message> <message> <location line="-25"/> <source>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=locorpc rpcpassword=%s 
(you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;Loco Alert&quot; [email protected] </source> <translation type="unfinished"/> </message> <message> <location line="+27"/> <source>Set database cache size in megabytes (default: 25)</source> <translation>Uzstādiet datu bāzes bufera izmēru megabaitos (pēc noklusēšanas: 25)</translation> </message> <message> <location line="+1"/> <source>Set database disk log size in megabytes (default: 100)</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Listen for connections on &lt;port&gt; (default: 35575 or testnet: 44575)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation>Uzturēt līdz &lt;n&gt; savienojumiem ar citiem mezgliem(pēc noklusēšanas: 125)</translation> </message> <message> <location line="+3"/> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation>Pievienoties mezglam, lai iegūtu citu mezglu adreses, un atvienoties</translation> </message> <message> <location line="+1"/> <source>Specify your own public address</source> <translation>Norādiet savu publisko adresi</translation> </message> <message> <location line="+4"/> <source>Bind to given address. Use [host]:port notation for IPv6</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Query for peer addresses via DNS lookup, if low on addresses (default: 1 unless -connect)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Always query for peer addresses via DNS lookup (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation>Slieksnis pārkāpējmezglu atvienošanai (pēc noklusēšanas: 100)</translation> </message> <message> <location line="+1"/> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation>Sekundes, cik ilgi atturēt pārkāpējmezglus no atkārtotas pievienošanās (pēc noklusēšanas: 86400)</translation> </message> <message> <location line="-37"/> <source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source> <translation type="unfinished"/> </message> <message> <location line="+65"/> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: 35574 or testnet: 44574)</source> <translation type="unfinished"/> </message> <message> <location line="-17"/> <source>Accept command line and JSON-RPC commands</source> <translation>Pieņemt komandrindas un JSON-RPC komandas</translation> </message> <message> <location line="+1"/> <source>Run in the background as a daemon and accept commands</source> <translation>Darbināt fonā kā servisu un pieņemt komandas</translation> </message> <message> <location line="+1"/> <source>Use the test network</source> <translation>Izmantot testa tīklu</translation> </message> <message> <location line="-24"/> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation type="unfinished"/> </message> <message> <location line="-29"/> <source>An error occurred while setting up the RPC port %u 
for listening on IPv6, falling back to IPv4: %s</source> <translation type="unfinished"/> </message> <message> <location line="+96"/> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source> <translation type="unfinished"/> </message> <message> <location line="+12"/> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation type="unfinished"/> </message> <message> <location line="-103"/> <source>Warning: Please check that your computer&apos;s date and time are correct! If your clock is wrong Loco will not work properly.</source> <translation type="unfinished"/> </message> <message> <location line="+132"/> <source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation type="unfinished"/> </message> <message> <location line="-18"/> <source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation type="unfinished"/> </message> <message> <location line="-31"/> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Block creation options:</source> <translation type="unfinished"/> </message> <message> <location line="-69"/> <source>Connect only to the specified node(s)</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation type="unfinished"/> </message> <message> <location line="+101"/> <source>Failed to listen on any port. 
Use -listen=0 if you want this.</source> <translation type="unfinished"/> </message> <message> <location line="-91"/> <source>Sync checkpoints policy (default: strict)</source> <translation type="unfinished"/> </message> <message> <location line="+89"/> <source>Invalid -tor address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Invalid amount for -reservebalance=&lt;amount&gt;</source> <translation type="unfinished"/> </message> <message> <location line="-88"/> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</source> <translation type="unfinished"/> </message> <message> <location line="-17"/> <source>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</source> <translation type="unfinished"/> </message> <message> <location line="+31"/> <source>Prepend debug output with timestamp</source> <translation type="unfinished"/> </message> <message> <location line="+41"/> <source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source> <translation type="unfinished"/> </message> <message> <location line="-81"/> <source>Select the version of socks proxy to use (4-5, default: 5)</source> <translation type="unfinished"/> </message> <message> <location line="+42"/> <source>Send trace/debug info to console instead of debug.log file</source> <translation>Debug/trace informāciju izvadīt konsolē, nevis debug.log failā</translation> </message> <message> <location line="+5"/> <source>Send trace/debug info to debugger</source> <translation type="unfinished"/> </message> <message> <location line="+30"/> <source>Set maximum block size in bytes (default: 250000)</source> <translation type="unfinished"/> </message> <message> <location line="-1"/> <source>Set minimum block size in bytes (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="-35"/> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation type="unfinished"/> </message> <message> <location line="-43"/> <source>Specify connection timeout in milliseconds (default: 5000)</source> <translation type="unfinished"/> </message> <message> <location line="+116"/> <source>Unable to sign checkpoint, wrong checkpointkey? 
</source> <translation type="unfinished"/> </message> <message> <location line="-86"/> <source>Use UPnP to map the listening port (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="-1"/> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation type="unfinished"/> </message> <message> <location line="-26"/> <source>Use proxy to reach tor hidden services (default: same as -proxy)</source> <translation type="unfinished"/> </message> <message> <location line="+47"/> <source>Username for JSON-RPC connections</source> <translation>JSON-RPC savienojumu lietotājvārds</translation> </message> <message> <location line="+51"/> <source>Verifying database integrity...</source> <translation type="unfinished"/> </message> <message> <location line="+44"/> <source>Error: Wallet locked, unable to create transaction!</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Error: Transaction creation failed!</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Error: The transaction was rejected! This might happen if some of the Loco's in your wallet were already spent, such as if you used a copy of wallet.dat and Loco's were spent in the copy but not marked as spent here.</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Warning</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Information</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>WARNING: syncronized checkpoint violation detected, but skipped!</source> <translation type="unfinished"/> </message> <message> <location line="-1"/> <source>Warning: This version is obsolete, upgrade required!</source> <translation type="unfinished"/> </message> <message> <location line="-54"/> <source>wallet.dat corrupt, salvage failed</source> <translation type="unfinished"/> </message> <message> <location line="-56"/> <source>Password for JSON-RPC connections</source> <translation>JSON-RPC savienojumu parole</translation> </message> <message> <location line="-32"/> <source>Sync time with other nodes. Disable if time on your system is precise e.g. syncing with NTP (default: 1)</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>When creating transactions, ignore inputs with value less than this (default: 0.01)</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Output debugging information (default: 0, supplying &lt;category&gt; is optional)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>If &lt;category&gt; is not supplied, output all debugging information.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>&lt;category&gt; can be:</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Enter regression test mode, which uses a special chain in which blocks can be solved instantly. 
This is intended for regression testing tools and app development.</source> <translation type="unfinished"/> </message> <message> <location line="+9"/> <source>Allow JSON-RPC connections from specified IP address</source> <translation>Atļaut JSON-RPC savienojumus no norādītās IP adreses</translation> </message> <message> <location line="+1"/> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation>Nosūtīt komandas mezglam, kas darbojas adresē &lt;ip&gt; (pēc noklusēšanas: 127.0.0.1)</translation> </message> <message> <location line="+1"/> <source>Wait for RPC server to start</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Set the number of threads to service RPC calls (default: 4)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation>Izpildīt komandu, kad labāk atbilstošais bloks izmainās (%s cmd aizvieto ar bloka hešu)</translation> </message> <message> <location line="+3"/> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Require a confirmations for change (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Upgrade wallet to latest format</source> <translation>Atjaunot maciņa formātu uz jaunāko</translation> </message> <message> <location line="+1"/> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation>Uzstādīt atslēgu bufera izmēru uz &lt;n&gt; (pēc noklusēšanas: 100)</translation> </message> <message> <location line="+1"/> <source>Rescan the block chain for missing wallet transactions</source> <translation>Atkārtoti skanēt bloku virkni, meklējot trūkstošās maciņa transakcijas</translation> </message> <message> <location line="+3"/> <source>How thorough the block verification is (0-6, default: 1)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Imports blocks from external blk000?.dat file</source> <translation type="unfinished"/> </message> <message> <location line="+9"/> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation>JSON-RPC savienojumiem izmantot OpenSSL (https)</translation> </message> <message> <location line="+1"/> <source>Server certificate file (default: server.cert)</source> <translation>Servera sertifikāta fails (pēc noklusēšanas: server.cert)</translation> </message> <message> <location line="+1"/> <source>Server private key (default: server.pem)</source> <translation>Servera privātā atslēga (pēc noklusēšanas: server.pem)</translation> </message> <message> <location line="+10"/> <source>Initialization sanity check failed. 
Loco is shutting down.</source> <translation type="unfinished"/> </message> <message> <location line="+50"/> <source>Error: Wallet unlocked for staking only, unable to create transaction.</source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <source>Error: Disk space is low!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>WARNING: Invalid checkpoint found! Displayed transactions may not be correct! You may need to upgrade, or notify developers.</source> <translation type="unfinished"/> </message> <message> <location line="-174"/> <source>This help message</source> <translation>Šis palīdzības paziņojums</translation> </message> <message> <location line="+104"/> <source>Wallet %s resides outside data directory %s.</source> <translation type="unfinished"/> </message> <message> <location line="+37"/> <source>Unable to bind to %s on this computer (bind returned error %d, %s)</source> <translation>Nevar pievienoties pie %s šajā datorā (pievienošanās atgrieza kļūdu %d, %s)</translation> </message> <message> <location line="-133"/> <source>Connect through socks proxy</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation>Atļaut DNS uzmeklēšanu priekš -addnode, -seednode un -connect</translation> </message> <message> <location line="+126"/> <source>Loading addresses...</source> <translation>Ielādē adreses...</translation> </message> <message> <location line="-12"/> <source>Error loading blkindex.dat</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Error loading wallet.dat: Wallet corrupted</source> <translation>Nevar ielādēt wallet.dat: maciņš bojāts</translation> </message> <message> <location line="+4"/> <source>Error loading wallet.dat: Wallet requires newer version of Loco</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Wallet needed to be rewritten: restart Loco to complete</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error loading wallet.dat</source> <translation>Kļūda ielādējot wallet.dat</translation> </message> <message> <location line="-16"/> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation>Nederīga -proxy adrese: &apos;%s&apos;</translation> </message> <message> <location line="-1"/> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation>-onlynet komandā norādīts nepazīstams tīkls: &apos;%s&apos;</translation> </message> <message> <location line="-1"/> <source>Unknown -socks proxy version requested: %i</source> <translation>Pieprasīta nezināma -socks proxy versija: %i</translation> </message> <message> <location line="+4"/> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation>Nevar uzmeklēt -bind adresi: &apos;%s&apos;</translation> </message> <message> <location line="+2"/> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation>Nevar atrisināt -externalip adresi: &apos;%s&apos;</translation> </message> <message> <location line="-23"/> <source>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Nederīgs daudzums priekš -paytxfree=&lt;amount&gt;: 
&apos;%s&apos;</translation> </message> <message> <location line="+60"/> <source>Sending...</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Invalid amount</source> <translation>Nederīgs daudzums</translation> </message> <message> <location line="+1"/> <source>Insufficient funds</source> <translation>Nepietiek bitkoinu</translation> </message> <message> <location line="-40"/> <source>Loading block index...</source> <translation>Ielādē bloku indeksu...</translation> </message> <message> <location line="-110"/> <source>Add a node to connect to and attempt to keep the connection open</source> <translation>Pievienot mezglu, kam pievienoties un turēt savienojumu atvērtu</translation> </message> <message> <location line="+125"/> <source>Unable to bind to %s on this computer. Loco is probably already running.</source> <translation type="unfinished"/> </message> <message> <location line="-101"/> <source>Fee per KB to add to transactions you send</source> <translation type="unfinished"/> </message> <message> <location line="+34"/> <source>Minimize weight consumption (experimental) (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>How many blocks to check at startup (default: 500, 0 = all)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Keep at most &lt;n&gt; unconnectable blocks in memory (default: %u)</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>Acceptable ciphers (default: TLSv1.2+HIGH:TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!3DES:@STRENGTH)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning: Deprecated argument -debugnet ignored, use -debug=net</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Invalid amount for -mininput=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Cannot obtain a lock on data directory %s. 
Loco is probably already running.</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Error initializing wallet database environment %s!</source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <source>Loading wallet...</source> <translation>Ielādē maciņu...</translation> </message> <message> <location line="+8"/> <source>Cannot downgrade wallet</source> <translation>Nevar maciņa formātu padarīt vecāku</translation> </message> <message> <location line="+1"/> <source>Cannot write default address</source> <translation>Nevar ierakstīt adresi pēc noklusēšanas</translation> </message> <message> <location line="+1"/> <source>Rescanning...</source> <translation>Skanēju no jauna...</translation> </message> <message> <location line="+2"/> <source>Done loading</source> <translation>Ielāde pabeigta</translation> </message> <message> <location line="-161"/> <source>To use the %s option</source> <translation>Izmantot opciju %s</translation> </message> <message> <location line="+200"/> <source>Error</source> <translation>Kļūda</translation> </message> <message> <location line="-18"/> <source>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</source> <translation>Konfigurācijas failā jāuzstāda rpcpassword=&lt;password&gt;: %s Ja fails neeksistē, izveidojiet to ar atļauju lasīšanai tikai īpašniekam.</translation> </message> </context> </TS><|fim▁end|>
<source>&amp;Save As...</source> <translation type="unfinished"/> </message>
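The row above is a Qt Linguist (.ts) catalog from a Bitcoin-derived wallet: each <message> pairs an English <source> string with a Latvian <translation>, and type="unfinished" marks strings still awaiting translation. More generally, every row in this dump concatenates a fill-in-the-middle prompt (prefix, <|fim▁hole|>, suffix, <|fim▁end|>) with its completion, so the original file can be recovered by splicing the completion into the hole. A minimal sketch of that reassembly, assuming exactly the sentinel strings visible in the rows:

# Splice a row's completion back into its hole to recover the original file.
HOLE = "<|fim▁hole|>"
END = "<|fim▁end|>"

def reassemble(prompt: str, completion: str) -> str:
    prefix, rest = prompt.split(HOLE, 1)
    suffix = rest.split(END, 1)[0]
    return prefix + completion + suffix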
<|file_name|>mouseevent.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use dom::bindings::codegen::Bindings::MouseEventBinding; use dom::bindings::codegen::Bindings::MouseEventBinding::MouseEventMethods; use dom::bindings::codegen::Bindings::UIEventBinding::UIEventMethods; use dom::bindings::codegen::InheritTypes::{EventCast, UIEventCast, MouseEventDerived}; use dom::bindings::error::Fallible; use dom::bindings::global::GlobalRef; use dom::bindings::js::{MutNullableJS, JSRef, RootedReference, Temporary, OptionalSettable}; use dom::bindings::utils::reflect_dom_object; use dom::event::{Event, EventTypeId}; use dom::eventtarget::EventTarget; use dom::uievent::UIEvent; use dom::window::Window; use servo_util::str::DOMString; use std::cell::Cell; use std::default::Default; #[dom_struct] pub struct MouseEvent { uievent: UIEvent, screen_x: Cell<i32>, screen_y: Cell<i32>, client_x: Cell<i32>, client_y: Cell<i32>, ctrl_key: Cell<bool>, shift_key: Cell<bool>, alt_key: Cell<bool>, meta_key: Cell<bool>, button: Cell<i16>, related_target: MutNullableJS<EventTarget> } impl MouseEventDerived for Event { fn is_mouseevent(&self) -> bool { *self.type_id() == EventTypeId::MouseEvent } } impl MouseEvent { fn new_inherited() -> MouseEvent { MouseEvent { uievent: UIEvent::new_inherited(EventTypeId::MouseEvent), screen_x: Cell::new(0), screen_y: Cell::new(0), client_x: Cell::new(0), client_y: Cell::new(0), ctrl_key: Cell::new(false), shift_key: Cell::new(false), alt_key: Cell::new(false), meta_key: Cell::new(false), button: Cell::new(0), related_target: Default::default(), } } pub fn new_uninitialized(window: JSRef<Window>) -> Temporary<MouseEvent> { reflect_dom_object(box MouseEvent::new_inherited(), GlobalRef::Window(window), MouseEventBinding::Wrap) } pub fn new(window: JSRef<Window>, type_: DOMString, canBubble: bool, cancelable: bool, view: Option<JSRef<Window>>, detail: i32, screenX: i32, screenY: i32, clientX: i32, clientY: i32, ctrlKey: bool, altKey: bool, shiftKey: bool, metaKey: bool, button: i16, relatedTarget: Option<JSRef<EventTarget>>) -> Temporary<MouseEvent> { let ev = MouseEvent::new_uninitialized(window).root(); ev.r().InitMouseEvent(type_, canBubble, cancelable, view, detail, screenX, screenY, clientX, clientY, ctrlKey, altKey, shiftKey, metaKey, button, relatedTarget); Temporary::from_rooted(ev.r()) } pub fn Constructor(global: GlobalRef, type_: DOMString, init: &MouseEventBinding::MouseEventInit) -> Fallible<Temporary<MouseEvent>> { let event = MouseEvent::new(global.as_window(), type_, init.parent.parent.parent.bubbles, init.parent.parent.parent.cancelable, init.parent.parent.view.r(), init.parent.parent.detail, init.screenX, init.screenY, init.clientX, init.clientY, init.parent.ctrlKey, init.parent.altKey, init.parent.shiftKey, init.parent.metaKey, init.button, init.relatedTarget.r()); Ok(event) } } impl<'a> MouseEventMethods for JSRef<'a, MouseEvent> { fn ScreenX(self) -> i32 { self.screen_x.get() } fn ScreenY(self) -> i32 { self.screen_y.get() } fn ClientX(self) -> i32 { self.client_x.get() } fn ClientY(self) -> i32 { self.client_y.get() } fn CtrlKey(self) -> bool { self.ctrl_key.get() } fn ShiftKey(self) -> bool { self.shift_key.get() } fn AltKey(self) -> bool { self.alt_key.get() } fn MetaKey(self) -> bool { self.meta_key.get() } fn Button(self) -> i16 { self.button.get() } fn GetRelatedTarget(self) 
-> Option<Temporary<EventTarget>> { self.related_target.get() } fn InitMouseEvent(self, typeArg: DOMString, canBubbleArg: bool, cancelableArg: bool, viewArg: Option<JSRef<Window>>, detailArg: i32, screenXArg: i32, screenYArg: i32, clientXArg: i32, clientYArg: i32, ctrlKeyArg: bool, altKeyArg: bool, shiftKeyArg: bool, metaKeyArg: bool, buttonArg: i16, relatedTargetArg: Option<JSRef<EventTarget>>) { let event: JSRef<Event> = EventCast::from_ref(self); if event.dispatching() { return; } let uievent: JSRef<UIEvent> = UIEventCast::from_ref(self); uievent.InitUIEvent(typeArg, canBubbleArg, cancelableArg, viewArg, detailArg); self.screen_x.set(screenXArg); self.screen_y.set(screenYArg); self.client_x.set(clientXArg); self.client_y.set(clientYArg); self.ctrl_key.set(ctrlKeyArg); self.alt_key.set(altKeyArg);<|fim▁hole|> self.related_target.assign(relatedTargetArg); } }<|fim▁end|>
self.shift_key.set(shiftKeyArg); self.meta_key.set(metaKeyArg); self.button.set(buttonArg);
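In the Rust row above, InitMouseEvent returns early while the event is mid-dispatch, delegates shared state to the UIEvent initializer, and only then writes the Cell-backed fields; the completion supplies the three missing setters (shift_key, meta_key, button). A rough Python analogy of that guard-then-delegate shape, with class and attribute names invented for illustration:

# Invented names; mirrors the guard-then-delegate shape of InitMouseEvent.
class UIEvent:
    def init_ui_event(self, type_, bubbles, cancelable, view, detail):
        self.type_, self.bubbles, self.cancelable = type_, bubbles, cancelable
        self.view, self.detail = view, detail

class MouseEvent(UIEvent):
    dispatching = False  # flipped by the event loop while the event is in flight

    def init_mouse_event(self, type_, bubbles, cancelable, view, detail,
                         shift_key, meta_key, button):
        if self.dispatching:      # re-initialization during dispatch is a no-op
            return
        self.init_ui_event(type_, bubbles, cancelable, view, detail)
        self.shift_key, self.meta_key, self.button = shift_key, meta_key, button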
<|file_name|>300 - Maya Calendar.cpp<|end_file_name|><|fim▁begin|>#include <stdio.h> #include <string.h> char haab[19][10] = { "pop", "no", "zip", "zotz", "tzec", "xul", "yoxkin", "mol", "chen", "yax", "zac", "ceh", "mac", "kankin", "muan", "pax", "koyab", "cumhu", "uayet" }; char tzolkin[20][10] = { "imix", "ik", "akbal", "kan", "chicchan", "cimi", "manik", "lamat", "muluk", "ok", "chuen", "eb", "ben", "ix", "mem", "cib", "caban", "eznab", "canac", "ahau" }; int main(void) { int n; int d, m, y; int sum; char month[10]; scanf("%d", &n); printf("%d\n", n); while (n--) { scanf(" %d. %s %d", &d, month, &y); for (int i = 0; i < 19; i++) if (!strcmp(month, haab[i])) { m = i; break; } <|fim▁hole|> return 0; }<|fim▁end|>
sum = y*365 + m*20 + d; printf("%d %s %d\n", sum%260%13+1, tzolkin[sum%260%20], sum/260); }
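The completion above converts a Haab date to a Tzolkin date: with 365-day Haab years, 20-day months, and a 260-day Tzolkin year, the total day count determines both the 1-13 number and the 20-name cycle. Because 260 = 13 * 20, the extra %260 in the printf is redundant (sum % 260 % 13 == sum % 13, and likewise for %20). A worked Python sketch of the same arithmetic:

# Same conversion as the C++ row: the total day count drives both cycles.
TZOLKIN = ["imix", "ik", "akbal", "kan", "chicchan", "cimi", "manik",
           "lamat", "muluk", "ok", "chuen", "eb", "ben", "ix", "mem",
           "cib", "caban", "eznab", "canac", "ahau"]

def haab_to_tzolkin(day, month_index, year):
    total = year * 365 + month_index * 20 + day  # Haab: 365-day year, 20-day months
    return total % 13 + 1, TZOLKIN[total % 20], total // 260

# haab_to_tzolkin(10, 1, 0) -> (5, 'chuen', 0): day 30 overall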
<|file_name|>vec.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use test::Bencher; use std::iter::{FromIterator, repeat}; #[bench] fn bench_new(b: &mut Bencher) { b.iter(|| { let v: Vec<u32> = Vec::new(); assert_eq!(v.len(), 0); assert_eq!(v.capacity(), 0); }) } fn do_bench_with_capacity(b: &mut Bencher, src_len: usize) { b.bytes = src_len as u64; b.iter(|| { let v: Vec<u32> = Vec::with_capacity(src_len); assert_eq!(v.len(), 0); assert_eq!(v.capacity(), src_len); }) } #[bench] fn bench_with_capacity_0000(b: &mut Bencher) { do_bench_with_capacity(b, 0) } #[bench] fn bench_with_capacity_0010(b: &mut Bencher) { do_bench_with_capacity(b, 10) } #[bench] fn bench_with_capacity_0100(b: &mut Bencher) { do_bench_with_capacity(b, 100) } #[bench] fn bench_with_capacity_1000(b: &mut Bencher) { do_bench_with_capacity(b, 1000) } fn do_bench_from_fn(b: &mut Bencher, src_len: usize) { b.bytes = src_len as u64; b.iter(|| { let dst = (0..src_len).collect::<Vec<_>>(); assert_eq!(dst.len(), src_len); assert!(dst.iter().enumerate().all(|(i, x)| i == *x)); }) } #[bench] fn bench_from_fn_0000(b: &mut Bencher) { do_bench_from_fn(b, 0) } #[bench] fn bench_from_fn_0010(b: &mut Bencher) { do_bench_from_fn(b, 10) } #[bench] fn bench_from_fn_0100(b: &mut Bencher) { do_bench_from_fn(b, 100) } #[bench] fn bench_from_fn_1000(b: &mut Bencher) { do_bench_from_fn(b, 1000) } fn do_bench_from_elem(b: &mut Bencher, src_len: usize) { b.bytes = src_len as u64; b.iter(|| { let dst: Vec<usize> = repeat(5).take(src_len).collect(); assert_eq!(dst.len(), src_len); assert!(dst.iter().all(|x| *x == 5)); }) } #[bench] fn bench_from_elem_0000(b: &mut Bencher) { do_bench_from_elem(b, 0) } #[bench] fn bench_from_elem_0010(b: &mut Bencher) { do_bench_from_elem(b, 10) } #[bench] fn bench_from_elem_0100(b: &mut Bencher) { do_bench_from_elem(b, 100) } #[bench] fn bench_from_elem_1000(b: &mut Bencher) { do_bench_from_elem(b, 1000) } fn do_bench_from_slice(b: &mut Bencher, src_len: usize) { let src: Vec<_> = FromIterator::from_iter(0..src_len); b.bytes = src_len as u64; b.iter(|| { let dst = src.clone()[..].to_vec(); assert_eq!(dst.len(), src_len); assert!(dst.iter().enumerate().all(|(i, x)| i == *x)); }); } #[bench] fn bench_from_slice_0000(b: &mut Bencher) { do_bench_from_slice(b, 0) } #[bench] fn bench_from_slice_0010(b: &mut Bencher) { do_bench_from_slice(b, 10) } #[bench] fn bench_from_slice_0100(b: &mut Bencher) { do_bench_from_slice(b, 100) } #[bench] fn bench_from_slice_1000(b: &mut Bencher) { do_bench_from_slice(b, 1000) } fn do_bench_from_iter(b: &mut Bencher, src_len: usize) { let src: Vec<_> = FromIterator::from_iter(0..src_len); b.bytes = src_len as u64; b.iter(|| { let dst: Vec<_> = FromIterator::from_iter(src.clone()); assert_eq!(dst.len(), src_len); assert!(dst.iter().enumerate().all(|(i, x)| i == *x)); }); } #[bench] fn bench_from_iter_0000(b: &mut Bencher) { do_bench_from_iter(b, 0) } #[bench] fn bench_from_iter_0010(b: &mut Bencher) { do_bench_from_iter(b, 10) } #[bench] fn bench_from_iter_0100(b: &mut Bencher) { do_bench_from_iter(b, 100) } #[bench] fn bench_from_iter_1000(b: &mut 
Bencher) { do_bench_from_iter(b, 1000) } fn do_bench_extend(b: &mut Bencher, dst_len: usize, src_len: usize) { let dst: Vec<_> = FromIterator::from_iter(0..dst_len); let src: Vec<_> = FromIterator::from_iter(dst_len..dst_len + src_len); b.bytes = src_len as u64; b.iter(|| { let mut dst = dst.clone(); dst.extend(src.clone()); assert_eq!(dst.len(), dst_len + src_len); assert!(dst.iter().enumerate().all(|(i, x)| i == *x)); }); } #[bench] fn bench_extend_0000_0000(b: &mut Bencher) { do_bench_extend(b, 0, 0) } #[bench] fn bench_extend_0000_0010(b: &mut Bencher) { do_bench_extend(b, 0, 10) } #[bench] fn bench_extend_0000_0100(b: &mut Bencher) { do_bench_extend(b, 0, 100) } #[bench] fn bench_extend_0000_1000(b: &mut Bencher) { do_bench_extend(b, 0, 1000) } #[bench] fn bench_extend_0010_0010(b: &mut Bencher) { do_bench_extend(b, 10, 10) } #[bench] fn bench_extend_0100_0100(b: &mut Bencher) { do_bench_extend(b, 100, 100) } #[bench] fn bench_extend_1000_1000(b: &mut Bencher) { do_bench_extend(b, 1000, 1000) } fn do_bench_push_all(b: &mut Bencher, dst_len: usize, src_len: usize) { let dst: Vec<_> = FromIterator::from_iter(0..dst_len); let src: Vec<_> = FromIterator::from_iter(dst_len..dst_len + src_len); b.bytes = src_len as u64; b.iter(|| { let mut dst = dst.clone(); dst.extend_from_slice(&src); assert_eq!(dst.len(), dst_len + src_len); assert!(dst.iter().enumerate().all(|(i, x)| i == *x)); }); } #[bench] fn bench_push_all_0000_0000(b: &mut Bencher) { do_bench_push_all(b, 0, 0) } #[bench] fn bench_push_all_0000_0010(b: &mut Bencher) { do_bench_push_all(b, 0, 10) } #[bench] fn bench_push_all_0000_0100(b: &mut Bencher) { do_bench_push_all(b, 0, 100) } #[bench] fn bench_push_all_0000_1000(b: &mut Bencher) { do_bench_push_all(b, 0, 1000) } #[bench] fn bench_push_all_0010_0010(b: &mut Bencher) { do_bench_push_all(b, 10, 10) } #[bench] fn bench_push_all_0100_0100(b: &mut Bencher) { do_bench_push_all(b, 100, 100) } #[bench] fn bench_push_all_1000_1000(b: &mut Bencher) { do_bench_push_all(b, 1000, 1000) } fn do_bench_push_all_move(b: &mut Bencher, dst_len: usize, src_len: usize) { let dst: Vec<_> = FromIterator::from_iter(0..dst_len); let src: Vec<_> = FromIterator::from_iter(dst_len..dst_len + src_len); b.bytes = src_len as u64; b.iter(|| { let mut dst = dst.clone(); dst.extend(src.clone()); assert_eq!(dst.len(), dst_len + src_len); assert!(dst.iter().enumerate().all(|(i, x)| i == *x)); }); } #[bench] fn bench_push_all_move_0000_0000(b: &mut Bencher) { do_bench_push_all_move(b, 0, 0) } #[bench] fn bench_push_all_move_0000_0010(b: &mut Bencher) { do_bench_push_all_move(b, 0, 10) } #[bench] fn bench_push_all_move_0000_0100(b: &mut Bencher) { do_bench_push_all_move(b, 0, 100) } #[bench] fn bench_push_all_move_0000_1000(b: &mut Bencher) { do_bench_push_all_move(b, 0, 1000) } #[bench] fn bench_push_all_move_0010_0010(b: &mut Bencher) { do_bench_push_all_move(b, 10, 10) } #[bench] fn bench_push_all_move_0100_0100(b: &mut Bencher) { do_bench_push_all_move(b, 100, 100) } #[bench] fn bench_push_all_move_1000_1000(b: &mut Bencher) { do_bench_push_all_move(b, 1000, 1000) } fn do_bench_clone(b: &mut Bencher, src_len: usize) { let src: Vec<usize> = FromIterator::from_iter(0..src_len); b.bytes = src_len as u64; b.iter(|| { let dst = src.clone(); assert_eq!(dst.len(), src_len); assert!(dst.iter().enumerate().all(|(i, x)| i == *x)); }); } #[bench] fn bench_clone_0000(b: &mut Bencher) { do_bench_clone(b, 0) } #[bench] fn bench_clone_0010(b: &mut Bencher) { do_bench_clone(b, 10) } #[bench] fn bench_clone_0100(b: &mut 
Bencher) { do_bench_clone(b, 100) } #[bench] fn bench_clone_1000(b: &mut Bencher) { do_bench_clone(b, 1000) } fn do_bench_clone_from(b: &mut Bencher, times: usize, dst_len: usize, src_len: usize) { let dst: Vec<_> = FromIterator::from_iter(0..src_len); let src: Vec<_> = FromIterator::from_iter(dst_len..dst_len + src_len); b.bytes = (times * src_len) as u64; b.iter(|| { let mut dst = dst.clone(); for _ in 0..times { dst.clone_from(&src); assert_eq!(dst.len(), src_len); assert!(dst.iter().enumerate().all(|(i, x)| dst_len + i == *x)); } }); } #[bench] fn bench_clone_from_01_0000_0000(b: &mut Bencher) { do_bench_clone_from(b, 1, 0, 0) } #[bench] fn bench_clone_from_01_0000_0010(b: &mut Bencher) { do_bench_clone_from(b, 1, 0, 10) } #[bench] fn bench_clone_from_01_0000_0100(b: &mut Bencher) { do_bench_clone_from(b, 1, 0, 100) } #[bench] fn bench_clone_from_01_0000_1000(b: &mut Bencher) { do_bench_clone_from(b, 1, 0, 1000) } #[bench] fn bench_clone_from_01_0010_0010(b: &mut Bencher) { do_bench_clone_from(b, 1, 10, 10) } #[bench] fn bench_clone_from_01_0100_0100(b: &mut Bencher) { do_bench_clone_from(b, 1, 100, 100) } #[bench] fn bench_clone_from_01_1000_1000(b: &mut Bencher) { do_bench_clone_from(b, 1, 1000, 1000) } #[bench] fn bench_clone_from_01_0010_0100(b: &mut Bencher) { do_bench_clone_from(b, 1, 10, 100) } #[bench] fn bench_clone_from_01_0100_1000(b: &mut Bencher) { do_bench_clone_from(b, 1, 100, 1000) } #[bench] fn bench_clone_from_01_0010_0000(b: &mut Bencher) { do_bench_clone_from(b, 1, 10, 0) } #[bench] fn bench_clone_from_01_0100_0010(b: &mut Bencher) { do_bench_clone_from(b, 1, 100, 10) } #[bench] fn bench_clone_from_01_1000_0100(b: &mut Bencher) { do_bench_clone_from(b, 1, 1000, 100) } #[bench] fn bench_clone_from_10_0000_0000(b: &mut Bencher) { do_bench_clone_from(b, 10, 0, 0) } #[bench] fn bench_clone_from_10_0000_0010(b: &mut Bencher) { do_bench_clone_from(b, 10, 0, 10) } #[bench] fn bench_clone_from_10_0000_0100(b: &mut Bencher) { do_bench_clone_from(b, 10, 0, 100) } #[bench] fn bench_clone_from_10_0000_1000(b: &mut Bencher) { do_bench_clone_from(b, 10, 0, 1000) } #[bench] fn bench_clone_from_10_0010_0010(b: &mut Bencher) { do_bench_clone_from(b, 10, 10, 10) } #[bench] fn bench_clone_from_10_0100_0100(b: &mut Bencher) { do_bench_clone_from(b, 10, 100, 100) } #[bench] fn bench_clone_from_10_1000_1000(b: &mut Bencher) { do_bench_clone_from(b, 10, 1000, 1000) } #[bench] fn bench_clone_from_10_0010_0100(b: &mut Bencher) { do_bench_clone_from(b, 10, 10, 100) } #[bench] fn bench_clone_from_10_0100_1000(b: &mut Bencher) {<|fim▁hole|>#[bench] fn bench_clone_from_10_0010_0000(b: &mut Bencher) { do_bench_clone_from(b, 10, 10, 0) } #[bench] fn bench_clone_from_10_0100_0010(b: &mut Bencher) { do_bench_clone_from(b, 10, 100, 10) } #[bench] fn bench_clone_from_10_1000_0100(b: &mut Bencher) { do_bench_clone_from(b, 10, 1000, 100) }<|fim▁end|>
do_bench_clone_from(b, 10, 100, 1000) }
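The vec.rs row drives every benchmark through one parameterized do_bench_* helper, with thin #[bench] wrappers naming each (times, dst_len, src_len) grid point; the completion is simply the missing wrapper body. A hedged Python equivalent of that grid pattern using timeit (sizes and the cloned workload are illustrative):

# Sketch of the same pattern: one parameterized body, many named grid points.
import timeit

def do_bench_clone_from(times, dst_len, src_len):
    src = list(range(dst_len, dst_len + src_len))
    return timeit.timeit(lambda: [list(src) for _ in range(times)], number=100)

# One entry per (times, dst_len, src_len) point, mirroring the Rust wrappers.
GRID = [(1, 0, 0), (1, 0, 10), (10, 100, 1000), (10, 1000, 100)]
for point in GRID:
    print(point, do_bench_clone_from(*point))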
<|file_name|>post-migration.py<|end_file_name|><|fim▁begin|>############################################################################## # # Copyright (C) 2020 Compassion CH (http://www.compassion.ch) # @author: Théo Nikles <[email protected]> # # The licence is in the file __manifest__.py # ############################################################################## def migrate(cr, version): if not version: return cr.execute(""" INSERT INTO mobile_app_messages(partner_id) SELECT id FROM res_partner; UPDATE res_partner p<|fim▁hole|> """)<|fim▁end|>
SET app_messages = ( SELECT id FROM mobile_app_messages WHERE partner_id = p.id );
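The migration row inserts exactly one mobile_app_messages row per partner and then backfills res_partner.app_messages with a correlated subquery; the completion supplies the SET clause, and the subquery returns a single id per partner precisely because of the preceding INSERT. The same backfill can also be written as a join UPDATE. A sketch in PostgreSQL syntax (which Odoo migrations run against), assuming the INSERT step has already run:

# Join-style alternative to the row's correlated subquery (PostgreSQL syntax).
ALT_BACKFILL = """
    UPDATE res_partner p
    SET app_messages = m.id
    FROM mobile_app_messages m
    WHERE m.partner_id = p.id;
"""

def migrate(cr, version):
    if not version:           # same fresh-install guard as the row above
        return
    cr.execute(ALT_BACKFILL)  # cr: the DB-API cursor the framework passes in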
<|file_name|>pack.py<|end_file_name|><|fim▁begin|>""" This is used to pack testlib into a JSON fixture Then you can load it into the database by using: manage.py loaddata <fixturename> fixturename here is `testlib.json` """ import hashlib import json from os import path, listdir def hash(binary): return hashlib.sha256(binary).hexdigest() category = ['checker', 'generator', 'validator'] father_dir = path.dirname(__file__) output_file = open(path.join(father_dir, 'testlib.json'), 'w') data = []<|fim▁hole|>
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>import re from django.conf.urls import url, patterns, include from django.conf import settings from django.contrib import admin from django.views.generic import TemplateView from django.template import add_to_builtins from fabrydb.admin import fadmin<|fim▁hole|> urlpatterns = patterns('', # Landing Page url(r'^$', 'fabry.views.home', name='home'), url(r'^$', 'fabrydb.views.landing', name='landing'), url(r'^accounts/login/$', 'django.contrib.auth.views.login', name='login'), url(r'^accounts/logout/$', 'django.contrib.auth.views.logout', name='logout'), # Cilantro Pages url(r'^workspace/', TemplateView.as_view(template_name='index.html'), name='workspace'), url(r'^query/', TemplateView.as_view(template_name='index.html'), name='query'), url(r'^results/', TemplateView.as_view(template_name='index.html'), name='results'), # Serrano-compatible Endpoint url(r'^api/', include('serrano.urls')), # Administrative components url(r'^admin/', include(admin.site.urls)), url(r'^fadmin/', include(fadmin.urls), name='fadmin'), ) # if not settings.DEBUG: urlpatterns += patterns( '', (r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_ROOT}), ) # In production, these two locations must be served up statically urlpatterns += patterns('django.views.static', url(r'^{0}(?P<path>.*)$'.format(re.escape(settings.MEDIA_URL.lstrip('/'))), 'serve', { 'document_root': settings.MEDIA_ROOT }), url(r'^{0}(?P<path>.*)$'.format(re.escape(settings.STATIC_URL.lstrip('/'))), 'serve', { 'document_root': settings.STATIC_ROOT }), )<|fim▁end|>
add_to_builtins('avocado.templatetags.avocado_tags') admin.autodiscover()
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* * Copyright (c) Facebook, Inc. and its affiliates. * * This software may be used and distributed according to the terms of the<|fim▁hole|> ///! Async job scheduling utilities for a blocking application ///! ///! We have a blocking application. We have async libraries. This crate provides common utilities ///! for communicating between the blocking world and the async world. It is intended to be a guide ///! so that not all developers have to get in depth understanding of Tokio in order to use async ///! functions. ///! ///! The crate sets up a common Runtime that all async tasks run on. We use a threaded scheduler ///! which enables parallelism. The async code is expected to be ran in parallel, not just ///! concurrently. As a reminder, Python has concurrency with multiple threads but no parallelism ///! because of the global interpreter lock. ///! The Runtime will get forcefully shut down when the main thread exits. Any running background ///! work will be lost at that time. This is not a hard requirement though, we can be tweak it to ///! wait for tasks to finish but requires some ceremony around the Runtime. Since we have no need ///! for that right now so that feature is skipped for now. ///! ///! TODO(T74221415): monitoring, signal handling use futures::future::Future; use futures::stream::{BoxStream, Stream, StreamExt}; use futures::FutureExt; use futures::{pin_mut, select}; use once_cell::sync::Lazy; use std::io::{Error, ErrorKind}; use tokio::runtime::{Builder as RuntimeBuilder, Runtime}; use tokio::task::JoinHandle; static RUNTIME: Lazy<Runtime> = Lazy::new(|| { let nproc = num_cpus::get(); RuntimeBuilder::new_multi_thread() .worker_threads(nproc.min(8)) .enable_all() .build() .expect("failed to initialize the async runtime") }); pub static STREAM_BUFFER_SIZE: usize = 128; /// Spawn a task using the runtime. pub fn spawn<T>(task: T) -> JoinHandle<T::Output> where T: Future + Send + 'static, T::Output: Send + 'static, { RUNTIME.spawn(task) } /// Run the provided function on an executor dedicated to blocking operations. pub fn spawn_blocking<F, R>(func: F) -> JoinHandle<R> where F: FnOnce() -> R + Send + 'static, R: Send + 'static, { RUNTIME.spawn_blocking(func) } /// Blocks the current thread while waiting for the computation defined by the Future `f` to /// complete. /// /// Unlike `block_on_exclusive`, this can be nested without panic. pub fn block_on_future<F>(f: F) -> F::Output where F::Output: Send, F: Future + Send + 'static, { block_on_exclusive(f) } /// Blocks the current thread while waiting for the computation defined by the Future `f`. /// Also blocks other `block_on_future` calls. /// /// This is intended to be used when `f` is not `'static` and cannot be used in /// `block_on_future`. pub fn block_on_exclusive<F>(f: F) -> F::Output where F: Future, { RUNTIME.block_on(f) } /// Takes an async stream and provide its contents in the form of a regular iterator. /// /// If processing one of the items in the stream panics then the stream stops without further /// feedback. It wouldn't be a bad idea to propagate the issue somehow. /// /// This implementation will poll as long as there is space in the buffer. The sync iterator will /// be returned practically right after the function is called. Calls to `next()` on the /// iterator will block as long as there are no items to read from the buffer and stream items are /// in flight. Calls to `next()` return as soon as items are available to pop from the buffer. 
/// `STREAM_BUFFER_SIZE` determines the default size of the buffer. If this value is inconvenient /// then check out `RunStreamOptions` which allows tweaking the buffer size. The buffering is just /// to streamline the async/parallel computation and manage some of the synchronization overhead. /// Unless the stream `s` is buffered, the items in the stream will be processed one after the /// other. /// When you want to process items strictly one after the other without any sort of buffering, you /// should use `block_on_future(my_stream.next())`. /// Example. /// 1. unbuffered stream, stream_to_iter buffer size = 2. /// - the stream has the first item polled and the result is added to the buffer /// - the stream continues with polling the second item; while this is happening the iterator /// that is returned may start being consumed releasing capacity in the buffer; for our example /// let's say that the blocking thread hasn't reached that point yet and the stream fills the /// buffer using the second item /// - the stream will now poll the third item; assuming that the buffer is still full when the /// computation is done, it will yield the thread that is is running on until the blocking /// thread reads one of the items in the buffer. /// 2. buffered stream over 2 items (ordered), stream_to_iter buffer = 2 /// - the stream will take 2 futures from the underlying iterator and poll on them; when the /// first one returns it enquees the result in our buffer and polls the third future in the /// underlying stream. Assuming that f(x) produces r(x) we could write: /// stream: #f1, *f2, *f3, f4, f5 /// buffer: r1 /// - let's assume that the blocking thread will not consume the buffer and the next future /// finishes; the result then fills the buffer and f4 will get polled: /// stream: #f1, #f2, *f3, *f4, f5 /// buffer: r1, r2 /// - adding the result of the third future to the buffer will have to wait until the blocking /// thread consumes the returned iterator; only after that will the stream proceed with /// polling the fifth future in the stream pub fn stream_to_iter<S>(s: S) -> RunStream<S::Item> where S: Stream + Unpin + Send + 'static, S::Item: Send, { RunStreamOptions::new().run(s) } /// See `stream_to_iter`. Allows tweaking run parameters. See individual methods for parameter /// details. pub struct RunStreamOptions { buffer_size: usize, } impl RunStreamOptions { pub fn new() -> Self { Self { buffer_size: STREAM_BUFFER_SIZE, } } /// When dealing with heavy computation or objects a smaller buffer size may be appropriate. /// The current implementation does not provide a means to completely wait on polling the /// second item until the blocking thread reads the first value. pub fn buffer_size(&mut self, buffer_size: usize) -> &mut Self { self.buffer_size = buffer_size; self } /// Takes an async stream and provide it's contents in the form of a regular iterator. /// See `stream_to_iter`. pub fn run<S>(&self, mut s: S) -> RunStream<S::Item> where S: Stream + Unpin + Send + 'static, S::Item: Send, { // Why use a channel vs using `std::iter::from_fn` // It is probably a bit easier to reason about what happens when using the channel. The // implementation details of the executor and the buffered stream don't come in discussion // as when directly scheduling the next future. It's a bit of insurance against changes and // it separates the two worlds more clearly. 
The channel approach can be optimized to // reduce entering the async runtime context when the stream is completed faster than it is // processed on the main thread. We could also add multiple consumers. let (tx, rx) = tokio::sync::mpsc::channel(self.buffer_size); let _guard = RUNTIME.enter(); tokio::spawn(async move { while let Some(v) = s.next().await { if tx.send(v).await.is_err() { // receiver dropped; TODO(T74252041): add logging return; } } }); RunStream { rx: Some(rx) } } } /// Blocking thread handler for receiving the results of processing a `Stream`. pub struct RunStream<T> { // Option is used to work around the lifetime in Iterator::next. rx: Option<tokio::sync::mpsc::Receiver<T>>, } impl<T: Send + 'static> Iterator for RunStream<T> { type Item = T; /// Returns the items extracted from processing the stream. Will return `None` when the stream's /// end is reached or when processing an item panics. /// See `stream_to_iter`. fn next(&mut self) -> Option<Self::Item> { let mut rx = self.rx.take().unwrap(); let (next, rx) = block_on_future(async { let next = rx.recv().await; (next, rx) }); self.rx = Some(rx); next } } /// Convert a blocking iterator to an async stream. /// /// Unlike `futures::stream::iter`, the iterator's `next()` function could be /// blocking. pub fn iter_to_stream<I: Send + 'static>( iter: impl Iterator<Item = I> + Send + 'static, ) -> BoxStream<'static, I> { let stream = futures::stream::unfold(iter, |mut iter| async { let (item, iter) = tokio::task::spawn_blocking(move || { let item = iter.next(); (item, iter) }) .await .unwrap(); item.map(|item| (item, iter)) }); Box::pin(stream.fuse()) } /// Blocks on the future from Python code, interrupting future execution on Ctrl+C. /// Wraps the future's output in a Result that returns an error when interrupted. /// If the future already returns a Result, use try_block_unless_interrupted. /// /// Send on this future is only needed to prevent including `py` in this future. pub fn block_unless_interrupted<F: Future>(f: F) -> Result<F::Output, Error> { block_on_exclusive(unless_interrupted(f)) } /// Same as block_unless_interrupted but for futures that return a Result. pub fn try_block_unless_interrupted<O, E, F: Future<Output = Result<O, E>>>(f: F) -> Result<O, E> where E: Send, E: From<Error>, { block_on_exclusive(async move { Ok(unless_interrupted(f).await??) }) } async fn unless_interrupted<F: Future>(f: F) -> Result<F::Output, Error> { let f = f.fuse(); let ctrlc = tokio::signal::ctrl_c().fuse(); pin_mut!(f, ctrlc); select! { _ = ctrlc => Err(ErrorKind::Interrupted.into()), res = f => Ok(res), } } #[cfg(test)] mod tests { use super::*; use std::thread; use futures::future; use futures::stream; #[test] fn test_block_on_future() { assert_eq!(block_on_future(async { 2 + 2 }), 4); } #[test] #[should_panic] fn test_block_on_future_will_panic() { block_on_future(async { panic!("hello future"); }); } #[test] fn test_panic_in_future_does_not_poison_runtime() { let th = thread::spawn(|| { block_on_future(async { panic!("no poison"); }) }); assert!(th.join().is_err()); assert_eq!(block_on_future(async { 2 + 2 }), 4); } #[test] fn test_block_on_future_block_on_other_thread() { let (tx, rx) = std::sync::mpsc::sync_channel(1); thread::spawn(|| { block_on_future(async move { for i in 12..
{ tx.send(i).unwrap(); } }) }); assert_eq!( rx.into_iter().take(5).collect::<Vec<i32>>(), vec![12, 13, 14, 15, 16] ); } #[test] fn test_block_on_future_pseudo_parallelism() { // This test will deadlock if threads can't schedule tasks while another thread // is waiting for a result let (tx, mut rx) = tokio::sync::mpsc::channel(1); let th1 = thread::spawn(|| block_on_future(async move { rx.recv().await })); let _th2 = thread::spawn(|| block_on_future(async move { tx.send(5).await })); assert_eq!(th1.join().unwrap(), Some(5)); } #[test] fn test_stream_to_iter() { let stream = stream::iter(vec![42, 33, 12]); let iter = stream_to_iter(stream); assert_eq!(iter.collect::<Vec<_>>(), vec![42, 33, 12]); } #[test] fn test_stream_to_iter_two_instances() { let mut options = RunStreamOptions::new(); options.buffer_size(1); let mut iter1 = options.run(stream::iter(vec![42, 33, 12])); let mut iter2 = options.run(stream::iter(vec![11, 25, 67])); assert_eq!(iter2.next(), Some(11)); assert_eq!(iter2.next(), Some(25)); assert_eq!(iter1.next(), Some(42)); assert_eq!(iter2.next(), Some(67)); assert_eq!(iter2.next(), None); assert_eq!(iter1.next(), Some(33)); assert_eq!(iter1.next(), Some(12)); assert_eq!(iter1.next(), None); } #[test] fn test_stream_to_iter_some_items_panic() { let stream = stream::iter(vec![43, 33, 12, 11]) .then(future::ready) .map(|v| { assert!(v & 1 == 1); v + 1 }); let iter = stream_to_iter(stream); assert_eq!(iter.collect::<Vec<_>>(), vec![44, 34]); } #[tokio::test] async fn test_iter_to_stream() { let iter = vec![1u8, 10, 20].into_iter(); let mut stream = iter_to_stream(iter); assert_eq!(stream.next().await, Some(1)); assert_eq!(stream.next().await, Some(10)); assert_eq!(stream.next().await, Some(20)); assert_eq!(stream.next().await, None); assert_eq!(stream.next().await, None); } }<|fim▁end|>
* GNU General Public License version 2. */
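Editor's note: a minimal usage sketch of the crate above, added for illustration only; it is not part of the original corpus. It assumes the snippet lives inside the same crate (so `stream_to_iter` and `block_unless_interrupted` are in scope) and that the `futures` dependency shown in the imports is available.

use futures::stream::{self, StreamExt};

fn demo() {
    // Drive an async stream from blocking code; up to STREAM_BUFFER_SIZE
    // items are buffered ahead of the consuming iterator.
    let doubled: Vec<i32> = stream_to_iter(stream::iter(vec![1, 2, 3]).map(|v| v * 2)).collect();
    assert_eq!(doubled, vec![2, 4, 6]);

    // Block on a single future; this returns Err(ErrorKind::Interrupted) if
    // the user hits Ctrl+C before the future completes.
    let sum = block_unless_interrupted(async { 40 + 2 }).expect("interrupted");
    assert_eq!(sum, 42);
}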
<|file_name|>ng_math_under_over_layout_algorithm.cc<|end_file_name|><|fim▁begin|>// Copyright 2020 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "third_party/blink/renderer/core/layout/ng/mathml/ng_math_under_over_layout_algorithm.h" #include "third_party/blink/renderer/core/layout/ng/mathml/ng_math_layout_utils.h" #include "third_party/blink/renderer/core/layout/ng/ng_block_break_token.h" #include "third_party/blink/renderer/core/layout/ng/ng_box_fragment.h" #include "third_party/blink/renderer/core/layout/ng/ng_length_utils.h" #include "third_party/blink/renderer/core/layout/ng/ng_out_of_flow_layout_part.h" #include "third_party/blink/renderer/core/layout/ng/ng_physical_box_fragment.h" #include "third_party/blink/renderer/core/mathml/mathml_operator_element.h" #include "third_party/blink/renderer/core/mathml/mathml_under_over_element.h" namespace blink { namespace { // Describes the amount to shift to apply to the under/over boxes. // Data is populated from the OpenType MATH table. // If the OpenType MATH table is not present fallback values are used. // https://w3c.github.io/mathml-core/#base-with-underscript // https://w3c.github.io/mathml-core/#base-with-overscript struct UnderOverVerticalParameters { bool use_under_over_bar_fallback; LayoutUnit under_gap_min; LayoutUnit over_gap_min; LayoutUnit under_shift_min; LayoutUnit over_shift_min; LayoutUnit under_extra_descender; LayoutUnit over_extra_ascender; LayoutUnit accent_base_height; }; UnderOverVerticalParameters GetUnderOverVerticalParameters( const ComputedStyle& style, bool is_base_large_operator, bool is_base_stretchy_in_inline_axis) { UnderOverVerticalParameters parameters; const SimpleFontData* font_data = style.GetFont().PrimaryFont(); if (!font_data) return parameters; // https://w3c.github.io/mathml-core/#dfn-default-fallback-constant const float default_fallback_constant = 0; if (is_base_large_operator) { parameters.under_gap_min = LayoutUnit( MathConstant(style, OpenTypeMathSupport::MathConstants::kLowerLimitGapMin) .value_or(default_fallback_constant)); parameters.over_gap_min = LayoutUnit( MathConstant(style, OpenTypeMathSupport::MathConstants::kUpperLimitGapMin) .value_or(default_fallback_constant)); parameters.under_shift_min = LayoutUnit( MathConstant( style, OpenTypeMathSupport::MathConstants::kLowerLimitBaselineDropMin) .value_or(default_fallback_constant)); parameters.over_shift_min = LayoutUnit( MathConstant( style, OpenTypeMathSupport::MathConstants::kUpperLimitBaselineRiseMin) .value_or(default_fallback_constant)); parameters.under_extra_descender = LayoutUnit(); parameters.over_extra_ascender = LayoutUnit(); parameters.accent_base_height = LayoutUnit(); parameters.use_under_over_bar_fallback = false; return parameters; } if (is_base_stretchy_in_inline_axis) { parameters.under_gap_min = LayoutUnit( MathConstant( style, OpenTypeMathSupport::MathConstants::kStretchStackGapBelowMin) .value_or(default_fallback_constant)); parameters.over_gap_min = LayoutUnit( MathConstant( style, OpenTypeMathSupport::MathConstants::kStretchStackGapAboveMin) .value_or(default_fallback_constant)); parameters.under_shift_min = LayoutUnit( MathConstant( style, OpenTypeMathSupport::MathConstants::kStretchStackBottomShiftDown) .value_or(default_fallback_constant)); parameters.over_shift_min = LayoutUnit( MathConstant( style, OpenTypeMathSupport::MathConstants::kStretchStackTopShiftUp) .value_or(default_fallback_constant)); 
parameters.under_extra_descender = LayoutUnit(); parameters.over_extra_ascender = LayoutUnit(); parameters.accent_base_height = LayoutUnit(); parameters.use_under_over_bar_fallback = false; return parameters; } const float default_rule_thickness = RuleThicknessFallback(style); parameters.under_gap_min = LayoutUnit( MathConstant(style, OpenTypeMathSupport::MathConstants::kUnderbarVerticalGap) .value_or(3 * default_rule_thickness)); parameters.over_gap_min = LayoutUnit( MathConstant(style, OpenTypeMathSupport::MathConstants::kOverbarVerticalGap) .value_or(3 * default_rule_thickness)); parameters.under_shift_min = LayoutUnit(); parameters.over_shift_min = LayoutUnit(); parameters.under_extra_descender = LayoutUnit( MathConstant(style, OpenTypeMathSupport::MathConstants::kUnderbarExtraDescender) .value_or(default_rule_thickness)); parameters.over_extra_ascender = LayoutUnit( MathConstant(style, OpenTypeMathSupport::MathConstants::kOverbarExtraAscender) .value_or(default_rule_thickness)); parameters.accent_base_height = LayoutUnit( MathConstant(style, OpenTypeMathSupport::MathConstants::kAccentBaseHeight) .value_or(font_data->GetFontMetrics().XHeight() / 2)); parameters.use_under_over_bar_fallback = true; return parameters; } // https://w3c.github.io/mathml-core/#underscripts-and-overscripts-munder-mover-munderover bool HasAccent(const NGBlockNode& node, bool accent_under) { DCHECK(node); auto* underover = To<MathMLUnderOverElement>(node.GetDOMNode()); auto script_type = underover->GetScriptType(); DCHECK(script_type == MathScriptType::kUnderOver || (accent_under && script_type == MathScriptType::kUnder) || (!accent_under && script_type == MathScriptType::kOver)); absl::optional<bool> attribute_value = accent_under ? underover->AccentUnder() : underover->Accent(); return attribute_value && *attribute_value; } } // namespace NGMathUnderOverLayoutAlgorithm::NGMathUnderOverLayoutAlgorithm( const NGLayoutAlgorithmParams& params) : NGLayoutAlgorithm(params) { DCHECK(params.space.IsNewFormattingContext()); } void NGMathUnderOverLayoutAlgorithm::GatherChildren(NGBlockNode* base, NGBlockNode* over, NGBlockNode* under) { auto script_type = Node().ScriptType(); for (NGLayoutInputNode child = Node().FirstChild(); child; child = child.NextSibling()) { NGBlockNode block_child = To<NGBlockNode>(child); if (child.IsOutOfFlowPositioned()) { container_builder_.AddOutOfFlowChildCandidate( block_child, BorderScrollbarPadding().StartOffset()); continue; } if (!*base) { *base = block_child; continue; } switch (script_type) { case MathScriptType::kUnder: DCHECK(!*under); *under = block_child; break; case MathScriptType::kOver: DCHECK(!*over); *over = block_child; break; case MathScriptType::kUnderOver: if (!*under) { *under = block_child; continue; } DCHECK(!*over); *over = block_child; break; default: NOTREACHED(); } } } scoped_refptr<const NGLayoutResult> NGMathUnderOverLayoutAlgorithm::Layout() { DCHECK(!BreakToken()); DCHECK(IsValidMathMLScript(Node())); NGBlockNode base = nullptr; NGBlockNode over = nullptr; NGBlockNode under = nullptr; GatherChildren(&base, &over, &under); const LogicalSize border_box_size = container_builder_.InitialBorderBoxSize(); const LogicalOffset content_start_offset = BorderScrollbarPadding().StartOffset(); LayoutUnit block_offset = content_start_offset.block_offset; const auto base_properties = GetMathMLEmbellishedOperatorProperties(base); const bool is_base_large_operator = base_properties && base_properties->is_large_op; const bool is_base_stretchy_in_inline_axis = base_properties 
&& base_properties->is_stretchy && !base_properties->is_vertical; const bool base_inherits_block_stretch_size_constraint = ConstraintSpace().TargetStretchBlockSizes().has_value(); const bool base_inherits_inline_stretch_size_constraint = !base_inherits_block_stretch_size_constraint && ConstraintSpace().HasTargetStretchInlineSize(); UnderOverVerticalParameters parameters = GetUnderOverVerticalParameters( Style(), is_base_large_operator, is_base_stretchy_in_inline_axis); // https://w3c.github.io/mathml-core/#dfn-algorithm-for-stretching-operators-along-the-inline-axis LayoutUnit inline_stretch_size; auto UpdateInlineStretchSize = [&](const scoped_refptr<const NGLayoutResult>& result) { NGFragment fragment( ConstraintSpace().GetWritingDirection(), To<NGPhysicalBoxFragment>(result->PhysicalFragment())); inline_stretch_size = std::max(inline_stretch_size, fragment.InlineSize()); }; // "Perform layout without any stretch size constraint on all the items of // LNotToStretch" bool layout_remaining_items_with_zero_inline_stretch_size = true; for (NGLayoutInputNode child = Node().FirstChild(); child; child = child.NextSibling()) { if (child.IsOutOfFlowPositioned() || IsInlineAxisStretchyOperator(To<NGBlockNode>(child))) continue;<|fim▁hole|> Node(), ChildAvailableSize(), ConstraintSpace(), child, NGCacheSlot::kMeasure); const auto child_layout_result = To<NGBlockNode>(child).Layout( child_constraint_space, nullptr /* break_token */); UpdateInlineStretchSize(child_layout_result); layout_remaining_items_with_zero_inline_stretch_size = false; } if (UNLIKELY(layout_remaining_items_with_zero_inline_stretch_size)) { // "If LNotToStretch is empty, perform layout with stretch size constraint 0 // on all the items of LToStretch. for (NGLayoutInputNode child = Node().FirstChild(); child; child = child.NextSibling()) { if (child.IsOutOfFlowPositioned()) continue; DCHECK(IsInlineAxisStretchyOperator(To<NGBlockNode>(child))); if (child == base && (base_inherits_block_stretch_size_constraint || base_inherits_inline_stretch_size_constraint)) continue; LayoutUnit zero_stretch_size; const auto child_constraint_space = CreateConstraintSpaceForMathChild( Node(), ChildAvailableSize(), ConstraintSpace(), child, NGCacheSlot::kMeasure, absl::nullopt, zero_stretch_size); const auto child_layout_result = To<NGBlockNode>(child).Layout( child_constraint_space, nullptr /* break_token */); UpdateInlineStretchSize(child_layout_result); } } auto CreateConstraintSpaceForUnderOverChild = [&](const NGBlockNode child) { if (child == base && base_inherits_block_stretch_size_constraint && IsBlockAxisStretchyOperator(To<NGBlockNode>(child))) { return CreateConstraintSpaceForMathChild( Node(), ChildAvailableSize(), ConstraintSpace(), child, NGCacheSlot::kLayout, *ConstraintSpace().TargetStretchBlockSizes()); } if (child == base && base_inherits_inline_stretch_size_constraint && IsInlineAxisStretchyOperator(To<NGBlockNode>(child))) { return CreateConstraintSpaceForMathChild( Node(), ChildAvailableSize(), ConstraintSpace(), child, NGCacheSlot::kLayout, absl::nullopt, ConstraintSpace().TargetStretchInlineSize()); } if ((child != base || (!base_inherits_block_stretch_size_constraint && !base_inherits_inline_stretch_size_constraint)) && IsInlineAxisStretchyOperator(To<NGBlockNode>(child))) { return CreateConstraintSpaceForMathChild( Node(), ChildAvailableSize(), ConstraintSpace(), child, NGCacheSlot::kLayout, absl::nullopt, inline_stretch_size); } return CreateConstraintSpaceForMathChild(Node(), ChildAvailableSize(), ConstraintSpace(), child, 
NGCacheSlot::kLayout); }; // TODO(crbug.com/1125136): take into account italic correction. const auto baseline_type = Style().GetFontBaseline(); const auto base_space = CreateConstraintSpaceForUnderOverChild(base); auto base_layout_result = base.Layout(base_space); auto base_margins = ComputeMarginsFor(base_space, base.Style(), ConstraintSpace()); NGBoxFragment base_fragment( ConstraintSpace().GetWritingDirection(), To<NGPhysicalBoxFragment>(base_layout_result->PhysicalFragment())); LayoutUnit base_ascent = base_fragment.BaselineOrSynthesize(baseline_type); // All children are positioned centered relative to the container (and // therefore centered relative to themselves). if (over) { const auto over_space = CreateConstraintSpaceForUnderOverChild(over); scoped_refptr<const NGLayoutResult> over_layout_result = over.Layout(over_space); NGBoxStrut over_margins = ComputeMarginsFor(over_space, over.Style(), ConstraintSpace()); NGBoxFragment over_fragment( ConstraintSpace().GetWritingDirection(), To<NGPhysicalBoxFragment>(over_layout_result->PhysicalFragment())); block_offset += parameters.over_extra_ascender + over_margins.block_start; LogicalOffset over_offset = { content_start_offset.inline_offset + over_margins.inline_start + (ChildAvailableSize().inline_size - (over_fragment.InlineSize() + over_margins.InlineSum())) / 2, block_offset}; container_builder_.AddResult(*over_layout_result, over_offset); over.StoreMargins(ConstraintSpace(), over_margins); if (parameters.use_under_over_bar_fallback) { block_offset += over_fragment.BlockSize(); if (HasAccent(Node(), false)) { if (base_ascent < parameters.accent_base_height) block_offset += parameters.accent_base_height - base_ascent; } else { block_offset += parameters.over_gap_min; } } else { LayoutUnit over_ascent = over_fragment.BaselineOrSynthesize(baseline_type); block_offset += std::max(over_fragment.BlockSize() + parameters.over_gap_min, over_ascent + parameters.over_shift_min); } block_offset += over_margins.block_end; } block_offset += base_margins.block_start; LogicalOffset base_offset = { content_start_offset.inline_offset + base_margins.inline_start + (ChildAvailableSize().inline_size - (base_fragment.InlineSize() + base_margins.InlineSum())) / 2, block_offset}; container_builder_.AddResult(*base_layout_result, base_offset); base.StoreMargins(ConstraintSpace(), base_margins); block_offset += base_fragment.BlockSize() + base_margins.block_end; if (under) { const auto under_space = CreateConstraintSpaceForUnderOverChild(under); scoped_refptr<const NGLayoutResult> under_layout_result = under.Layout(under_space); NGBoxStrut under_margins = ComputeMarginsFor(under_space, under.Style(), ConstraintSpace()); NGBoxFragment under_fragment( ConstraintSpace().GetWritingDirection(), To<NGPhysicalBoxFragment>(under_layout_result->PhysicalFragment())); block_offset += under_margins.block_start; if (parameters.use_under_over_bar_fallback) { if (!HasAccent(Node(), true)) block_offset += parameters.under_gap_min; } else { LayoutUnit under_ascent = under_fragment.BaselineOrSynthesize(baseline_type); block_offset += std::max(parameters.under_gap_min, parameters.under_shift_min - under_ascent); } LogicalOffset under_offset = { content_start_offset.inline_offset + under_margins.inline_start + (ChildAvailableSize().inline_size - (under_fragment.InlineSize() + under_margins.InlineSum())) / 2, block_offset}; block_offset += under_fragment.BlockSize(); block_offset += parameters.under_extra_descender; container_builder_.AddResult(*under_layout_result, 
under_offset); under.StoreMargins(ConstraintSpace(), under_margins); block_offset += under_margins.block_end; } container_builder_.SetBaseline(base_offset.block_offset + base_ascent); block_offset += BorderScrollbarPadding().block_end; LayoutUnit block_size = ComputeBlockSizeForFragment(ConstraintSpace(), Style(), BorderPadding(), block_offset, border_box_size.inline_size); container_builder_.SetIntrinsicBlockSize(block_offset); container_builder_.SetFragmentsTotalBlockSize(block_size); NGOutOfFlowLayoutPart(Node(), ConstraintSpace(), &container_builder_).Run(); return container_builder_.ToBoxFragment(); } MinMaxSizesResult NGMathUnderOverLayoutAlgorithm::ComputeMinMaxSizes( const MinMaxSizesFloatInput&) { DCHECK(IsValidMathMLScript(Node())); if (auto result = CalculateMinMaxSizesIgnoringChildren( Node(), BorderScrollbarPadding())) return *result; MinMaxSizes sizes; bool depends_on_block_constraints = false; for (NGLayoutInputNode child = Node().FirstChild(); child; child = child.NextSibling()) { if (child.IsOutOfFlowPositioned()) continue; // TODO(crbug.com/1125136): take into account italic correction. const auto child_result = ComputeMinAndMaxContentContributionForMathChild( Style(), ConstraintSpace(), To<NGBlockNode>(child), ChildAvailableSize().block_size); sizes.Encompass(child_result.sizes); depends_on_block_constraints |= child_result.depends_on_block_constraints; } sizes += BorderScrollbarPadding().InlineSum(); return MinMaxSizesResult(sizes, depends_on_block_constraints); } } // namespace blink<|fim▁end|>
const auto child_constraint_space = CreateConstraintSpaceForMathChild(
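Editor's note: a standalone sketch, not Blink code, of the under/over-bar fallback arithmetic in GetUnderOverVerticalParameters above; the struct and function names here are hypothetical.

// Gap minima default to 3x the rule thickness; the extra ascender and
// descender default to 1x, mirroring the value_or() fallbacks above.
struct UnderOverBarFallback {
  float under_gap_min;
  float over_gap_min;
  float under_extra_descender;
  float over_extra_ascender;
};

UnderOverBarFallback ComputeUnderOverBarFallback(float rule_thickness) {
  return {3 * rule_thickness, 3 * rule_thickness, rule_thickness,
          rule_thickness};
}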
<|file_name|>transfer_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python """Test the file transfer mechanism.""" import hashlib import io import itertools import os import platform import struct import unittest from unittest import mock from absl import app from grr_response_core.lib import constants from grr_response_core.lib import utils from grr_response_core.lib.rdfvalues import client as rdf_client from grr_response_core.lib.rdfvalues import paths as rdf_paths from grr_response_core.lib.util import compatibility from grr_response_core.lib.util import temp from grr_response_server import data_store from grr_response_server import file_store from grr_response_server import flow_base from grr_response_server.databases import db from grr_response_server.flows.general import transfer from grr_response_server.rdfvalues import objects as rdf_objects from grr.test_lib import action_mocks from grr.test_lib import flow_test_lib from grr.test_lib import test_lib # pylint:mode=test class ClientMock(action_mocks.ActionMock): BUFFER_SIZE = 1024 * 1024 def __init__(self, mbr_data=None, client_id=None): self.mbr = mbr_data self.client_id = client_id def ReadBuffer(self, args): return_data = self.mbr[args.offset:args.offset + args.length] return [ rdf_client.BufferReference( data=return_data, offset=args.offset, length=len(return_data)) ] class GetMBRFlowTest(flow_test_lib.FlowTestsBaseclass): """Test the transfer mechanism.""" mbr = (b"123456789" * 1000)[:4096] def setUp(self): super().setUp() self.client_id = self.SetupClient(0) def testGetMBR(self): """Test that the GetMBR flow works.""" flow_id = flow_test_lib.TestFlowHelper( transfer.GetMBR.__name__, ClientMock(self.mbr), creator=self.test_username, client_id=self.client_id) results = flow_test_lib.GetFlowResults(self.client_id, flow_id) self.assertLen(results, 1) self.assertEqual(results[0], self.mbr) def _RunAndCheck(self, chunk_size, download_length): with utils.Stubber(constants, "CLIENT_MAX_BUFFER_SIZE", chunk_size): flow_id = flow_test_lib.TestFlowHelper( transfer.GetMBR.__name__, ClientMock(self.mbr), creator=self.test_username, client_id=self.client_id, length=download_length) results = flow_test_lib.GetFlowResults(self.client_id, flow_id) self.assertLen(results, 1) self.assertEqual(results[0], self.mbr[:download_length]) def testGetMBRChunked(self): chunk_size = 100 download_length = 15 * chunk_size self._RunAndCheck(chunk_size, download_length) # Not a multiple of the chunk size. download_length = 15 * chunk_size + chunk_size // 2 self._RunAndCheck(chunk_size, download_length) class CompareFDsMixin(object): def CompareFDs(self, fd1, fd2): # Seek the files to the end to make sure they are the same size. 
fd2.seek(0, 2) fd1.seek(0, 2) self.assertEqual(fd2.tell(), fd1.tell()) ranges = [ # Start of file (0, 100), # Straddle the first chunk (16 * 1024 - 100, 300), # Read past end of file (fd2.tell() - 100, 300), # Zero length reads (100, 0), ] for offset, length in ranges: fd1.seek(offset) data1 = fd1.read(length) fd2.seek(offset) data2 = fd2.read(length) self.assertEqual(data1, data2) class GetFileFlowTest(CompareFDsMixin, flow_test_lib.FlowTestsBaseclass): """Test the transfer mechanism.""" def setUp(self): super().setUp() self.client_id = self.SetupClient(0) def testGetFile(self): """Test that the GetFile flow works.""" client_mock = action_mocks.GetFileClientMock() pathspec = rdf_paths.PathSpec( pathtype=rdf_paths.PathSpec.PathType.OS, path=os.path.join(self.base_path, "test_img.dd")) flow_test_lib.TestFlowHelper( transfer.GetFile.__name__, client_mock, creator=self.test_username, client_id=self.client_id, pathspec=pathspec) # Fix path for Windows testing. pathspec.path = pathspec.path.replace("\\", "/") with open(pathspec.path, "rb") as fd2: cp = db.ClientPath.FromPathSpec(self.client_id, pathspec) fd_rel_db = file_store.OpenFile(cp) self.CompareFDs(fd2, fd_rel_db) # Only the sha256 hash of the contents should have been calculated: # in order to put file contents into the file store. history = data_store.REL_DB.ReadPathInfoHistory(cp.client_id, cp.path_type, cp.components) self.assertEqual(history[-1].hash_entry.sha256, fd_rel_db.hash_id.AsBytes()) self.assertIsNone(history[-1].hash_entry.sha1) self.assertIsNone(history[-1].hash_entry.md5) def testGetFilePathCorrection(self): """Tests that the pathspec returned is used for the aff4path.""" client_mock = action_mocks.GetFileClientMock() # Deliberately using the wrong casing. pathspec = rdf_paths.PathSpec( pathtype=rdf_paths.PathSpec.PathType.OS, path=os.path.join(self.base_path, "TEST_IMG.dd")) expected_size = os.path.getsize(os.path.join(self.base_path, "test_img.dd")) session_id = flow_test_lib.TestFlowHelper( transfer.GetFile.__name__, client_mock, creator=self.test_username, client_id=self.client_id, pathspec=pathspec) results = flow_test_lib.GetFlowResults(self.client_id, session_id) self.assertLen(results, 1) res_pathspec = results[0].pathspec # Fix path for Windows testing. pathspec.path = pathspec.path.replace("\\", "/") with open(res_pathspec.path, "rb") as fd2: fd2.seek(0, 2) cp = db.ClientPath.FromPathSpec(self.client_id, res_pathspec) fd_rel_db = file_store.OpenFile(cp) self.CompareFDs(fd2, fd_rel_db) # Only the sha256 hash of the contents should have been calculated: # in order to put file contents into the file store. 
history = data_store.REL_DB.ReadPathInfoHistory(cp.client_id, cp.path_type, cp.components) self.assertEqual(history[-1].hash_entry.sha256, fd_rel_db.hash_id.AsBytes()) self.assertEqual(history[-1].hash_entry.num_bytes, expected_size) self.assertIsNone(history[-1].hash_entry.sha1) self.assertIsNone(history[-1].hash_entry.md5) def testGetFileIsDirectory(self): """Tests that the flow raises when called on directory.""" client_mock = action_mocks.GetFileClientMock() with temp.AutoTempDirPath() as temp_dir: pathspec = rdf_paths.PathSpec( pathtype=rdf_paths.PathSpec.PathType.OS, path=temp_dir) with self.assertRaises(RuntimeError): flow_test_lib.TestFlowHelper( transfer.GetFile.__name__, client_mock, creator=self.test_username, client_id=self.client_id, pathspec=pathspec) def testFailsIfStatFailsAndIgnoreStatFailureFlagNotSet(self): with temp.AutoTempFilePath() as test_path: with open(test_path, "wb") as fd: fd.write(b"foo") pathspec = rdf_paths.PathSpec( pathtype=rdf_paths.PathSpec.PathType.OS, path=test_path, ) args = transfer.GetFileArgs( pathspec=pathspec, read_length=1, ) client_mock = action_mocks.GetFileWithFailingStatClientMock() with self.assertRaises(RuntimeError): flow_test_lib.TestFlowHelper( transfer.GetFile.__name__, client_mock, creator=self.test_username, client_id=self.client_id, args=args) def testWorksIfStatFailsAndIgnoreStatFailureFlagIsSet(self): with temp.AutoTempFilePath() as test_path: with open(test_path, "wb") as fd: fd.write(b"foo") pathspec = rdf_paths.PathSpec( pathtype=rdf_paths.PathSpec.PathType.OS, path=test_path, ) args = transfer.GetFileArgs( pathspec=pathspec, read_length=1, ignore_stat_failure=True, ) client_mock = action_mocks.GetFileWithFailingStatClientMock() flow_test_lib.TestFlowHelper( transfer.GetFile.__name__, client_mock, creator=self.test_username, client_id=self.client_id, args=args) def _ReadBytesWithGetFile(self, path, stat_available=False, offset=None, file_size_override=None, read_length=None): if stat_available: client_mock = action_mocks.GetFileClientMock() else: client_mock = action_mocks.GetFileWithFailingStatClientMock() pathspec = rdf_paths.PathSpec( pathtype=rdf_paths.PathSpec.PathType.OS, path=path, ) if offset is not None: pathspec.offset = offset if file_size_override is not None: pathspec.file_size_override = file_size_override args = transfer.GetFileArgs( pathspec=pathspec, ignore_stat_failure=not stat_available, ) if read_length is not None: args.read_length = read_length flow_id = flow_test_lib.TestFlowHelper( transfer.GetFile.__name__, client_mock, creator=self.test_username, client_id=self.client_id, args=args) results = flow_test_lib.GetFlowResults(self.client_id, flow_id) self.assertLen( results, 1, f"Expected 1 result for offset={offset}, " f"file_size_override={file_size_override}, " f"read_length={read_length}, ") res_pathspec = results[0].pathspec cp = db.ClientPath.FromPathSpec(self.client_id, res_pathspec) return file_store.OpenFile(cp).Read() TEST_DATA_LENGTH = transfer.GetFile.CHUNK_SIZE * 10 + 1 TEST_DATA = b"".join( itertools.islice( itertools.cycle( [b"0", b"1", b"2", b"3", b"4", b"5", b"6", b"7", b"8", b"9"]), TEST_DATA_LENGTH)) def testReadsTheWholeStatableFileWhenNoSizesPassed(self): with temp.AutoTempFilePath() as test_path: with open(test_path, "wb") as fd: fd.write(self.TEST_DATA) actual_bytes = self._ReadBytesWithGetFile(test_path, stat_available=True) self.assertEqual(self.TEST_DATA, actual_bytes) def testRaisesOnNonStatableFileWhenNoSizesPassed(self): with temp.AutoTempFilePath() as test_path: with 
self.assertRaises(RuntimeError): self._ReadBytesWithGetFile(test_path, stat_available=False) READ_LENGTH_INTERVALS = ( # Check for intervals within the file size. (0, 10), (10, 20), (0, transfer.GetFile.CHUNK_SIZE), (1, transfer.GetFile.CHUNK_SIZE), (1, transfer.GetFile.CHUNK_SIZE - 1), (0, transfer.GetFile.CHUNK_SIZE * 2), (1, transfer.GetFile.CHUNK_SIZE * 2), (1, transfer.GetFile.CHUNK_SIZE * 2 - 1), (TEST_DATA_LENGTH - transfer.GetFile.CHUNK_SIZE, transfer.GetFile.CHUNK_SIZE), (TEST_DATA_LENGTH - transfer.GetFile.CHUNK_SIZE - 1, transfer.GetFile.CHUNK_SIZE), (TEST_DATA_LENGTH - transfer.GetFile.CHUNK_SIZE + 1, transfer.GetFile.CHUNK_SIZE - 1), # Check for intervals outside of the file size (an EOF might # happen also on a device file, like when a disk file is read). (TEST_DATA_LENGTH - 10, 20), (TEST_DATA_LENGTH - transfer.GetFile.CHUNK_SIZE - 1, transfer.GetFile.CHUNK_SIZE + 2), ) def testWorksWithReadLengthOnSeekableFile(self): with temp.AutoTempFilePath() as test_path: with open(test_path, "wb") as fd: fd.write(self.TEST_DATA) for offset, read_length in self.READ_LENGTH_INTERVALS: with self.subTest( offset=offset, read_length=read_length, stat_available=True): actual_bytes = self._ReadBytesWithGetFile( test_path, stat_available=True, offset=offset, read_length=read_length) self.assertEqual(self.TEST_DATA[offset:offset + read_length], actual_bytes) with self.subTest( offset=offset, read_length=read_length, stat_available=False): actual_bytes = self._ReadBytesWithGetFile( test_path, stat_available=False, offset=offset, read_length=read_length) self.assertEqual(self.TEST_DATA[offset:offset + read_length], actual_bytes) def testWorksWithReadLengthOnNonSeekableFile(self): for offset, read_length in self.READ_LENGTH_INTERVALS: # Check non-seekable file that still can be stat-ed. with self.subTest( offset=offset, read_length=read_length, stat_available=True): actual_bytes = self._ReadBytesWithGetFile( "/dev/random", stat_available=True, offset=offset, read_length=read_length) # Using assertEqual instead of assertLen for easier-to-process # failure messages (as long byte sequences get dumped to stdout # in case of a failure). self.assertEqual(len(actual_bytes), read_length) # Check non-seekable file that can't be stat-ed. with self.subTest( offset=offset, read_length=read_length, stat_available=False): actual_bytes = self._ReadBytesWithGetFile( "/dev/random", stat_available=False, offset=offset, read_length=read_length) # Using assertEqual instead of assertLen for easier-to-process # failure messages (as long byte sequences get dumped to stdout # in case of a failure). self.assertEqual(len(actual_bytes), read_length) FILE_SIZE_OVERRIDE_INTERVALS = ( # Check intervals within the file boundaries. (0, 10), (10, 30), (0, transfer.GetFile.CHUNK_SIZE), (1, 1 + transfer.GetFile.CHUNK_SIZE), (1, transfer.GetFile.CHUNK_SIZE), (0, transfer.GetFile.CHUNK_SIZE * 2), (1, 1 + transfer.GetFile.CHUNK_SIZE * 2), (1, transfer.GetFile.CHUNK_SIZE * 2), (TEST_DATA_LENGTH - transfer.GetFile.CHUNK_SIZE, TEST_DATA_LENGTH), (TEST_DATA_LENGTH - transfer.GetFile.CHUNK_SIZE - 1, TEST_DATA_LENGTH - 1), (TEST_DATA_LENGTH - transfer.GetFile.CHUNK_SIZE + 1, TEST_DATA_LENGTH), # Checks intervals outside of the file size. 
(TEST_DATA_LENGTH - 10, TEST_DATA_LENGTH + 10), (TEST_DATA_LENGTH - transfer.GetFile.CHUNK_SIZE - 1, TEST_DATA_LENGTH + 1), ) def testWorksWithFileSizeOverrideOnSeekableFile(self): with temp.AutoTempFilePath() as test_path: with open(test_path, "wb") as fd: fd.write(self.TEST_DATA) for offset, file_size_override in self.FILE_SIZE_OVERRIDE_INTERVALS: with self.subTest( offset=offset, file_size_override=file_size_override, stat_available=True): actual_bytes = self._ReadBytesWithGetFile( test_path, stat_available=True, offset=offset, file_size_override=file_size_override) self.assertEqual(self.TEST_DATA[offset:file_size_override], actual_bytes) with self.subTest( offset=offset, file_size_override=file_size_override, stat_available=False): actual_bytes = self._ReadBytesWithGetFile( test_path, stat_available=False, offset=offset, file_size_override=file_size_override) self.assertEqual(self.TEST_DATA[offset:file_size_override], actual_bytes) def testWorksWithFileSizeOverrideOnNonSeekableFile(self): for offset, file_size_override in self.FILE_SIZE_OVERRIDE_INTERVALS: with self.subTest( offset=offset, file_size_override=file_size_override, stat_available=True): actual_bytes = self._ReadBytesWithGetFile( "/dev/random", stat_available=True, offset=offset, file_size_override=file_size_override) self.assertEqual(len(actual_bytes), file_size_override - offset) with self.subTest( offset=offset, file_size_override=file_size_override, stat_available=False): actual_bytes = self._ReadBytesWithGetFile( "/dev/random", stat_available=False, offset=offset, file_size_override=file_size_override) self.assertEqual(len(actual_bytes), file_size_override - offset) READ_LENGTH_FILE_SIZE_OVERRIDE_INTERVALS = ( # offset, read_length, file_size_override (0, 10, 5), (0, 10, 15), (0, 5, 10), (0, 15, 10), (0, transfer.GetFile.CHUNK_SIZE * 2, transfer.GetFile.CHUNK_SIZE * 2 - 1), (0, transfer.GetFile.CHUNK_SIZE * 2, transfer.GetFile.CHUNK_SIZE * 2 + 1), (1, transfer.GetFile.CHUNK_SIZE * 2, transfer.GetFile.CHUNK_SIZE * 2), (1, transfer.GetFile.CHUNK_SIZE * 2, transfer.GetFile.CHUNK_SIZE * 2 + 2), (TEST_DATA_LENGTH - transfer.GetFile.CHUNK_SIZE, transfer.GetFile.CHUNK_SIZE, TEST_DATA_LENGTH - 1), (TEST_DATA_LENGTH - transfer.GetFile.CHUNK_SIZE, transfer.GetFile.CHUNK_SIZE, TEST_DATA_LENGTH + 1), ) def testWorksWithReadLengthAndFileSizeOverrideOnSeekableFiles(self): with temp.AutoTempFilePath() as test_path: with open(test_path, "wb") as fd: fd.write(self.TEST_DATA) for (offset, read_length, file_size_override) in self.READ_LENGTH_FILE_SIZE_OVERRIDE_INTERVALS: upper_limit = min(offset + read_length, file_size_override) with self.subTest( offset=offset, read_length=read_length, file_size_override=file_size_override, stat_available=True): actual_bytes = self._ReadBytesWithGetFile( test_path, stat_available=True, offset=offset, read_length=read_length, file_size_override=file_size_override) self.assertEqual(self.TEST_DATA[offset:upper_limit], actual_bytes) with self.subTest( offset=offset, read_length=read_length, file_size_override=file_size_override, stat_available=False): actual_bytes = self._ReadBytesWithGetFile( test_path, stat_available=False, offset=offset, read_length=read_length, file_size_override=file_size_override) self.assertEqual(self.TEST_DATA[offset:upper_limit], actual_bytes) def testWorksWithReadLengthAndFileSizeOverrideOnNonSeekableFiles(self): for (offset, read_length, file_size_override) in self.READ_LENGTH_FILE_SIZE_OVERRIDE_INTERVALS: with self.subTest( offset=offset, read_length=read_length, 
file_size_override=file_size_override, stat_available=True): actual_bytes = self._ReadBytesWithGetFile( "/dev/random", stat_available=True, offset=offset, read_length=read_length, file_size_override=file_size_override) # Using assertEqual instead of assertLen for easier-to-process # failure messages (as long byte sequences get dumped to stdout # in case of a failure). self.assertEqual( len(actual_bytes), min(read_length, file_size_override - offset)) with self.subTest( offset=offset, read_length=read_length, file_size_override=file_size_override, stat_available=False): actual_bytes = self._ReadBytesWithGetFile( "/dev/random", stat_available=False, offset=offset, read_length=read_length, file_size_override=file_size_override) # Using assertEqual instead of assertLen for easier-to-process # failure messages (as long byte sequences get dumped to stdout # in case of a failure). self.assertEqual( len(actual_bytes), min(read_length, file_size_override - offset)) class MultiGetFileFlowTest(CompareFDsMixin, flow_test_lib.FlowTestsBaseclass): """Test the transfer mechanism.""" def setUp(self): super().setUp() self.client_id = self.SetupClient(0) @unittest.skipUnless(platform.system() == "Linux", "/proc only exists on Linux") def testMultiGetFileOfSpecialFiles(self): """Test that special /proc/ files are handled correctly. /proc/ files have the property that they are non seekable from their end (i.e. seeking them relative to the end is not supported). They also return an st_size of 0. For example: $ stat /proc/self/maps File: '/proc/self/maps' Size: 0 Blocks: 0 IO Block: 1024 regular empty file $ head /proc/self/maps 00400000-00409000 r-xp 00000000 fc:01 9180740 /usr/bin/head 00608000-00609000 r--p 00008000 fc:01 9180740 /usr/bin/head ... When we try to use the MultiGetFile flow, it deduplicates the files and since it thinks the file has a zero size, the flow will not download the file, and instead copy the zero size file into it. """ client_mock = action_mocks.MultiGetFileClientMock() # # Create a zero sized file. zero_sized_filename = os.path.join(self.temp_dir, "zero_size") with open(zero_sized_filename, "wb"): pass pathspec = rdf_paths.PathSpec( pathtype=rdf_paths.PathSpec.PathType.OS, path=zero_sized_filename) flow_test_lib.TestFlowHelper( transfer.MultiGetFile.__name__, client_mock, creator=self.test_username, file_size="1MiB", client_id=self.client_id, pathspecs=[pathspec]) # Now if we try to fetch a real /proc/ filename this will fail because the # filestore already contains the zero length file pathspec = rdf_paths.PathSpec( pathtype=rdf_paths.PathSpec.PathType.OS, path="/proc/self/environ") flow_test_lib.TestFlowHelper( transfer.MultiGetFile.__name__, client_mock, creator=self.test_username, file_size=1024 * 1024, client_id=self.client_id, pathspecs=[pathspec]) with open(pathspec.last.path, "rb") as fd: data = fd.read() cp = db.ClientPath.FromPathSpec(self.client_id, pathspec) fd_rel_db = file_store.OpenFile(cp) self.assertEqual(fd_rel_db.size, len(data)) self.assertEqual(fd_rel_db.read(), data) # Check that SHA256 hash of the file matches the contents # hash and that MD5 and SHA1 are set. 
history = data_store.REL_DB.ReadPathInfoHistory(cp.client_id, cp.path_type, cp.components) self.assertEqual(history[-1].hash_entry.sha256, fd_rel_db.hash_id.AsBytes()) self.assertEqual(history[-1].hash_entry.num_bytes, len(data)) self.assertIsNotNone(history[-1].hash_entry.sha1) self.assertIsNotNone(history[-1].hash_entry.md5) def testMultiGetFile(self): """Test MultiGetFile.""" client_mock = action_mocks.MultiGetFileClientMock() pathspec = rdf_paths.PathSpec( pathtype=rdf_paths.PathSpec.PathType.OS, path=os.path.join(self.base_path, "test_img.dd")) expected_size = os.path.getsize(pathspec.path) args = transfer.MultiGetFileArgs(pathspecs=[pathspec, pathspec]) with test_lib.Instrument(transfer.MultiGetFile, "_ReceiveFileStat") as receivestat_instrument: flow_test_lib.TestFlowHelper( transfer.MultiGetFile.__name__, client_mock, creator=self.test_username, client_id=self.client_id, args=args) # We should only have called StoreStat once because the two paths # requested were identical. self.assertLen(receivestat_instrument.args, 1) # Fix path for Windows testing. pathspec.path = pathspec.path.replace("\\", "/") with open(pathspec.path, "rb") as fd2: # Test the file that was created. cp = db.ClientPath.FromPathSpec(self.client_id, pathspec) fd_rel_db = file_store.OpenFile(cp) self.CompareFDs(fd2, fd_rel_db) # Check that SHA256 hash of the file matches the contents # hash and that MD5 and SHA1 are set. history = data_store.REL_DB.ReadPathInfoHistory(cp.client_id, cp.path_type, cp.components) self.assertEqual(history[-1].hash_entry.sha256, fd_rel_db.hash_id.AsBytes()) self.assertEqual(history[-1].hash_entry.num_bytes, expected_size) self.assertIsNotNone(history[-1].hash_entry.sha1) self.assertIsNotNone(history[-1].hash_entry.md5) # Setting MIN_CALL_TO_FILE_STORE to a smaller value emulates MultiGetFile's # behavior when dealing with large files. @mock.patch.object(transfer.MultiGetFile, "MIN_CALL_TO_FILE_STORE", 1) def testMultiGetFileCorrectlyFetchesSameFileMultipleTimes(self): """Test MultiGetFile.""" client_mock = action_mocks.MultiGetFileClientMock() total_num_chunks = 10 total_size = transfer.MultiGetFile.CHUNK_SIZE * total_num_chunks path = os.path.join(self.temp_dir, "test_big.txt") with io.open(path, "wb") as fd: for i in range(total_num_chunks): fd.write(struct.pack("b", i) * transfer.MultiGetFile.CHUNK_SIZE) pathspec = rdf_paths.PathSpec( pathtype=rdf_paths.PathSpec.PathType.OS, path=path) def _Check(expected_size): args = transfer.MultiGetFileArgs( pathspecs=[pathspec], file_size=expected_size) flow_test_lib.TestFlowHelper( transfer.MultiGetFile.__name__, client_mock, creator=self.test_username, client_id=self.client_id, args=args) # Test the file that was created. cp = db.ClientPath.FromPathSpec(self.client_id, pathspec) fd = file_store.OpenFile(cp) self.assertEqual(fd.size, expected_size) # Fetch the file twice to test a real-world scenario when a file is first # fetched with a smaller limit, and then - with a bigger one. # This tests against a bug in MultiGetFileLogic when first N chunks of # the file were already fetched during a previous MultiGetFileLogic run, # and as a consequence the file was considered fully fetched, even if # the max_file_size value of the current run was much bigger than # the size of the previously fetched file. 
_Check(transfer.MultiGetFileLogic.CHUNK_SIZE * 2) _Check(total_size) def testMultiGetFileMultiFiles(self): """Test MultiGetFile downloading many files at once.""" client_mock = action_mocks.MultiGetFileClientMock() pathspecs = [] # Make 30 files to download. for i in range(30): path = os.path.join(self.temp_dir, "test_%s.txt" % i) with io.open(path, "wb") as fd: fd.write(b"Hello") pathspecs.append( rdf_paths.PathSpec( pathtype=rdf_paths.PathSpec.PathType.OS, path=path)) args = transfer.MultiGetFileArgs( pathspecs=pathspecs, maximum_pending_files=10) flow_test_lib.TestFlowHelper( transfer.MultiGetFile.__name__, client_mock, creator=self.test_username, client_id=self.client_id, args=args) # Now open each file and make sure the data is there. for pathspec in pathspecs: cp = db.ClientPath.FromPathSpec(self.client_id, pathspec) fd_rel_db = file_store.OpenFile(cp) self.assertEqual(b"Hello", fd_rel_db.read()) # Check that SHA256 hash of the file matches the contents # hash and that MD5 and SHA1 are set. history = data_store.REL_DB.ReadPathInfoHistory(cp.client_id, cp.path_type, cp.components) self.assertEqual(history[-1].hash_entry.sha256, fd_rel_db.hash_id.AsBytes()) self.assertEqual(history[-1].hash_entry.num_bytes, 5) self.assertIsNotNone(history[-1].hash_entry.sha1) self.assertIsNotNone(history[-1].hash_entry.md5) def testMultiGetFileDeduplication(self): client_mock = action_mocks.MultiGetFileClientMock() pathspecs = [] # Make 10 files to download. for i in range(10): path = os.path.join(self.temp_dir, "test_%s.txt" % i) with io.open(path, "wb") as fd: fd.write(b"Hello") pathspecs.append( rdf_paths.PathSpec( pathtype=rdf_paths.PathSpec.PathType.OS, path=path)) # All those files are the same so the individual chunks should # only be downloaded once. By forcing maximum_pending_files=1, # there should only be a single TransferBuffer call. args = transfer.MultiGetFileArgs( pathspecs=pathspecs, maximum_pending_files=1) flow_test_lib.TestFlowHelper( transfer.MultiGetFile.__name__, client_mock, creator=self.test_username, client_id=self.client_id, args=args) self.assertEqual(client_mock.action_counts["TransferBuffer"], 1) for pathspec in pathspecs: # Check that each referenced file can be read. cp = db.ClientPath.FromPathSpec(self.client_id, pathspec) fd_rel_db = file_store.OpenFile(cp) self.assertEqual(b"Hello", fd_rel_db.read()) # Check that SHA256 hash of the file matches the contents # hash and that MD5 and SHA1 are set. history = data_store.REL_DB.ReadPathInfoHistory(cp.client_id, cp.path_type, cp.components) self.assertEqual(history[-1].hash_entry.sha256, fd_rel_db.hash_id.AsBytes()) self.assertEqual(history[-1].hash_entry.num_bytes, 5) self.assertIsNotNone(history[-1].hash_entry.sha1) self.assertIsNotNone(history[-1].hash_entry.md5) def testExistingChunks(self): client_mock = action_mocks.MultiGetFileClientMock() # Make a file to download that is three chunks long. # For the second run, we change the middle chunk. This will lead to a # different hash for the whole file and three chunks to download of which we # already have two. 
chunk_size = transfer.MultiGetFile.CHUNK_SIZE for data in [ b"A" * chunk_size + b"B" * chunk_size + b"C" * 100, b"A" * chunk_size + b"X" * chunk_size + b"C" * 100 ]: path = os.path.join(self.temp_dir, "test.txt") with io.open(path, "wb") as fd: fd.write(data) pathspec = rdf_paths.PathSpec( pathtype=rdf_paths.PathSpec.PathType.OS, path=path) args = transfer.MultiGetFileArgs(pathspecs=[pathspec]) flow_test_lib.TestFlowHelper( transfer.MultiGetFile.__name__, client_mock, creator=self.test_username, client_id=self.client_id, args=args) cp = db.ClientPath.FromPathSpec(self.client_id, pathspec) fd_rel_db = file_store.OpenFile(cp) self.assertEqual(fd_rel_db.size, len(data)) self.assertEqual(fd_rel_db.read(), data) # Check that SHA256 hash of the file matches the contents # hash and that MD5 and SHA1 are set. history = data_store.REL_DB.ReadPathInfoHistory(cp.client_id, cp.path_type, cp.components) self.assertEqual(history[-1].hash_entry.sha256, fd_rel_db.hash_id.AsBytes()) self.assertEqual(history[-1].hash_entry.num_bytes, len(data)) self.assertIsNotNone(history[-1].hash_entry.sha1) self.assertIsNotNone(history[-1].hash_entry.md5) # Three chunks to get for the first file, only one for the second. self.assertEqual(client_mock.action_counts["TransferBuffer"], 4) def testMultiGetFileSetsFileHashAttributeWhenMultipleChunksDownloaded(self): client_mock = action_mocks.MultiGetFileClientMock() pathspec = rdf_paths.PathSpec( pathtype=rdf_paths.PathSpec.PathType.OS, path=os.path.join(self.base_path, "test_img.dd")) expected_size = os.path.getsize(pathspec.path) args = transfer.MultiGetFileArgs(pathspecs=[pathspec]) flow_test_lib.TestFlowHelper( transfer.MultiGetFile.__name__, client_mock, creator=self.test_username, client_id=self.client_id, args=args) h = hashlib.sha256() with io.open(os.path.join(self.base_path, "test_img.dd"), "rb") as model_fd: h.update(model_fd.read()) cp = db.ClientPath.FromPathSpec(self.client_id, pathspec) fd_rel_db = file_store.OpenFile(cp) self.assertEqual(fd_rel_db.hash_id.AsBytes(), h.digest()) # Check that SHA256 hash of the file matches the contents # hash and that MD5 and SHA1 are set. history = data_store.REL_DB.ReadPathInfoHistory(cp.client_id, cp.path_type, cp.components) self.assertEqual(history[-1].hash_entry.sha256, fd_rel_db.hash_id.AsBytes()) self.assertEqual(history[-1].hash_entry.num_bytes, expected_size) self.assertIsNotNone(history[-1].hash_entry.sha1) self.assertIsNotNone(history[-1].hash_entry.md5) def testMultiGetFileSizeLimit(self): client_mock = action_mocks.MultiGetFileClientMock() image_path = os.path.join(self.base_path, "test_img.dd") pathspec = rdf_paths.PathSpec( pathtype=rdf_paths.PathSpec.PathType.OS, path=image_path) # Read a bit more than one chunk (600 * 1024). expected_size = 750 * 1024 args = transfer.MultiGetFileArgs( pathspecs=[pathspec], file_size=expected_size) flow_test_lib.TestFlowHelper( transfer.MultiGetFile.__name__, client_mock, creator=self.test_username, client_id=self.client_id, args=args) with open(image_path, "rb") as fd: expected_data = fd.read(expected_size) cp = db.ClientPath.FromPathSpec(self.client_id, pathspec) fd_rel_db = file_store.OpenFile(cp) self.assertEqual(fd_rel_db.size, expected_size) data = fd_rel_db.read(2 * expected_size) self.assertLen(data, expected_size) d = hashlib.sha256() d.update(expected_data) self.assertEqual(fd_rel_db.hash_id.AsBytes(), d.digest()) # Check that SHA256 hash of the file matches the contents # hash and that MD5 and SHA1 are set. 
history = data_store.REL_DB.ReadPathInfoHistory(cp.client_id, cp.path_type, cp.components) self.assertEqual(history[-1].hash_entry.sha256, fd_rel_db.hash_id.AsBytes()) self.assertEqual(history[-1].hash_entry.num_bytes, expected_size) self.assertIsNotNone(history[-1].hash_entry.sha1) self.assertIsNotNone(history[-1].hash_entry.md5) def testMultiGetFileProgressReportsFailuresAndSuccessesCorrectly(self): client_mock = action_mocks.MultiGetFileClientMock() image_path = os.path.join(self.base_path, "test_img.dd") pathspec_1 = rdf_paths.PathSpec( pathtype=rdf_paths.PathSpec.PathType.OS, path=image_path) pathspec_2 = rdf_paths.PathSpec( pathtype=rdf_paths.PathSpec.PathType.OS, path="/non/existing/path") args = transfer.MultiGetFileArgs(pathspecs=[ pathspec_1, pathspec_2, ]) flow_id = flow_test_lib.TestFlowHelper( transfer.MultiGetFile.__name__, client_mock, creator=self.test_username, client_id=self.client_id, args=args) f_obj = flow_test_lib.GetFlowObj(self.client_id, flow_id) f_instance = transfer.MultiGetFile(f_obj) p = f_instance.GetProgress() self.assertEqual(p.num_pending_hashes, 0) self.assertEqual(p.num_pending_files, 0) self.assertEqual(p.num_skipped, 0) self.assertEqual(p.num_collected, 1) self.assertEqual(p.num_failed, 1) # Check that pathspecs in the progress proto are returned in the same order # as in the args proto. self.assertEqual(p.pathspecs_progress[0].pathspec, pathspec_1) self.assertEqual(p.pathspecs_progress[1].pathspec, pathspec_2) # Check that per-pathspecs statuses are correct. self.assertEqual(p.pathspecs_progress[0].status, transfer.PathSpecProgress.Status.COLLECTED) self.assertEqual(p.pathspecs_progress[1].status, transfer.PathSpecProgress.Status.FAILED) def testMultiGetFileProgressReportsSkippedDuplicatesCorrectly(self): client_mock = action_mocks.MultiGetFileClientMock() image_path = os.path.join(self.base_path, "test_img.dd") pathspec = rdf_paths.PathSpec( pathtype=rdf_paths.PathSpec.PathType.OS, path=image_path) args = transfer.MultiGetFileArgs(pathspecs=[pathspec]) # Let the flow run to make sure the file is collected. flow_test_lib.TestFlowHelper( transfer.MultiGetFile.__name__, client_mock, creator=self.test_username, client_id=self.client_id, args=args) # Run the flow second time to make sure duplicates are collected. 
flow_id = flow_test_lib.TestFlowHelper( transfer.MultiGetFile.__name__, client_mock, creator=self.test_username, client_id=self.client_id, args=args) f_obj = flow_test_lib.GetFlowObj(self.client_id, flow_id) f_instance = transfer.MultiGetFile(f_obj) p = f_instance.GetProgress() self.assertEqual(p.num_collected, 0) self.assertEqual(p.num_failed, 0) self.assertEqual(p.num_skipped, 1) self.assertLen(p.pathspecs_progress, 1) self.assertEqual(p.pathspecs_progress[0].pathspec, pathspec) self.assertEqual(p.pathspecs_progress[0].status, transfer.PathSpecProgress.Status.SKIPPED) @mock.patch.object(file_store.EXTERNAL_FILE_STORE, "AddFiles") def testExternalFileStoreSubmissionIsTriggeredWhenFileIsSentToFileStore( self, add_file_mock): client_mock = action_mocks.GetFileClientMock() pathspec = rdf_paths.PathSpec( pathtype=rdf_paths.PathSpec.PathType.OS, path=os.path.join(self.base_path, "test_img.dd")) flow_test_lib.TestFlowHelper( compatibility.GetName(transfer.GetFile), client_mock, creator=self.test_username, client_id=self.client_id, pathspec=pathspec) add_file_mock.assert_called_once() args = add_file_mock.call_args_list[0][0] hash_id = list(args[0].keys())[0] self.assertIsInstance(hash_id, rdf_objects.SHA256HashID) self.assertEqual(args[0][hash_id].client_path, db.ClientPath.FromPathSpec(self.client_id, pathspec)) self.assertNotEmpty(args[0][hash_id].blob_refs) for blob_ref in args[0][hash_id].blob_refs: self.assertIsInstance(blob_ref, rdf_objects.BlobReference) class DummyMultiGetFileLogic(transfer.MultiGetFileLogic, flow_base.FlowBase): args_type = rdf_paths.PathSpec def Start(self): super().Start() self.StartFileFetch(self.args) def ReceiveFileStat(self, stat_entry, request_data=None): pass def ReceiveFileHash(self, stat_entry, file_hash, request_data=None): pass def ReceiveFetchedFile(self, stat_entry, file_hash, request_data=None, is_duplicate=False): pass def FileFetchFailed(self, pathspec, request_data=None, status=None): pass class DummyMultiGetFileLogicStat(transfer.MultiGetFileLogic, flow_base.FlowBase): args_type = rdf_paths.PathSpec def Start(self): super().Start() self.state.stop_at_stat = True self.StartFileFetch(self.args) def ReceiveFileStat(self, stat_entry, request_data=None): pass def ReceiveFileHash(self, stat_entry, file_hash, request_data=None): pass def ReceiveFetchedFile(self, stat_entry, file_hash, request_data=None, is_duplicate=False): pass def FileFetchFailed(self, pathspec, request_data=None, status=None): pass class DummyMultiGetFileLogicHash(transfer.MultiGetFileLogic, flow_base.FlowBase): args_type = rdf_paths.PathSpec def Start(self): super().Start() self.state.stop_at_hash = True self.StartFileFetch(self.args) def ReceiveFileStat(self, stat_entry, request_data=None): del stat_entry, request_data # Unused. def ReceiveFileHash(self, stat_entry, file_hash, request_data=None): del stat_entry, file_hash, request_data # Unused. def ReceiveFetchedFile(self, stat_entry, file_hash, request_data=None, is_duplicate=False): del stat_entry, file_hash, request_data, is_duplicate # Unused. def FileFetchFailed(self, pathspec, request_data=None, status=None): del pathspec, request_data, status # Unused. 
class MultiGetFileLogicTest(flow_test_lib.FlowTestsBaseclass): """Test the MultiGetFileLogicTest base class using DummyMultiGetFileLogic.""" def setUp(self): super().setUp() self.client_id = self.SetupClient(0) self.client_mock = action_mocks.MultiGetFileClientMock() def testStatCallsStatReceiveFileStatOnly(self): pathtype = rdf_paths.PathSpec.PathType.OS path = os.path.join(self.base_path, "test_img.dd") with mock.patch.object(DummyMultiGetFileLogicStat, "ReceiveFetchedFileStat") as dummy_fetched_stat: with mock.patch.object(DummyMultiGetFileLogicStat, "ReceiveFetchedFileHash") as dummy_fetched_hash: with mock.patch.object(DummyMultiGetFileLogicStat, "ReceiveFetchedFile") as dummy_fetched_file: with mock.patch.object(DummyMultiGetFileLogicStat, "FileFetchFailed") as mock_failure: flow_test_lib.TestFlowHelper( DummyMultiGetFileLogicStat.__name__, self.client_mock, creator=self.test_username, client_id=self.client_id, pathtype=pathtype, path=path) self.assertTrue(dummy_fetched_stat.called) self.assertEqual(dummy_fetched_stat.call_args[0][0].pathspec.path, path) self.assertEqual( dummy_fetched_stat.call_args[0][0].pathspec.pathtype, pathtype) self.assertFalse(dummy_fetched_hash.called) self.assertFalse(dummy_fetched_file.called) self.assertFalse(mock_failure.called) def testStatCallsFileFetchFailed(self): pathtype = rdf_paths.PathSpec.PathType.OS path = os.path.join(self.base_path, "invalid.dd") with mock.patch.object(DummyMultiGetFileLogicStat, "ReceiveFetchedFileStat") as dummy_fetched_stat: with mock.patch.object(DummyMultiGetFileLogicStat, "ReceiveFetchedFileHash") as dummy_fetched_hash: with mock.patch.object(DummyMultiGetFileLogicStat, "ReceiveFetchedFile") as dummy_fetched_file: with mock.patch.object(DummyMultiGetFileLogicStat, "FileFetchFailed") as mock_failure: flow_test_lib.TestFlowHelper( DummyMultiGetFileLogicStat.__name__, self.client_mock, creator=self.test_username, client_id=self.client_id, pathtype=pathtype, path=path) self.assertFalse(dummy_fetched_stat.called) self.assertFalse(dummy_fetched_hash.called) self.assertFalse(dummy_fetched_file.called) self.assertTrue(mock_failure.called) self.assertEqual(mock_failure.call_args[0][0].path, path) self.assertEqual(mock_failure.call_args[0][0].pathtype, pathtype) def testHashCallsReceiveFileHash(self): pathtype = rdf_paths.PathSpec.PathType.OS path = os.path.join(self.base_path, "test_img.dd") with mock.patch.object(DummyMultiGetFileLogicHash, "ReceiveFetchedFileStat") as dummy_fetched_stat: with mock.patch.object(DummyMultiGetFileLogicHash,<|fim▁hole|> "ReceiveFetchedFile") as dummy_fetched_file: with mock.patch.object(DummyMultiGetFileLogicHash, "FileFetchFailed") as mock_failure: flow_test_lib.TestFlowHelper( DummyMultiGetFileLogicHash.__name__, self.client_mock, creator=self.test_username, client_id=self.client_id, pathtype=pathtype, path=path) self.assertTrue(dummy_fetched_stat.called) self.assertTrue(dummy_fetched_hash.called) self.assertEqual(dummy_fetched_hash.call_args[0][0].pathspec.path, path) self.assertEqual( dummy_fetched_hash.call_args[0][0].pathspec.pathtype, pathtype) self.assertFalse(dummy_fetched_file.called) self.assertFalse(mock_failure.called) def testHashCallsFileFetchFailed(self): pathtype = rdf_paths.PathSpec.PathType.OS path = os.path.join(self.base_path, "invalid.dd") with mock.patch.object(DummyMultiGetFileLogicHash, "ReceiveFetchedFileStat") as dummy_fetched_stat: with mock.patch.object(DummyMultiGetFileLogicHash, "ReceiveFetchedFileHash") as dummy_fetched_hash: with 
mock.patch.object(DummyMultiGetFileLogicHash, "ReceiveFetchedFile") as dummy_fetched_file: with mock.patch.object(DummyMultiGetFileLogicHash, "FileFetchFailed") as mock_failure: flow_test_lib.TestFlowHelper( DummyMultiGetFileLogicHash.__name__, self.client_mock, creator=self.test_username, client_id=self.client_id, pathtype=pathtype, path=path) self.assertFalse(dummy_fetched_stat.called) self.assertFalse(dummy_fetched_hash.called) self.assertFalse(dummy_fetched_file.called) self.assertTrue(mock_failure.called) self.assertEqual(mock_failure.call_args[0][0].path, path) self.assertEqual(mock_failure.call_args[0][0].pathtype, pathtype) def testFileCallsReceiveFetchedFile(self): pathtype = rdf_paths.PathSpec.PathType.OS path = os.path.join(self.base_path, "test_img.dd") with mock.patch.object(DummyMultiGetFileLogic, "ReceiveFetchedFileStat") as dummy_fetched_stat: with mock.patch.object(DummyMultiGetFileLogic, "ReceiveFetchedFileHash") as dummy_fetched_hash: with mock.patch.object(DummyMultiGetFileLogic, "ReceiveFetchedFile") as dummy_fetched_file: with mock.patch.object(DummyMultiGetFileLogic, "FileFetchFailed") as mock_failure: flow_test_lib.TestFlowHelper( DummyMultiGetFileLogic.__name__, self.client_mock, creator=self.test_username, client_id=self.client_id, pathtype=pathtype, path=path) self.assertTrue(dummy_fetched_stat.called) self.assertTrue(dummy_fetched_hash.called) self.assertTrue(dummy_fetched_file.called) self.assertEqual(dummy_fetched_file.call_args[0][0].pathspec.path, path) self.assertEqual( dummy_fetched_file.call_args[0][0].pathspec.pathtype, pathtype) self.assertFalse(mock_failure.called) def testFileCallsFileFetchFailed(self): pathtype = rdf_paths.PathSpec.PathType.OS path = os.path.join(self.base_path, "invalid.dd") with mock.patch.object(DummyMultiGetFileLogic, "ReceiveFetchedFileStat") as dummy_fetched_stat: with mock.patch.object(DummyMultiGetFileLogic, "ReceiveFetchedFileHash") as dummy_fetched_hash: with mock.patch.object(DummyMultiGetFileLogic, "ReceiveFetchedFile") as dummy_fetched_file: with mock.patch.object(DummyMultiGetFileLogic, "FileFetchFailed") as mock_failure: flow_test_lib.TestFlowHelper( DummyMultiGetFileLogic.__name__, self.client_mock, creator=self.test_username, client_id=self.client_id, pathtype=pathtype, path=path) self.assertFalse(dummy_fetched_stat.called) self.assertFalse(dummy_fetched_hash.called) self.assertFalse(dummy_fetched_file.called) self.assertTrue(mock_failure.called) self.assertEqual(mock_failure.call_args[0][0].path, path) self.assertEqual(mock_failure.call_args[0][0].pathtype, pathtype) def main(argv): # Run the full test suite test_lib.main(argv) if __name__ == "__main__": app.run(main)<|fim▁end|>
"ReceiveFetchedFileHash") as dummy_fetched_hash: with mock.patch.object(DummyMultiGetFileLogicHash,
<|file_name|>scrollspy.js<|end_file_name|><|fim▁begin|>import { addClass, removeClass, EVENTS, on, off, getViewportSize, getClosest, getParents } from '../../utils/domUtils' import {nodeListToArray} from '../../utils/arrayUtils' function ScrollSpy (element, target = 'body', options = {}) { this.el = element this.opts = Object.assign({}, ScrollSpy.DEFAULTS, options) this.opts.target = target if (target === 'body') { this.scrollElement = window } else { this.scrollElement = document.querySelector(`[id=${target}]`) } this.selector = 'li > a' this.offsets = [] this.targets = [] this.activeTarget = null this.scrollHeight = 0 if (this.scrollElement) { this.refresh() this.process() } } ScrollSpy.DEFAULTS = { offset: 10, callback: (ele) => 0 } ScrollSpy.prototype.getScrollHeight = function () { return this.scrollElement.scrollHeight || Math.max(document.body.scrollHeight, document.documentElement.scrollHeight) } ScrollSpy.prototype.refresh = function () { this.offsets = [] this.targets = [] this.scrollHeight = this.getScrollHeight() let list = nodeListToArray(this.el.querySelectorAll(this.selector)) const isWindow = this.scrollElement === window list .map(ele => { const href = ele.getAttribute('href') if (/^#./.test(href)) { const doc = document.documentElement const rootEl = isWindow ? document : this.scrollElement const hrefEl = rootEl.querySelector(`[id='${href.slice(1)}']`) const windowScrollTop = (window.pageYOffset || doc.scrollTop) - (doc.clientTop || 0) const offset = isWindow ? hrefEl.getBoundingClientRect().top + windowScrollTop : hrefEl.offsetTop + this.scrollElement.scrollTop return [offset, href] } else { return null } }) .filter(item => item) .sort((a, b) => a[0] - b[0]) .forEach(item => { this.offsets.push(item[0]) this.targets.push(item[1]) }) // console.log(this.offsets, this.targets) } ScrollSpy.prototype.process = function () { const isWindow = this.scrollElement === window const scrollTop = (isWindow ? window.pageYOffset : this.scrollElement.scrollTop) + this.opts.offset const scrollHeight = this.getScrollHeight() const scrollElementHeight = isWindow ? 
getViewportSize().height : this.scrollElement.getBoundingClientRect().height const maxScroll = this.opts.offset + scrollHeight - scrollElementHeight const offsets = this.offsets const targets = this.targets const activeTarget = this.activeTarget let i if (this.scrollHeight !== scrollHeight) { this.refresh() } if (scrollTop >= maxScroll) { return activeTarget !== (i = targets[targets.length - 1]) && this.activate(i) } if (activeTarget && scrollTop < offsets[0]) { this.activeTarget = null return this.clear() } for (i = offsets.length; i--;) { activeTarget !== targets[i] && scrollTop >= offsets[i] && (offsets[i + 1] === undefined || scrollTop < offsets[i + 1]) && this.activate(targets[i]) } } ScrollSpy.prototype.activate = function (target) { this.activeTarget = target this.clear() const selector = this.selector + '[data-target="' + target + '"],' + this.selector + '[href="' + target + '"]' const activeCallback = this.opts.callback let active = nodeListToArray(this.el.querySelectorAll(selector)) active.forEach(ele => { getParents(ele, 'li') .forEach(item => { addClass(item, 'active') activeCallback(item) }) if (getParents(ele, '.dropdown-menu').length) {<|fim▁hole|> } }) } ScrollSpy.prototype.clear = function () { let list = nodeListToArray(this.el.querySelectorAll(this.selector)) list.forEach(ele => { getParents(ele, '.active', this.opts.target).forEach(item => { removeClass(item, 'active') }) }) } const INSTANCE = '_uiv_scrollspy_instance' const events = [EVENTS.RESIZE, EVENTS.SCROLL] const bind = (el, binding) => { // console.log('bind') unbind(el) } const inserted = (el, binding) => { // console.log('inserted') const scrollSpy = new ScrollSpy(el, binding.arg, binding.value) if (scrollSpy.scrollElement) { scrollSpy.handler = () => { scrollSpy.process() } events.forEach(event => { on(scrollSpy.scrollElement, event, scrollSpy.handler) }) } el[INSTANCE] = scrollSpy } const unbind = (el) => { // console.log('unbind') let instance = el[INSTANCE] if (instance && instance.scrollElement) { events.forEach(event => { off(instance.scrollElement, event, instance.handler) }) delete el[INSTANCE] } } const update = (el, binding) => { // console.log('update') const isArgUpdated = binding.arg !== binding.oldArg const isValueUpdated = binding.value !== binding.oldValue if (isArgUpdated || isValueUpdated) { bind(el, binding) inserted(el, binding) } } export default {bind, unbind, update, inserted}<|fim▁end|>
addClass(getClosest(ele, 'li.dropdown'), 'active')
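The completion above supplies the dropdown-highlighting line inside activate(). The core of the directive, though, is process(): refresh() builds ascending (offset, target) pairs, and the reverse for-loop then picks the last anchor whose offset is at or below the current scroll position, or none when the page is above the first anchor. A compact Python sketch of that selection rule, using bisect in place of the reverse scan (illustrative only; the resize/scroll wiring is not modeled):

import bisect

def active_target(offsets, targets, scroll_top):
    """offsets must be sorted ascending; returns None above the first anchor."""
    i = bisect.bisect_right(offsets, scroll_top) - 1
    return targets[i] if i >= 0 else None

# e.g. offsets=[0, 400, 900], targets=['#a', '#b', '#c']
# active_target([0, 400, 900], ['#a', '#b', '#c'], 450) -> '#b'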
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>extern crate bindgen; use std::fs::{File, create_dir_all, metadata}; use std::path::Path; use std::io::{ErrorKind, Write}; const HEADERS : &'static [&'static str] = &["ssl", "entropy", "ctr_drbg"]; const HEADER_BASE : &'static str = "/usr/local/include/mbedtls/"; const MOD_FILE : &'static str = r#" #[allow(dead_code, non_camel_case_types, non_snake_case, non_upper_case_globals)] mod bindings; "#; fn main() { for header in HEADERS.iter() { gen(header); } } fn gen(header: &str) { let dir = "src/mbed/".to_string() + header + "/"; let file = dir.clone() + "bindings.rs"; <|fim▁hole|> let bindings_file = File::create(file).unwrap(); bindgen::Builder::default() .header(HEADER_BASE.to_string() + header + ".h") .link("mbedtls") .link("mbedx509") .link("mbedcrypto") .emit_builtins() .generate().unwrap() .write(Box::new(bindings_file)) .unwrap(); ; let mod_file_str = dir.clone() + "/mod.rs"; let metadata = metadata(Path::new(&mod_file_str)); if let Err(e) = metadata { if let ErrorKind::NotFound = e.kind() { let mut mod_file = File::create(mod_file_str).unwrap(); mod_file.write(MOD_FILE.as_bytes()).unwrap(); } } }<|fim▁end|>
create_dir_all(&dir).unwrap();
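The line restored by this completion is the create_dir_all call, which makes the generator idempotent: the bindings file is rewritten on every run, while mod.rs is written only when absent, so hand edits to it survive rebuilds. A rough, language-neutral sketch of that write-once scaffold pattern in Python — paths and contents here are placeholders, not the crate's real layout:

from pathlib import Path

MOD_FILE = "mod bindings;\n"  # stand-in for the real mod.rs template

def gen(header: str, base: str = "src/mbed") -> None:
    d = Path(base) / header
    d.mkdir(parents=True, exist_ok=True)    # mirrors create_dir_all
    (d / "bindings.rs").write_text("")      # regenerated on every run
    mod = d / "mod.rs"
    if not mod.exists():                    # written exactly once
        mod.write_text(MOD_FILE)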
<|file_name|>issue-50706.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // compile-pass pub struct Stats; #[derive(PartialEq, Eq)] pub struct StatVariant { pub id: u8, _priv: (), } #[derive(PartialEq, Eq)] pub struct Stat { pub variant: StatVariant, pub index: usize, _priv: (), } impl Stats { pub const TEST: StatVariant = StatVariant{id: 0, _priv: (),}; #[allow(non_upper_case_globals)] pub const A: Stat = Stat{ variant: Self::TEST, index: 0, _priv: (),}; }<|fim▁hole|> let stat = Stat{variant, index, _priv: (),}; match stat { Stats::A => Some(Stats::A), _ => None, } } } fn main() {}<|fim▁end|>
impl Stat { pub fn from_index(variant: StatVariant, index: usize) -> Option<Stat> {
<|file_name|>FlowChartScreenController.java<|end_file_name|><|fim▁begin|>package com.virtualfactory.screen.layer.components; import de.lessvoid.nifty.Nifty; import de.lessvoid.nifty.NiftyEventSubscriber; import de.lessvoid.nifty.controls.ButtonClickedEvent; import de.lessvoid.nifty.controls.Controller; import de.lessvoid.nifty.controls.window.WindowControl; import de.lessvoid.nifty.elements.Element; import de.lessvoid.nifty.elements.render.ImageRenderer; import de.lessvoid.nifty.input.NiftyInputEvent; import de.lessvoid.nifty.render.NiftyImage; import de.lessvoid.nifty.screen.Screen; import de.lessvoid.nifty.tools.SizeValue; import de.lessvoid.xml.xpp3.Attributes; import com.virtualfactory.engine.GameEngine; import com.virtualfactory.utils.CommonBuilders; import com.virtualfactory.utils.Pair; import java.util.Properties; /** * * @author David */ public class FlowChartScreenController implements Controller { private Nifty nifty; private Screen screen; private WindowControl winControls; private boolean isVisible; private GameEngine gameEngine; final CommonBuilders common = new CommonBuilders(); private NiftyImage flowChartImage; @Override public void bind( final Nifty nifty, final Screen screen, final Element element, final Properties parameter, final Attributes controlDefinitionAttributes) { this.nifty = nifty; this.screen = screen; this.winControls = screen.findNiftyControl("winFlowChartControl", WindowControl.class); Attributes x = new Attributes(); x.set("hideOnClose", "true"); this.winControls.bind(nifty, screen, winControls.getElement(), null, x); isVisible = false; } public boolean isIsVisible() { return isVisible; } public void setIsVisible(boolean isVisible) { this.isVisible = isVisible; } @Override public void init(Properties parameter, Attributes controlDefinitionAttributes) { } @Override public void onStartScreen() { } @Override public void onFocus(boolean getFocus) { } @Override public boolean inputEvent(final NiftyInputEvent inputEvent) { return false; } public void loadWindowControl(GameEngine game,int index, Pair<Integer,Integer> position){ this.gameEngine = game; if (index == -1){ winControls.getElement().setVisible(false); winControls.getContent().hide(); isVisible = false; }else{ winControls.getElement().setVisible(true); winControls.getContent().show(); isVisible = true; if (position != null){ if (winControls.getWidth() + position.getFirst() > gameEngine.jmonkeyApp.getGuiViewPort().getCamera().getWidth()) position.setFirst(gameEngine.jmonkeyApp.getGuiViewPort().getCamera().getWidth() - winControls.getWidth()); if (winControls.getHeight() + position.getSecond() > gameEngine.jmonkeyApp.getGuiViewPort().getCamera().getHeight()) position.setSecond(gameEngine.jmonkeyApp.getGuiViewPort().getCamera().getHeight() - winControls.getHeight()); winControls.getElement().setConstraintX(new SizeValue(position.getFirst() + "px")); winControls.getElement().setConstraintY(new SizeValue(position.getSecond() + "px")); winControls.getElement().getParent().layoutElements(); } winControls.getElement().setConstraintX(null); winControls.getElement().setConstraintY(null); } loadValues(index); } private void loadValues(int index){ if (index == -1){ flowChartImage = nifty.createImage("Models/Flows/none.png", false); screen.findElementByName("imageFlowOfActivities").getRenderer(ImageRenderer.class).setImage(flowChartImage); }else{ flowChartImage = nifty.createImage("Models/Flows/" + gameEngine.getGameData().getCurrentGame().getFlowImage(), false);<|fim▁hole|> 
@NiftyEventSubscriber(id="closeFlowChart") public void onCloseFlowChartButtonClicked(final String id, final ButtonClickedEvent event) { gameEngine.updateLastActivitySystemTime(); loadWindowControl(gameEngine, -1, null); } }<|fim▁end|>
screen.findElementByName("imageFlowOfActivities").getRenderer(ImageRenderer.class).setImage(flowChartImage); } }
<|file_name|>cli-help.js<|end_file_name|><|fim▁begin|>/* * This file is part of the xPack distribution * (http://xpack.github.io). * Copyright (c) 2017 Liviu Ionescu. * * Permission is hereby granted, free of charge, to any person * obtaining a copy of this software and associated documentation * files (the "Software"), to deal in the Software without * restriction, including without limitation the rights to use, * copy, modify, merge, publish, distribute, sublicense, and/or * sell copies of the Software, and to permit persons to whom * the Software is furnished to do so, subject to the following * conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR * OTHER DEALINGS IN THE SOFTWARE. */ 'use strict' /* eslint valid-jsdoc: "error" */ /* eslint max-len: [ "error", 80, { "ignoreUrls": true } ] */ // ---------------------------------------------------------------------------- /* * This file provides support for displaying the application and * command specific help. */ // ============================================================================ // export class CliHelp { // -------------------------------------------------------------------------- constructor (ctx) { this.context = ctx this.middleLimit = 40 this.rightLimit = 79 // Do not write in col 80 } outputCommands (commands, description, msg = '[<args>...]') { const log = this.context.log const programName = this.context.programName log.output() if (!description) { const pkgJson = this.context.package description = pkgJson.description } log.output(`${description}`) this.commands = commands if (commands) { // Deep copy & sort const cmds = commands.slice() cmds.sort() log.output(`Usage: ${programName} <command> [<subcommand>...]` + ` [<options> ...] ${msg}`) log.output() log.output('where <command> is one of:') let buf = null cmds.forEach((cmd, i) => { if (buf === null) { buf = ' ' } buf += cmd if (i !== (cmds.length - 1)) { buf += ', ' } if (buf.length > this.rightLimit) { log.output(buf) buf = null } }) if (buf != null) { log.output(buf) buf = null } } else { log.output(`Usage: ${programName} ` + ` [<options> ...] 
${msg}`) } } static padRight (str, n) { str += ' '.repeat(n) return str.substr(0, n) } outputHelpDetails (options, more = this.more) { const log = this.context.log const programName = this.context.programName const s1 = `${programName} -h|--help` const s2 = `${programName} <command> -h|--help` if (more.isFirstPass) { if (s1.length > more.width) { more.width = s1.length } if (this.commands) { if (s2.length > more.width) { more.width = s2.length } } } else { log.output() this.outputMaybeLongLine(s1, 'Quick help', more) if (this.commands) { this.outputMaybeLongLine(s2, 'Quick help on command', more) } } } outputMaybeLongLine (out, msg, more = this.more) { const log = this.context.log if (out.length >= more.limit) { log.output(out) out = '' } out += ' '.repeat(more.width) let desc = '' if (msg) { desc = msg + ' ' } log.output(`${CliHelp.padRight(out, more.width)} ${desc}`) } outputEarlyDetails (optionGroups, more = this.more) { const programName = this.context.programName if (!more.isFirstPass) { // log.output() } optionGroups.forEach((optionGroup) => { optionGroup.optionDefs.forEach((optionDef) => { if (optionDef.msg && optionDef.doProcessEarly) { let out = `${programName} ` optionDef.options.forEach((opt, index) => { out += opt if (index < (optionDef.options.length - 1)) { out += '|' } }) if (more.isFirstPass) { if (out.length > more.width) { more.width = out.length } } else { this.outputMaybeLongLine(out, optionDef.msg, more) } } }) }) } outputOptionGroups (optionGroups, more = this.more) { optionGroups.forEach((optionGroup) => { this.outputOptions(optionGroup.optionDefs, optionGroup.title, more) }) } <|fim▁hole|> optionDefs.forEach((optionDef) => { if (optionDef.msg && !optionDef.doProcessEarly && !optionDef.isHelp) { hasContent = true } }) if (!hasContent) { return } if (!more.isFirstPass && title) { log.output() log.output(title + ':') } optionDefs.forEach((optionDef) => { if (optionDef.msg && !optionDef.doProcessEarly && !optionDef.isHelp) { let strOpts = ' ' optionDef.options.forEach((opt, index) => { strOpts += opt if (index < (optionDef.options.length - 1)) { strOpts += '|' } }) if (optionDef.hasValue || optionDef.values || optionDef.param) { if (optionDef.param) { strOpts += ` <${optionDef.param}>` } else { strOpts += ' <s>' } } if (more.isFirstPass) { if (strOpts.length > more.width) { more.width = strOpts.length } } else { if (strOpts.length >= more.limit) { log.output(strOpts) strOpts = '' } strOpts += ' '.repeat(more.width) let desc = '' if (optionDef.msg.length > 0) { desc = optionDef.msg + ' ' } if (Array.isArray(optionDef.values)) { desc += '(' optionDef.values.forEach((value, index) => { desc += value if (index < (optionDef.values.length - 1)) { desc += '|' } }) desc += ') ' } const msgDefault = optionDef.msgDefault ? 
`, default ${optionDef.msgDefault}` : '' if (optionDef.isOptional && optionDef.isMultiple) { desc += `(optional, multiple${msgDefault})` } else if (optionDef.isOptional) { desc += `(optional${msgDefault})` } else if (optionDef.isMultiple) { desc += '(multiple)' } log.output(`${CliHelp.padRight(strOpts, more.width)} ${desc}`) } } }) } outputCommandLine (title, optionGroups) { const log = this.context.log const programName = this.context.programName log.output() log.output(title) const commands = this.context.fullCommands const usage = `Usage: ${programName} ${commands}` let str = usage let optionDefs = [] if (optionGroups && (optionGroups.length > 0) && optionGroups[0].preOptions) { str += ' ' + optionGroups[0].preOptions } str += ' [options...]' optionGroups.forEach((optionGroup) => { optionDefs = optionDefs.concat(optionGroup.optionDefs) }) let optStr optionDefs.forEach((optionDef) => { optStr = '' optionDef.options.forEach((val) => { // Assume the longest option is the more readable. if (val.length > optStr.length) { optStr = val } }) if (optionDef.param) { optStr += ` <${optionDef.param}>` } else if (optionDef.hasValue) { optStr += ' <s>' } if (optionDef.isOptional) { optStr = `[${optStr}]` if (optionDef.isMultiple) { optStr += '*' } } else if (optionDef.isMultiple) { optStr = `[${optStr}]+` } // log.output(optStr) if (str.length + optStr.length + 1 > this.rightLimit) { log.output(str) str = ' '.repeat(usage.length) } str += ' ' + optStr }) if (optionGroups && (optionGroups.length > 0) && optionGroups[0].postOptions) { optStr = optionGroups[0].postOptions if (str.length + optStr.length + 1 > this.rightLimit) { log.output(str) str = ' '.repeat(usage.length) } str += ' ' + optStr } if (str.length > usage.length) { log.output(str) } } outputFooter () { const log = this.context.log const pkgJson = this.context.package log.output() const pkgPath = this.context.rootPath log.output(`npm ${pkgJson.name}@${pkgJson.version} '${pkgPath}'`) if (pkgJson.homepage) { log.output(`Home page: <${pkgJson.homepage}>`) } const br = 'Bug reports:' if (pkgJson.bugs && pkgJson.bugs.url) { log.output(`${br} <${pkgJson.bugs.url}>`) } else if (pkgJson.author) { if (typeof pkgJson.author === 'object') { log.output(`${br} ${pkgJson.author.name} <${pkgJson.author.email}>`) } else if (typeof pkgJson.author === 'string') { log.output(`${br} ${pkgJson.author}`) } } } outputMainHelp (cmds, optionGroups, description) { // Try to get a message from the first group. this.outputCommands(cmds, description, optionGroups[0].msg) // The special trick here is how to align the right column. // For this two steps are needed, with the first to compute // the max width of the first column, and then to output text. this.twoPassAlign(() => { this.outputOptionGroups(optionGroups) this.outputHelpDetails(optionGroups) this.outputEarlyDetails(optionGroups) }) this.outputFooter() } twoPassAlign (f) { const more = this.firstPass() f() this.secondPass(more) f() } firstPass () { this.more = { isFirstPass: true, width: 0, limit: this.middleLimit } return this.more } secondPass (more = this.more) { more.isFirstPass = false // One more is implicit, so a total 2 spaces between columns. more.width += 1 if (more.width > more.limit) { more.width = more.limit } return more } } // ---------------------------------------------------------------------------- // Node.js specific export definitions. // By default, `module.exports = {}`. // The CliHelp class is added as a property of this object. 
module.exports.CliHelp = CliHelp // In ES6, it would be: // export class CliHelp { ... } // ... // import { CliHelp } from 'cli-help.js' // ----------------------------------------------------------------------------<|fim▁end|>
outputOptions (optionDefs, title, more = this.more) { const log = this.context.log let hasContent = false
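The completion restores the opening of outputOptions(), the routine driven twice by twoPassAlign(): on the first pass it only records the widest left-hand column into the shared `more` state, and on the second pass it prints with that width, capped by middleLimit. A stripped-down Python sketch of the same two-pass trick, with a toy render function standing in for the real option printer:

def two_pass_align(render, limit=40):
    more = {"first_pass": True, "width": 0, "limit": limit}
    render(more)                               # pass 1: measure only
    more["first_pass"] = False
    more["width"] = min(more["width"] + 1, limit)
    render(more)                               # pass 2: print aligned

def render(more):
    for left, desc in [("-h|--help", "Quick help"),
                       ("<command> -h|--help", "Quick help on command")]:
        if more["first_pass"]:
            more["width"] = max(more["width"], len(left))
        else:
            print(left.ljust(more["width"]), desc)

two_pass_align(render)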
<|file_name|>test_core.py<|end_file_name|><|fim▁begin|>import unittest from pyrogi import Screen, Backend from pyrogi.util import Vec2 class TestScreen(Screen): pass class TestBackend(unittest.TestCase): def test_screens(self): backend = Backend(Vec2(0, 0), Vec2(0, 0), '') self.assertEqual(len(backend.screens), 0) backend.set_screen(TestScreen()) self.assertEqual(len(backend.screens), 1) <|fim▁hole|> backend.go_back_n_screens(1) self.assertEqual(len(backend.screens), 1) backend.set_screen(TestScreen()) self.assertEqual(len(backend.screens), 2) backend.go_back_n_screens(2) self.assertEqual(len(backend.screens), 0)<|fim▁end|>
backend.set_screen(TestScreen()) self.assertEqual(len(backend.screens), 2)
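The test pins down stack semantics for Backend.screens: set_screen pushes, go_back_n_screens pops the given count. A minimal stand-in with the same observable behavior — hypothetical, not pyrogi's actual Backend:

class ScreenStack:
    """Hypothetical stand-in mirroring the semantics the test asserts."""

    def __init__(self):
        self.screens = []

    def set_screen(self, screen):
        self.screens.append(screen)        # push

    def go_back_n_screens(self, n):
        for _ in range(n):                 # pop n, newest first
            self.screens.pop()

stack = ScreenStack()
stack.set_screen("a")
stack.set_screen("b")
stack.go_back_n_screens(2)
assert stack.screens == []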
<|file_name|>bluetooth_adapter.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/chromeos/bluetooth/bluetooth_adapter.h" #include "base/bind.h" #include "base/lazy_instance.h" #include "base/logging.h" #include "base/stl_util.h" #include "base/values.h" #include "chrome/browser/chromeos/bluetooth/bluetooth_device.h" #include "chromeos/dbus/bluetooth_adapter_client.h" #include "chromeos/dbus/bluetooth_device_client.h" #include "chromeos/dbus/bluetooth_manager_client.h" #include "chromeos/dbus/bluetooth_out_of_band_client.h" #include "chromeos/dbus/dbus_thread_manager.h" #include "dbus/object_path.h" namespace { // Shared default adapter instance, we don't want to keep this class around // if nobody is using it so use a WeakPtr and create the object when needed; // since Google C++ Style (and clang's static analyzer) forbids us having // exit-time destructors we use a leaky lazy instance for it. base::LazyInstance<base::WeakPtr<chromeos::BluetoothAdapter> >::Leaky default_adapter = LAZY_INSTANCE_INITIALIZER; } // namespace namespace chromeos { BluetoothAdapter::BluetoothAdapter() : weak_ptr_factory_(this), track_default_(false), powered_(false), discovering_(false) { DBusThreadManager::Get()->GetBluetoothManagerClient()-> AddObserver(this); DBusThreadManager::Get()->GetBluetoothAdapterClient()-> AddObserver(this); DBusThreadManager::Get()->GetBluetoothDeviceClient()-> AddObserver(this); } BluetoothAdapter::~BluetoothAdapter() { DBusThreadManager::Get()->GetBluetoothDeviceClient()-> RemoveObserver(this); DBusThreadManager::Get()->GetBluetoothAdapterClient()-> RemoveObserver(this); DBusThreadManager::Get()->GetBluetoothManagerClient()-> RemoveObserver(this); STLDeleteValues(&devices_); } void BluetoothAdapter::AddObserver(Observer* observer) { DCHECK(observer); observers_.AddObserver(observer); } void BluetoothAdapter::RemoveObserver(Observer* observer) { DCHECK(observer); observers_.RemoveObserver(observer); } bool BluetoothAdapter::IsPresent() const { return !object_path_.value().empty(); } bool BluetoothAdapter::IsPowered() const { return powered_; } void BluetoothAdapter::SetPowered(bool powered, const base::Closure& callback, const ErrorCallback& error_callback) { DBusThreadManager::Get()->GetBluetoothAdapterClient()-> GetProperties(object_path_)->powered.Set( powered, base::Bind(&BluetoothAdapter::OnSetPowered, weak_ptr_factory_.GetWeakPtr(), callback, error_callback)); } bool BluetoothAdapter::IsDiscovering() const { return discovering_; } void BluetoothAdapter::SetDiscovering(bool discovering, const base::Closure& callback, const ErrorCallback& error_callback) { if (discovering) { DBusThreadManager::Get()->GetBluetoothAdapterClient()-> StartDiscovery(object_path_, base::Bind(&BluetoothAdapter::OnStartDiscovery, weak_ptr_factory_.GetWeakPtr(), callback, error_callback)); } else { DBusThreadManager::Get()->GetBluetoothAdapterClient()-> StopDiscovery(object_path_, base::Bind(&BluetoothAdapter::OnStopDiscovery, weak_ptr_factory_.GetWeakPtr(), callback, error_callback)); } } BluetoothAdapter::DeviceList BluetoothAdapter::GetDevices() { ConstDeviceList const_devices = const_cast<const BluetoothAdapter *>(this)->GetDevices(); DeviceList devices; for (ConstDeviceList::const_iterator i = const_devices.begin(); i != const_devices.end(); ++i) devices.push_back(const_cast<BluetoothDevice *>(*i)); return devices; } 
BluetoothAdapter::ConstDeviceList BluetoothAdapter::GetDevices() const { ConstDeviceList devices; for (DevicesMap::const_iterator iter = devices_.begin(); iter != devices_.end(); ++iter) devices.push_back(iter->second); return devices; } BluetoothDevice* BluetoothAdapter::GetDevice(const std::string& address) { return const_cast<BluetoothDevice *>( const_cast<const BluetoothAdapter *>(this)->GetDevice(address)); } const BluetoothDevice* BluetoothAdapter::GetDevice( const std::string& address) const { DevicesMap::const_iterator iter = devices_.find(address); if (iter != devices_.end()) return iter->second; return NULL; } void BluetoothAdapter::ReadLocalOutOfBandPairingData( const BluetoothOutOfBandPairingDataCallback& callback, const ErrorCallback& error_callback) { DBusThreadManager::Get()->GetBluetoothOutOfBandClient()-> ReadLocalData(object_path_, base::Bind(&BluetoothAdapter::OnReadLocalData, weak_ptr_factory_.GetWeakPtr(), callback, error_callback)); } void BluetoothAdapter::TrackDefaultAdapter() { DVLOG(1) << "Tracking default adapter"; track_default_ = true; DBusThreadManager::Get()->GetBluetoothManagerClient()-> DefaultAdapter(base::Bind(&BluetoothAdapter::AdapterCallback, weak_ptr_factory_.GetWeakPtr())); } void BluetoothAdapter::FindAdapter(const std::string& address) { DVLOG(1) << "Using adapter " << address; track_default_ = false; DBusThreadManager::Get()->GetBluetoothManagerClient()-> FindAdapter(address, base::Bind(&BluetoothAdapter::AdapterCallback, weak_ptr_factory_.GetWeakPtr())); } void BluetoothAdapter::AdapterCallback(const dbus::ObjectPath& adapter_path, bool success) { if (success) { ChangeAdapter(adapter_path); } else if (!object_path_.value().empty()) { RemoveAdapter(); } } void BluetoothAdapter::DefaultAdapterChanged( const dbus::ObjectPath& adapter_path) { if (track_default_) ChangeAdapter(adapter_path); } void BluetoothAdapter::AdapterRemoved(const dbus::ObjectPath& adapter_path) { if (adapter_path == object_path_) RemoveAdapter(); } void BluetoothAdapter::ChangeAdapter(const dbus::ObjectPath& adapter_path) { if (adapter_path == object_path_) return; // Determine whether this is a change of adapter or gaining an adapter, // remember for later so we can send the right notification. const bool new_adapter = object_path_.value().empty(); if (new_adapter) { DVLOG(1) << "Adapter path initialized to " << adapter_path.value(); } else { DVLOG(1) << "Adapter path changed from " << object_path_.value() << " to " << adapter_path.value(); // Invalidate the devices list, since the property update does not // remove them. ClearDevices(); } object_path_ = adapter_path; // Update properties to their new values. BluetoothAdapterClient::Properties* properties = DBusThreadManager::Get()->GetBluetoothAdapterClient()-> GetProperties(object_path_); address_ = properties->address.value(); PoweredChanged(properties->powered.value()); DiscoveringChanged(properties->discovering.value()); DevicesChanged(properties->devices.value()); // Notify observers if we did not have an adapter before, the case of // moving from one to another is hidden from layers above. 
if (new_adapter) FOR_EACH_OBSERVER(BluetoothAdapter::Observer, observers_, AdapterPresentChanged(this, true)); } void BluetoothAdapter::RemoveAdapter() { DVLOG(1) << "Adapter lost."; PoweredChanged(false); DiscoveringChanged(false); ClearDevices(); object_path_ = dbus::ObjectPath(""); address_.clear(); FOR_EACH_OBSERVER(BluetoothAdapter::Observer, observers_, AdapterPresentChanged(this, false)); } void BluetoothAdapter::OnSetPowered(const base::Closure& callback, const ErrorCallback& error_callback, bool success) { if (success) callback.Run(); else error_callback.Run(); } void BluetoothAdapter::PoweredChanged(bool powered) { if (powered == powered_) return; powered_ = powered; FOR_EACH_OBSERVER(BluetoothAdapter::Observer, observers_, AdapterPoweredChanged(this, powered_)); } void BluetoothAdapter::OnStartDiscovery(const base::Closure& callback, const ErrorCallback& error_callback, const dbus::ObjectPath& adapter_path, bool success) { if (success) { DVLOG(1) << object_path_.value() << ": started discovery."; // Clear devices found in previous discovery attempts ClearDiscoveredDevices(); callback.Run(); } else { // TODO(keybuk): in future, don't run the callback if the error was just // that we were already discovering. error_callback.Run(); } } void BluetoothAdapter::OnStopDiscovery(const base::Closure& callback, const ErrorCallback& error_callback, const dbus::ObjectPath& adapter_path, bool success) { if (success) { DVLOG(1) << object_path_.value() << ": stopped discovery."; callback.Run(); // Leave found devices available for perusing. } else { // TODO(keybuk): in future, don't run the callback if the error was just // that we weren't discovering. error_callback.Run(); } } void BluetoothAdapter::DiscoveringChanged(bool discovering) { if (discovering == discovering_) return; discovering_ = discovering; FOR_EACH_OBSERVER(BluetoothAdapter::Observer, observers_, AdapterDiscoveringChanged(this, discovering_)); } void BluetoothAdapter::OnReadLocalData( const BluetoothOutOfBandPairingDataCallback& callback, const ErrorCallback& error_callback, const BluetoothOutOfBandPairingData& data, bool success) { if (success) callback.Run(data); else error_callback.Run(); } void BluetoothAdapter::AdapterPropertyChanged( const dbus::ObjectPath& adapter_path, const std::string& property_name) { if (adapter_path != object_path_) return; BluetoothAdapterClient::Properties* properties = DBusThreadManager::Get()->GetBluetoothAdapterClient()-> GetProperties(object_path_); if (property_name == properties->powered.name()) { PoweredChanged(properties->powered.value()); } else if (property_name == properties->discovering.name()) { DiscoveringChanged(properties->discovering.value()); } else if (property_name == properties->devices.name()) { DevicesChanged(properties->devices.value()); } } void BluetoothAdapter::DevicePropertyChanged( const dbus::ObjectPath& device_path, const std::string& property_name) { UpdateDevice(device_path); } void BluetoothAdapter::UpdateDevice(const dbus::ObjectPath& device_path) { BluetoothDeviceClient::Properties* properties = DBusThreadManager::Get()->GetBluetoothDeviceClient()-> GetProperties(device_path); // When we first see a device, we may not know the address yet and need to // wait for the DevicePropertyChanged signal before adding the device. 
const std::string address = properties->address.value(); if (address.empty()) return; // The device may be already known to us, either because this is an update // to properties, or the device going from discovered to connected and // pairing gaining an object path in the process. In any case, we want // to update the existing object, not create a new one. DevicesMap::iterator iter = devices_.find(address); BluetoothDevice* device; const bool update_device = (iter != devices_.end()); if (update_device) { device = iter->second; } else { device = BluetoothDevice::Create(this); devices_[address] = device; } const bool was_paired = device->IsPaired(); if (!was_paired) { DVLOG(1) << "Assigned object path " << device_path.value() << " to device " << address; device->SetObjectPath(device_path); } device->Update(properties, true); // Don't send a duplicate added event for supported devices that were // previously visible or for already paired devices, send a changed // event instead. We always send one event or the other since we always<|fim▁hole|> DeviceChanged(this, device)); } else { FOR_EACH_OBSERVER(BluetoothAdapter::Observer, observers_, DeviceAdded(this, device)); } } void BluetoothAdapter::ClearDevices() { DevicesMap replace; devices_.swap(replace); for (DevicesMap::iterator iter = replace.begin(); iter != replace.end(); ++iter) { BluetoothDevice* device = iter->second; if (device->IsSupported() || device->IsPaired()) FOR_EACH_OBSERVER(BluetoothAdapter::Observer, observers_, DeviceRemoved(this, device)); delete device; } } void BluetoothAdapter::DeviceCreated(const dbus::ObjectPath& adapter_path, const dbus::ObjectPath& device_path) { if (adapter_path != object_path_) return; UpdateDevice(device_path); } void BluetoothAdapter::DeviceRemoved(const dbus::ObjectPath& adapter_path, const dbus::ObjectPath& device_path) { if (adapter_path != object_path_) return; DevicesMap::iterator iter = devices_.begin(); while (iter != devices_.end()) { BluetoothDevice* device = iter->second; DevicesMap::iterator temp = iter; ++iter; if (device->object_path_ != device_path) continue; // DeviceRemoved can also be called to indicate a device that is visible // during discovery has disconnected, but it is still visible to the // adapter, so don't remove in that case and only clear the object path. if (!device->IsVisible()) { FOR_EACH_OBSERVER(BluetoothAdapter::Observer, observers_, DeviceRemoved(this, device)); DVLOG(1) << "Removed device " << device->address(); delete device; devices_.erase(temp); } else { DVLOG(1) << "Removed object path from device " << device->address(); device->RemoveObjectPath(); // If the device is not supported then we want to act as if it was // removed, even though it is still visible to the adapter. 
if (!device->IsSupported()) { FOR_EACH_OBSERVER(BluetoothAdapter::Observer, observers_, DeviceRemoved(this, device)); } else { FOR_EACH_OBSERVER(BluetoothAdapter::Observer, observers_, DeviceChanged(this, device)); } } } } void BluetoothAdapter::DevicesChanged( const std::vector<dbus::ObjectPath>& devices) { for (std::vector<dbus::ObjectPath>::const_iterator iter = devices.begin(); iter != devices.end(); ++iter) UpdateDevice(*iter); } void BluetoothAdapter::ClearDiscoveredDevices() { DevicesMap::iterator iter = devices_.begin(); while (iter != devices_.end()) { BluetoothDevice* device = iter->second; DevicesMap::iterator temp = iter; ++iter; if (!device->IsPaired()) { if (device->IsSupported()) FOR_EACH_OBSERVER(BluetoothAdapter::Observer, observers_, DeviceRemoved(this, device)); delete device; devices_.erase(temp); } } } void BluetoothAdapter::DeviceFound( const dbus::ObjectPath& adapter_path, const std::string& address, const BluetoothDeviceClient::Properties& properties) { if (adapter_path != object_path_) return; // DeviceFound can also be called to indicate that a device we've // paired with is now visible to the adapter during discovery, in which // case we want to update the existing object, not create a new one. BluetoothDevice* device; DevicesMap::iterator iter = devices_.find(address); const bool update_device = (iter != devices_.end()); if (update_device) { device = iter->second; } else { device = BluetoothDevice::Create(this); devices_[address] = device; } DVLOG(1) << "Device " << address << " is visible to the adapter"; device->SetVisible(true); device->Update(&properties, false); // Don't send a duplicated added event for duplicate signals for supported // devices that were previously visible (should never happen) or for already // paired devices, send a changed event instead. We do not inform observers // if we find or update an unconnected and unsupported device. if (update_device && (device->IsSupported() || device->IsPaired())) { FOR_EACH_OBSERVER(BluetoothAdapter::Observer, observers_, DeviceChanged(this, device)); } else if (device->IsSupported()) { FOR_EACH_OBSERVER(BluetoothAdapter::Observer, observers_, DeviceAdded(this, device)); } } void BluetoothAdapter::DeviceDisappeared(const dbus::ObjectPath& adapter_path, const std::string& address) { if (adapter_path != object_path_) return; DevicesMap::iterator iter = devices_.find(address); if (iter == devices_.end()) return; BluetoothDevice* device = iter->second; // DeviceDisappeared can also be called to indicate that a device we've // paired with is no longer visible to the adapter, so don't remove // in that case and only clear the visible flag. 
if (!device->IsPaired()) { if (device->IsSupported()) FOR_EACH_OBSERVER(BluetoothAdapter::Observer, observers_, DeviceRemoved(this, device)); DVLOG(1) << "Discovered device " << device->address() << " is no longer visible to the adapter"; delete device; devices_.erase(iter); } else { DVLOG(1) << "Paired device " << device->address() << " is no longer visible to the adapter"; device->SetVisible(false); FOR_EACH_OBSERVER(BluetoothAdapter::Observer, observers_, DeviceChanged(this, device)); } } // static scoped_refptr<BluetoothAdapter> BluetoothAdapter::DefaultAdapter() { if (!default_adapter.Get().get()) { BluetoothAdapter* new_adapter = new BluetoothAdapter; default_adapter.Get() = new_adapter->weak_ptr_factory_.GetWeakPtr(); default_adapter.Get()->TrackDefaultAdapter(); } return scoped_refptr<BluetoothAdapter>(default_adapter.Get()); } // static BluetoothAdapter* BluetoothAdapter::Create(const std::string& address) { BluetoothAdapter* adapter = new BluetoothAdapter; adapter->FindAdapter(address); return adapter; } } // namespace chromeos<|fim▁end|>
// inform observers about paired devices whether or not they're supported. if (update_device && (device->IsSupported() || was_paired)) { FOR_EACH_OBSERVER(BluetoothAdapter::Observer, observers_,
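The completion finishes UpdateDevice()'s notification rule: an already-tracked device (or one that was already paired) yields a DeviceChanged notification, while anything newly tracked yields DeviceAdded — so observers always hear about paired devices even when unsupported. The same branching, condensed into an illustrative Python function (the observer API here is assumed, not Chromium's):

def notify_update(observers, device, update_device, was_paired):
    """Mirrors UpdateDevice(): duplicates become 'changed', the rest 'added'."""
    if update_device and (device.supported or was_paired):
        for o in observers:
            o.device_changed(device)       # known device, properties updated
    else:
        for o in observers:
            o.device_added(device)         # first time this device is tracked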
<|file_name|>wallpaper_private_api.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2013 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/chromeos/extensions/wallpaper_private_api.h" #include <map> #include <set> #include <string> #include <vector> #include "ash/desktop_background/desktop_background_controller.h" #include "ash/shell.h" #include "ash/wm/mru_window_tracker.h" #include "ash/wm/window_state.h" #include "ash/wm/window_util.h" #include "base/command_line.h" #include "base/files/file_enumerator.h" #include "base/files/file_util.h" #include "base/memory/scoped_ptr.h" #include "base/path_service.h" #include "base/prefs/pref_service.h" #include "base/stl_util.h" #include "base/strings/string_number_conversions.h" #include "base/strings/stringprintf.h" #include "base/threading/worker_pool.h" #include "chrome/browser/browser_process.h" #include "chrome/browser/chromeos/login/users/wallpaper/wallpaper_manager.h" #include "chrome/browser/chromeos/profiles/profile_helper.h" #include "chrome/browser/profiles/profile.h" #include "chrome/browser/sync/profile_sync_service_factory.h" #include "chrome/common/chrome_paths.h" #include "chrome/common/pref_names.h" #include "chrome/grit/generated_resources.h" #include "chromeos/chromeos_switches.h" #include "components/browser_sync/browser/profile_sync_service.h" #include "components/user_manager/user.h" #include "components/user_manager/user_manager.h" #include "components/wallpaper/wallpaper_layout.h" #include "content/public/browser/browser_thread.h" #include "extensions/browser/event_router.h" #include "grit/components_strings.h" #include "ui/base/l10n/l10n_util.h" #include "ui/base/webui/web_ui_util.h" #include "ui/strings/grit/app_locale_settings.h" #include "url/gurl.h" using base::BinaryValue; using content::BrowserThread; namespace wallpaper_base = extensions::api::wallpaper; namespace wallpaper_private = extensions::api::wallpaper_private; namespace set_wallpaper_if_exists = wallpaper_private::SetWallpaperIfExists; namespace set_wallpaper = wallpaper_private::SetWallpaper; namespace set_custom_wallpaper = wallpaper_private::SetCustomWallpaper; namespace set_custom_wallpaper_layout = wallpaper_private::SetCustomWallpaperLayout; namespace get_thumbnail = wallpaper_private::GetThumbnail; namespace save_thumbnail = wallpaper_private::SaveThumbnail; namespace get_offline_wallpaper_list = wallpaper_private::GetOfflineWallpaperList; namespace { #if defined(GOOGLE_CHROME_BUILD) const char kWallpaperManifestBaseURL[] = "https://storage.googleapis.com/chromeos-wallpaper-public/manifest_"; #endif bool IsOEMDefaultWallpaper() { return base::CommandLine::ForCurrentProcess()->HasSwitch( chromeos::switches::kDefaultWallpaperIsOem); } // Saves |data| as |file_name| to directory with |key|. Return false if the // directory can not be found/created or failed to write file. bool SaveData(int key, const std::string& file_name, const std::vector<char>& data) { base::FilePath data_dir; CHECK(PathService::Get(key, &data_dir)); if (!base::DirectoryExists(data_dir) && !base::CreateDirectory(data_dir)) { return false; } base::FilePath file_path = data_dir.Append(file_name); return base::PathExists(file_path) || base::WriteFile(file_path, vector_as_array(&data), data.size()) != -1; } // Gets |file_name| from directory with |key|. Return false if the directory can // not be found or failed to read file to string |data|. 
Note if the |file_name| // can not be found in the directory, return true with empty |data|. It is // expected that we may try to access file which did not saved yet. bool GetData(const base::FilePath& path, std::string* data) { base::FilePath data_dir = path.DirName(); if (!base::DirectoryExists(data_dir) && !base::CreateDirectory(data_dir)) return false; return !base::PathExists(path) || base::ReadFileToString(path, data); } // Gets the |User| for a given |BrowserContext|. The function will only return // valid objects. const user_manager::User* GetUserFromBrowserContext( content::BrowserContext* context) { Profile* profile = Profile::FromBrowserContext(context); DCHECK(profile); const user_manager::User* user = chromeos::ProfileHelper::Get()->GetUserByProfile(profile); DCHECK(user); return user; } // WindowStateManager remembers which windows have been minimized in order to // restore them when the wallpaper viewer is hidden. class WindowStateManager : public aura::WindowObserver { public: typedef std::map<std::string, std::set<aura::Window*> > UserIDHashWindowListMap; // Minimizes all windows except the active window. static void MinimizeInactiveWindows(const std::string& user_id_hash); // Unminimizes all minimized windows restoring them to their previous state. // This should only be called after calling MinimizeInactiveWindows. static void RestoreWindows(const std::string& user_id_hash); private: WindowStateManager(); ~WindowStateManager() override; // Store all unminimized windows except |active_window| and minimize them. // All the windows are saved in a map and the key value is |user_id_hash|. void BuildWindowListAndMinimizeInactiveForUser( const std::string& user_id_hash, aura::Window* active_window); // Unminimize all the stored windows for |user_id_hash|. void RestoreMinimizedWindows(const std::string& user_id_hash); // Remove the observer from |window| if |window| is no longer referenced in // user_id_hash_window_list_map_. void RemoveObserverIfUnreferenced(aura::Window* window); // aura::WindowObserver overrides. void OnWindowDestroyed(aura::Window* window) override; // aura::WindowObserver overrides. void OnWindowStackingChanged(aura::Window* window) override; // Map of user id hash and associated list of minimized windows. 
UserIDHashWindowListMap user_id_hash_window_list_map_; DISALLOW_COPY_AND_ASSIGN(WindowStateManager); }; // static WindowStateManager* g_window_state_manager = NULL; // static void WindowStateManager::MinimizeInactiveWindows( const std::string& user_id_hash) { if (!g_window_state_manager) g_window_state_manager = new WindowStateManager(); g_window_state_manager->BuildWindowListAndMinimizeInactiveForUser( user_id_hash, ash::wm::GetActiveWindow()); } // static void WindowStateManager::RestoreWindows(const std::string& user_id_hash) { if (!g_window_state_manager) { DCHECK(false) << "This should only be called after calling " << "MinimizeInactiveWindows."; return; } g_window_state_manager->RestoreMinimizedWindows(user_id_hash); if (g_window_state_manager->user_id_hash_window_list_map_.empty()) { delete g_window_state_manager; g_window_state_manager = NULL; } } WindowStateManager::WindowStateManager() {} WindowStateManager::~WindowStateManager() {} void WindowStateManager::BuildWindowListAndMinimizeInactiveForUser( const std::string& user_id_hash, aura::Window* active_window) { if (user_id_hash_window_list_map_.find(user_id_hash) == user_id_hash_window_list_map_.end()) { user_id_hash_window_list_map_[user_id_hash] = std::set<aura::Window*>(); } std::set<aura::Window*>* results = &user_id_hash_window_list_map_[user_id_hash]; std::vector<aura::Window*> windows = ash::Shell::GetInstance()-> mru_window_tracker()->BuildWindowListIgnoreModal(); for (std::vector<aura::Window*>::iterator iter = windows.begin(); iter != windows.end(); ++iter) { // Ignore active window and minimized windows. if (*iter == active_window || ash::wm::GetWindowState(*iter)->IsMinimized()) continue; if (!(*iter)->HasObserver(this)) (*iter)->AddObserver(this); results->insert(*iter); ash::wm::GetWindowState(*iter)->Minimize(); } } void WindowStateManager::RestoreMinimizedWindows( const std::string& user_id_hash) { UserIDHashWindowListMap::iterator it = user_id_hash_window_list_map_.find(user_id_hash); if (it == user_id_hash_window_list_map_.end()) { DCHECK(false) << "This should only be called after calling " << "MinimizeInactiveWindows."; return; } std::set<aura::Window*> removed_windows; removed_windows.swap(it->second); user_id_hash_window_list_map_.erase(it); for (std::set<aura::Window*>::iterator iter = removed_windows.begin(); iter != removed_windows.end(); ++iter) { ash::wm::GetWindowState(*iter)->Unminimize(); RemoveObserverIfUnreferenced(*iter); } } void WindowStateManager::RemoveObserverIfUnreferenced(aura::Window* window) { for (UserIDHashWindowListMap::iterator iter = user_id_hash_window_list_map_.begin(); iter != user_id_hash_window_list_map_.end(); ++iter) { if (iter->second.find(window) != iter->second.end()) return; } // Remove observer if |window| is not observed by any users. window->RemoveObserver(this); } void WindowStateManager::OnWindowDestroyed(aura::Window* window) { for (UserIDHashWindowListMap::iterator iter = user_id_hash_window_list_map_.begin(); iter != user_id_hash_window_list_map_.end(); ++iter) { iter->second.erase(window); } } void WindowStateManager::OnWindowStackingChanged(aura::Window* window) { // If user interacted with the |window| while wallpaper picker is opening, // removes the |window| from observed list. 
for (auto iter = user_id_hash_window_list_map_.begin(); iter != user_id_hash_window_list_map_.end(); ++iter) { iter->second.erase(window); } window->RemoveObserver(this); } } // namespace bool WallpaperPrivateGetStringsFunction::RunSync() { base::DictionaryValue* dict = new base::DictionaryValue(); SetResult(dict); #define SET_STRING(id, idr) \ dict->SetString(id, l10n_util::GetStringUTF16(idr)) SET_STRING("webFontFamily", IDS_WEB_FONT_FAMILY); SET_STRING("webFontSize", IDS_WEB_FONT_SIZE); SET_STRING("allCategoryLabel", IDS_WALLPAPER_MANAGER_ALL_CATEGORY_LABEL); SET_STRING("deleteCommandLabel", IDS_WALLPAPER_MANAGER_DELETE_COMMAND_LABEL); SET_STRING("customCategoryLabel", IDS_WALLPAPER_MANAGER_CUSTOM_CATEGORY_LABEL); SET_STRING("selectCustomLabel", IDS_WALLPAPER_MANAGER_SELECT_CUSTOM_LABEL); SET_STRING("positionLabel", IDS_WALLPAPER_MANAGER_POSITION_LABEL); SET_STRING("colorLabel", IDS_WALLPAPER_MANAGER_COLOR_LABEL); SET_STRING("centerCroppedLayout", IDS_OPTIONS_WALLPAPER_CENTER_CROPPED_LAYOUT); SET_STRING("centerLayout", IDS_OPTIONS_WALLPAPER_CENTER_LAYOUT); SET_STRING("stretchLayout", IDS_OPTIONS_WALLPAPER_STRETCH_LAYOUT); SET_STRING("connectionFailed", IDS_WALLPAPER_MANAGER_ACCESS_FAIL); SET_STRING("downloadFailed", IDS_WALLPAPER_MANAGER_DOWNLOAD_FAIL); SET_STRING("downloadCanceled", IDS_WALLPAPER_MANAGER_DOWNLOAD_CANCEL); SET_STRING("customWallpaperWarning", IDS_WALLPAPER_MANAGER_SHOW_CUSTOM_WALLPAPER_ON_START_WARNING); SET_STRING("accessFileFailure", IDS_WALLPAPER_MANAGER_ACCESS_FILE_FAILURE); SET_STRING("invalidWallpaper", IDS_WALLPAPER_MANAGER_INVALID_WALLPAPER); SET_STRING("surpriseMeLabel", IDS_WALLPAPER_MANAGER_SURPRISE_ME_LABEL); SET_STRING("learnMore", IDS_LEARN_MORE); SET_STRING("currentWallpaperSetByMessage", IDS_CURRENT_WALLPAPER_SET_BY_MESSAGE); #undef SET_STRING const std::string& app_locale = g_browser_process->GetApplicationLocale(); webui::SetLoadTimeDataDefaults(app_locale, dict); chromeos::WallpaperManager* wallpaper_manager = chromeos::WallpaperManager::Get(); wallpaper::WallpaperInfo info; if (wallpaper_manager->GetLoggedInUserWallpaperInfo(&info)) dict->SetString("currentWallpaper", info.location); #if defined(GOOGLE_CHROME_BUILD) dict->SetString("manifestBaseURL", kWallpaperManifestBaseURL); #endif Profile* profile = Profile::FromBrowserContext(browser_context()); std::string app_name( profile->GetPrefs()->GetString(prefs::kCurrentWallpaperAppName)); if (!app_name.empty()) dict->SetString("wallpaperAppName", app_name); dict->SetBoolean("isOEMDefaultWallpaper", IsOEMDefaultWallpaper()); dict->SetString("canceledWallpaper", wallpaper_api_util::kCancelWallpaperMessage); return true; } bool WallpaperPrivateGetSyncSettingFunction::RunSync() { Profile* profile = Profile::FromBrowserContext(browser_context()); ProfileSyncService* sync = ProfileSyncServiceFactory::GetInstance()->GetForProfile(profile); base::DictionaryValue* dict = new base::DictionaryValue(); SetResult(dict); dict->SetBoolean("syncThemes", sync->GetActiveDataTypes().Has(syncer::THEMES)); return true; } WallpaperPrivateSetWallpaperIfExistsFunction:: WallpaperPrivateSetWallpaperIfExistsFunction() {} WallpaperPrivateSetWallpaperIfExistsFunction:: ~WallpaperPrivateSetWallpaperIfExistsFunction() {} bool WallpaperPrivateSetWallpaperIfExistsFunction::RunAsync() { params = set_wallpaper_if_exists::Params::Create(*args_); EXTENSION_FUNCTION_VALIDATE(params); // Gets email address from caller, ensuring multiprofile compatibility. 
const user_manager::User* user = GetUserFromBrowserContext(browser_context()); user_id_ = user->email(); base::FilePath wallpaper_path; base::FilePath fallback_path; chromeos::WallpaperManager::WallpaperResolution resolution = chromeos::WallpaperManager::Get()->GetAppropriateResolution(); std::string file_name = GURL(params->url).ExtractFileName(); CHECK(PathService::Get(chrome::DIR_CHROMEOS_WALLPAPERS, &wallpaper_path)); fallback_path = wallpaper_path.Append(file_name); if (params->layout != wallpaper_base::WALLPAPER_LAYOUT_STRETCH && resolution == chromeos::WallpaperManager::WALLPAPER_RESOLUTION_SMALL) { file_name = base::FilePath(file_name) .InsertBeforeExtension(wallpaper::kSmallWallpaperSuffix) .value(); } wallpaper_path = wallpaper_path.Append(file_name); sequence_token_ = BrowserThread::GetBlockingPool()->GetNamedSequenceToken( wallpaper::kWallpaperSequenceTokenName); scoped_refptr<base::SequencedTaskRunner> task_runner = BrowserThread::GetBlockingPool()-> GetSequencedTaskRunnerWithShutdownBehavior(sequence_token_, base::SequencedWorkerPool::CONTINUE_ON_SHUTDOWN); task_runner->PostTask(FROM_HERE, base::Bind( &WallpaperPrivateSetWallpaperIfExistsFunction:: ReadFileAndInitiateStartDecode, this, wallpaper_path, fallback_path)); return true; } void WallpaperPrivateSetWallpaperIfExistsFunction:: ReadFileAndInitiateStartDecode(const base::FilePath& file_path, const base::FilePath& fallback_path) { DCHECK(BrowserThread::GetBlockingPool()->IsRunningSequenceOnCurrentThread( sequence_token_)); base::FilePath path = file_path; if (!base::PathExists(file_path)) path = fallback_path; std::string data; if (base::PathExists(path) && base::ReadFileToString(path, &data)) { BrowserThread::PostTask( BrowserThread::UI, FROM_HERE, base::Bind(&WallpaperPrivateSetWallpaperIfExistsFunction::StartDecode, this, std::vector<char>(data.begin(), data.end()))); return; } std::string error = base::StringPrintf( "Failed to set wallpaper %s from file system.", path.BaseName().value().c_str()); BrowserThread::PostTask( BrowserThread::UI, FROM_HERE, base::Bind(&WallpaperPrivateSetWallpaperIfExistsFunction::OnFileNotExists, this, error)); } void WallpaperPrivateSetWallpaperIfExistsFunction::OnWallpaperDecoded( const gfx::ImageSkia& image) { // Set unsafe_wallpaper_decoder_ to null since the decoding already finished. unsafe_wallpaper_decoder_ = NULL; chromeos::WallpaperManager* wallpaper_manager = chromeos::WallpaperManager::Get(); wallpaper::WallpaperLayout layout = wallpaper_api_util::GetLayoutEnum( wallpaper_base::ToString(params->layout)); bool update_wallpaper = user_id_ == user_manager::UserManager::Get()->GetActiveUser()->email(); wallpaper_manager->SetWallpaperFromImageSkia( user_id_, image, layout, update_wallpaper); bool is_persistent = !user_manager::UserManager::Get() ->IsCurrentUserNonCryptohomeDataEphemeral(); wallpaper::WallpaperInfo info = {params->url, layout, user_manager::User::ONLINE, base::Time::Now().LocalMidnight()}; wallpaper_manager->SetUserWallpaperInfo(user_id_, info, is_persistent); SetResult(new base::FundamentalValue(true)); Profile* profile = Profile::FromBrowserContext(browser_context()); // This API is only available to the component wallpaper picker. We do not // need to show the app's name if it is the component wallpaper picker. So set // the pref to empty string. 
profile->GetPrefs()->SetString(prefs::kCurrentWallpaperAppName, std::string()); SendResponse(true); } void WallpaperPrivateSetWallpaperIfExistsFunction::OnFileNotExists( const std::string& error) { SetResult(new base::FundamentalValue(false)); OnFailure(error); } WallpaperPrivateSetWallpaperFunction::WallpaperPrivateSetWallpaperFunction() { } WallpaperPrivateSetWallpaperFunction::~WallpaperPrivateSetWallpaperFunction() { } bool WallpaperPrivateSetWallpaperFunction::RunAsync() { params = set_wallpaper::Params::Create(*args_); EXTENSION_FUNCTION_VALIDATE(params); // Gets email address from caller, ensuring multiprofile compatibility. const user_manager::User* user = GetUserFromBrowserContext(browser_context()); user_id_ = user->email(); StartDecode(params->wallpaper); return true; } void WallpaperPrivateSetWallpaperFunction::OnWallpaperDecoded( const gfx::ImageSkia& image) { wallpaper_ = image; // Set unsafe_wallpaper_decoder_ to null since the decoding already finished. unsafe_wallpaper_decoder_ = NULL; sequence_token_ = BrowserThread::GetBlockingPool()->GetNamedSequenceToken( wallpaper::kWallpaperSequenceTokenName); scoped_refptr<base::SequencedTaskRunner> task_runner = BrowserThread::GetBlockingPool()-> GetSequencedTaskRunnerWithShutdownBehavior(sequence_token_, base::SequencedWorkerPool::BLOCK_SHUTDOWN); task_runner->PostTask(FROM_HERE, base::Bind(&WallpaperPrivateSetWallpaperFunction::SaveToFile, this)); } void WallpaperPrivateSetWallpaperFunction::SaveToFile() { DCHECK(BrowserThread::GetBlockingPool()->IsRunningSequenceOnCurrentThread( sequence_token_)); std::string file_name = GURL(params->url).ExtractFileName(); if (SaveData(chrome::DIR_CHROMEOS_WALLPAPERS, file_name, params->wallpaper)) { wallpaper_.EnsureRepsForSupportedScales(); scoped_ptr<gfx::ImageSkia> deep_copy(wallpaper_.DeepCopy()); // ImageSkia is not RefCountedThreadSafe. Use a deep copied ImageSkia if // post to another thread. BrowserThread::PostTask( BrowserThread::UI, FROM_HERE, base::Bind(&WallpaperPrivateSetWallpaperFunction::SetDecodedWallpaper, this, base::Passed(deep_copy.Pass()))); base::FilePath wallpaper_dir; CHECK(PathService::Get(chrome::DIR_CHROMEOS_WALLPAPERS, &wallpaper_dir)); base::FilePath file_path = wallpaper_dir.Append(file_name) .InsertBeforeExtension(wallpaper::kSmallWallpaperSuffix); if (base::PathExists(file_path)) return; // Generates and saves small resolution wallpaper. Uses CENTER_CROPPED to // maintain the aspect ratio after resize. 
chromeos::WallpaperManager::Get()->ResizeAndSaveWallpaper( wallpaper_, file_path, wallpaper::WALLPAPER_LAYOUT_CENTER_CROPPED, wallpaper::kSmallWallpaperMaxWidth, wallpaper::kSmallWallpaperMaxHeight, NULL); } else { std::string error = base::StringPrintf( "Failed to create/write wallpaper to %s.", file_name.c_str()); BrowserThread::PostTask( BrowserThread::UI, FROM_HERE, base::Bind(&WallpaperPrivateSetWallpaperFunction::OnFailure, this, error)); } } void WallpaperPrivateSetWallpaperFunction::SetDecodedWallpaper( scoped_ptr<gfx::ImageSkia> image) { chromeos::WallpaperManager* wallpaper_manager = chromeos::WallpaperManager::Get(); wallpaper::WallpaperLayout layout = wallpaper_api_util::GetLayoutEnum( wallpaper_base::ToString(params->layout)); bool update_wallpaper = user_id_ == user_manager::UserManager::Get()->GetActiveUser()->email(); wallpaper_manager->SetWallpaperFromImageSkia( user_id_, *image.get(), layout, update_wallpaper); bool is_persistent = !user_manager::UserManager::Get() ->IsCurrentUserNonCryptohomeDataEphemeral(); wallpaper::WallpaperInfo info = {params->url, layout, user_manager::User::ONLINE, base::Time::Now().LocalMidnight()}; Profile* profile = Profile::FromBrowserContext(browser_context()); // This API is only available to the component wallpaper picker. We do not // need to show the app's name if it is the component wallpaper picker. So set // the pref to empty string. profile->GetPrefs()->SetString(prefs::kCurrentWallpaperAppName, std::string()); wallpaper_manager->SetUserWallpaperInfo(user_id_, info, is_persistent); SendResponse(true); } WallpaperPrivateResetWallpaperFunction:: WallpaperPrivateResetWallpaperFunction() {} WallpaperPrivateResetWallpaperFunction:: ~WallpaperPrivateResetWallpaperFunction() {} bool WallpaperPrivateResetWallpaperFunction::RunAsync() { chromeos::WallpaperManager* wallpaper_manager = chromeos::WallpaperManager::Get(); user_manager::UserManager* user_manager = user_manager::UserManager::Get(); std::string user_id = user_manager->GetActiveUser()->email(); wallpaper_manager->RemoveUserWallpaperInfo(user_id); wallpaper::WallpaperInfo info = {std::string(), wallpaper::WALLPAPER_LAYOUT_CENTER, user_manager::User::DEFAULT, base::Time::Now().LocalMidnight()}; bool is_persistent = !user_manager->IsCurrentUserNonCryptohomeDataEphemeral(); wallpaper_manager->SetUserWallpaperInfo(user_id, info, is_persistent); wallpaper_manager->SetDefaultWallpaperNow(user_id); Profile* profile = Profile::FromBrowserContext(browser_context()); // This API is only available to the component wallpaper picker. We do not // need to show the app's name if it is the component wallpaper picker. So set // the pref to empty string. profile->GetPrefs()->SetString(prefs::kCurrentWallpaperAppName, std::string()); return true; } WallpaperPrivateSetCustomWallpaperFunction:: WallpaperPrivateSetCustomWallpaperFunction() {} WallpaperPrivateSetCustomWallpaperFunction:: ~WallpaperPrivateSetCustomWallpaperFunction() {} bool WallpaperPrivateSetCustomWallpaperFunction::RunAsync() { params = set_custom_wallpaper::Params::Create(*args_); EXTENSION_FUNCTION_VALIDATE(params); // Gets email address from caller, ensuring multiprofile compatibility. 
const user_manager::User* user = GetUserFromBrowserContext(browser_context()); user_id_ = user->email(); user_id_hash_ = user->username_hash(); StartDecode(params->wallpaper); return true; } void WallpaperPrivateSetCustomWallpaperFunction::OnWallpaperDecoded( const gfx::ImageSkia& image) { chromeos::WallpaperManager* wallpaper_manager = chromeos::WallpaperManager::Get(); base::FilePath thumbnail_path = wallpaper_manager->GetCustomWallpaperPath( wallpaper::kThumbnailWallpaperSubDir, user_id_hash_, params->file_name); sequence_token_ = BrowserThread::GetBlockingPool()->GetNamedSequenceToken( wallpaper::kWallpaperSequenceTokenName); scoped_refptr<base::SequencedTaskRunner> task_runner = BrowserThread::GetBlockingPool()-> GetSequencedTaskRunnerWithShutdownBehavior(sequence_token_, base::SequencedWorkerPool::BLOCK_SHUTDOWN); wallpaper::WallpaperLayout layout = wallpaper_api_util::GetLayoutEnum( wallpaper_base::ToString(params->layout)); wallpaper_api_util::RecordCustomWallpaperLayout(layout); bool update_wallpaper = user_id_ == user_manager::UserManager::Get()->GetActiveUser()->email(); wallpaper_manager->SetCustomWallpaper(user_id_, user_id_hash_, params->file_name, layout, user_manager::User::CUSTOMIZED, image, update_wallpaper); unsafe_wallpaper_decoder_ = NULL; Profile* profile = Profile::FromBrowserContext(browser_context()); // This API is only available to the component wallpaper picker. We do not // need to show the app's name if it is the component wallpaper picker. So set // the pref to empty string. profile->GetPrefs()->SetString(prefs::kCurrentWallpaperAppName, std::string()); if (params->generate_thumbnail) { image.EnsureRepsForSupportedScales(); scoped_ptr<gfx::ImageSkia> deep_copy(image.DeepCopy()); // Generates thumbnail before call api function callback. We can then // request thumbnail in the javascript callback. 
task_runner->PostTask(FROM_HERE, base::Bind( &WallpaperPrivateSetCustomWallpaperFunction::GenerateThumbnail, this, thumbnail_path, base::Passed(&deep_copy))); } else { SendResponse(true); } } void WallpaperPrivateSetCustomWallpaperFunction::GenerateThumbnail( const base::FilePath& thumbnail_path, scoped_ptr<gfx::ImageSkia> image) { DCHECK(BrowserThread::GetBlockingPool()->IsRunningSequenceOnCurrentThread( sequence_token_)); if (!base::PathExists(thumbnail_path.DirName())) base::CreateDirectory(thumbnail_path.DirName()); scoped_refptr<base::RefCountedBytes> data; chromeos::WallpaperManager::Get()->ResizeImage( *image, wallpaper::WALLPAPER_LAYOUT_STRETCH, wallpaper::kWallpaperThumbnailWidth, wallpaper::kWallpaperThumbnailHeight, &data, NULL); BrowserThread::PostTask( BrowserThread::UI, FROM_HERE, base::Bind( &WallpaperPrivateSetCustomWallpaperFunction::ThumbnailGenerated, this, data)); } void WallpaperPrivateSetCustomWallpaperFunction::ThumbnailGenerated( base::RefCountedBytes* data) { BinaryValue* result = BinaryValue::CreateWithCopiedBuffer( reinterpret_cast<const char*>(data->front()), data->size()); SetResult(result); SendResponse(true); } WallpaperPrivateSetCustomWallpaperLayoutFunction:: WallpaperPrivateSetCustomWallpaperLayoutFunction() {} WallpaperPrivateSetCustomWallpaperLayoutFunction:: ~WallpaperPrivateSetCustomWallpaperLayoutFunction() {} bool WallpaperPrivateSetCustomWallpaperLayoutFunction::RunAsync() { scoped_ptr<set_custom_wallpaper_layout::Params> params( set_custom_wallpaper_layout::Params::Create(*args_)); EXTENSION_FUNCTION_VALIDATE(params); chromeos::WallpaperManager* wallpaper_manager = chromeos::WallpaperManager::Get(); wallpaper::WallpaperInfo info; wallpaper_manager->GetLoggedInUserWallpaperInfo(&info); if (info.type != user_manager::User::CUSTOMIZED) { SetError("Only custom wallpaper can change layout."); SendResponse(false); return false; } info.layout = wallpaper_api_util::GetLayoutEnum( wallpaper_base::ToString(params->layout)); wallpaper_api_util::RecordCustomWallpaperLayout(info.layout); std::string email = user_manager::UserManager::Get()->GetActiveUser()->email(); bool is_persistent = !user_manager::UserManager::Get() ->IsCurrentUserNonCryptohomeDataEphemeral(); wallpaper_manager->SetUserWallpaperInfo(email, info, is_persistent); wallpaper_manager->UpdateWallpaper(false /* clear_cache */); SendResponse(true); // Gets email address while at UI thread. 
return true; } WallpaperPrivateMinimizeInactiveWindowsFunction:: WallpaperPrivateMinimizeInactiveWindowsFunction() { } WallpaperPrivateMinimizeInactiveWindowsFunction:: ~WallpaperPrivateMinimizeInactiveWindowsFunction() { } bool WallpaperPrivateMinimizeInactiveWindowsFunction::RunAsync() { WindowStateManager::MinimizeInactiveWindows( user_manager::UserManager::Get()->GetActiveUser()->username_hash()); return true; } WallpaperPrivateRestoreMinimizedWindowsFunction:: WallpaperPrivateRestoreMinimizedWindowsFunction() { } WallpaperPrivateRestoreMinimizedWindowsFunction:: ~WallpaperPrivateRestoreMinimizedWindowsFunction() { } bool WallpaperPrivateRestoreMinimizedWindowsFunction::RunAsync() { WindowStateManager::RestoreWindows( user_manager::UserManager::Get()->GetActiveUser()->username_hash()); return true; } WallpaperPrivateGetThumbnailFunction::WallpaperPrivateGetThumbnailFunction() { } WallpaperPrivateGetThumbnailFunction::~WallpaperPrivateGetThumbnailFunction() { } bool WallpaperPrivateGetThumbnailFunction::RunAsync() { scoped_ptr<get_thumbnail::Params> params( get_thumbnail::Params::Create(*args_)); EXTENSION_FUNCTION_VALIDATE(params); base::FilePath thumbnail_path; std::string email = user_manager::UserManager::Get()->GetActiveUser()->email(); if (params->source == wallpaper_private::WALLPAPER_SOURCE_ONLINE) {<|fim▁hole|> &thumbnail_path)); thumbnail_path = thumbnail_path.Append(file_name); } else { if (!IsOEMDefaultWallpaper()) { SetError("No OEM wallpaper."); SendResponse(false); return false; } // TODO(bshe): Small resolution wallpaper is used here as wallpaper // thumbnail. We should either resize it or include a wallpaper thumbnail in // addition to large and small wallpaper resolutions. thumbnail_path = base::CommandLine::ForCurrentProcess()->GetSwitchValuePath( chromeos::switches::kDefaultWallpaperSmall); } sequence_token_ = BrowserThread::GetBlockingPool()->GetNamedSequenceToken( wallpaper::kWallpaperSequenceTokenName); scoped_refptr<base::SequencedTaskRunner> task_runner = BrowserThread::GetBlockingPool()-> GetSequencedTaskRunnerWithShutdownBehavior(sequence_token_, base::SequencedWorkerPool::CONTINUE_ON_SHUTDOWN); task_runner->PostTask(FROM_HERE, base::Bind(&WallpaperPrivateGetThumbnailFunction::Get, this, thumbnail_path)); return true; } void WallpaperPrivateGetThumbnailFunction::Failure( const std::string& file_name) { SetError(base::StringPrintf("Failed to access wallpaper thumbnails for %s.", file_name.c_str())); SendResponse(false); } void WallpaperPrivateGetThumbnailFunction::FileNotLoaded() { SendResponse(true); } void WallpaperPrivateGetThumbnailFunction::FileLoaded( const std::string& data) { BinaryValue* thumbnail = BinaryValue::CreateWithCopiedBuffer(data.c_str(), data.size()); SetResult(thumbnail); SendResponse(true); } void WallpaperPrivateGetThumbnailFunction::Get(const base::FilePath& path) { DCHECK(BrowserThread::GetBlockingPool()->IsRunningSequenceOnCurrentThread( sequence_token_)); std::string data; if (GetData(path, &data)) { if (data.empty()) { BrowserThread::PostTask( BrowserThread::UI, FROM_HERE, base::Bind(&WallpaperPrivateGetThumbnailFunction::FileNotLoaded, this)); } else { BrowserThread::PostTask( BrowserThread::UI, FROM_HERE, base::Bind(&WallpaperPrivateGetThumbnailFunction::FileLoaded, this, data)); } } else { BrowserThread::PostTask( BrowserThread::UI, FROM_HERE, base::Bind(&WallpaperPrivateGetThumbnailFunction::Failure, this, path.BaseName().value())); } } WallpaperPrivateSaveThumbnailFunction::WallpaperPrivateSaveThumbnailFunction() { } 
WallpaperPrivateSaveThumbnailFunction:: ~WallpaperPrivateSaveThumbnailFunction() {} bool WallpaperPrivateSaveThumbnailFunction::RunAsync() { scoped_ptr<save_thumbnail::Params> params( save_thumbnail::Params::Create(*args_)); EXTENSION_FUNCTION_VALIDATE(params); sequence_token_ = BrowserThread::GetBlockingPool()->GetNamedSequenceToken( wallpaper::kWallpaperSequenceTokenName); scoped_refptr<base::SequencedTaskRunner> task_runner = BrowserThread::GetBlockingPool()-> GetSequencedTaskRunnerWithShutdownBehavior(sequence_token_, base::SequencedWorkerPool::CONTINUE_ON_SHUTDOWN); task_runner->PostTask(FROM_HERE, base::Bind(&WallpaperPrivateSaveThumbnailFunction::Save, this, params->data, GURL(params->url).ExtractFileName())); return true; } void WallpaperPrivateSaveThumbnailFunction::Failure( const std::string& file_name) { SetError(base::StringPrintf("Failed to create/write thumbnail of %s.", file_name.c_str())); SendResponse(false); } void WallpaperPrivateSaveThumbnailFunction::Success() { SendResponse(true); } void WallpaperPrivateSaveThumbnailFunction::Save(const std::vector<char>& data, const std::string& file_name) { DCHECK(BrowserThread::GetBlockingPool()->IsRunningSequenceOnCurrentThread( sequence_token_)); if (SaveData(chrome::DIR_CHROMEOS_WALLPAPER_THUMBNAILS, file_name, data)) { BrowserThread::PostTask( BrowserThread::UI, FROM_HERE, base::Bind(&WallpaperPrivateSaveThumbnailFunction::Success, this)); } else { BrowserThread::PostTask( BrowserThread::UI, FROM_HERE, base::Bind(&WallpaperPrivateSaveThumbnailFunction::Failure, this, file_name)); } } WallpaperPrivateGetOfflineWallpaperListFunction:: WallpaperPrivateGetOfflineWallpaperListFunction() { } WallpaperPrivateGetOfflineWallpaperListFunction:: ~WallpaperPrivateGetOfflineWallpaperListFunction() { } bool WallpaperPrivateGetOfflineWallpaperListFunction::RunAsync() { sequence_token_ = BrowserThread::GetBlockingPool()->GetNamedSequenceToken( wallpaper::kWallpaperSequenceTokenName); scoped_refptr<base::SequencedTaskRunner> task_runner = BrowserThread::GetBlockingPool()-> GetSequencedTaskRunnerWithShutdownBehavior(sequence_token_, base::SequencedWorkerPool::CONTINUE_ON_SHUTDOWN); task_runner->PostTask(FROM_HERE, base::Bind(&WallpaperPrivateGetOfflineWallpaperListFunction::GetList, this)); return true; } void WallpaperPrivateGetOfflineWallpaperListFunction::GetList() { DCHECK(BrowserThread::GetBlockingPool()->IsRunningSequenceOnCurrentThread( sequence_token_)); std::vector<std::string> file_list; base::FilePath wallpaper_dir; CHECK(PathService::Get(chrome::DIR_CHROMEOS_WALLPAPERS, &wallpaper_dir)); if (base::DirectoryExists(wallpaper_dir)) { base::FileEnumerator files(wallpaper_dir, false, base::FileEnumerator::FILES); for (base::FilePath current = files.Next(); !current.empty(); current = files.Next()) { std::string file_name = current.BaseName().RemoveExtension().value(); // Do not add file name of small resolution wallpaper to the list. if (!base::EndsWith(file_name, wallpaper::kSmallWallpaperSuffix, base::CompareCase::SENSITIVE)) file_list.push_back(current.BaseName().value()); } } BrowserThread::PostTask( BrowserThread::UI, FROM_HERE, base::Bind(&WallpaperPrivateGetOfflineWallpaperListFunction::OnComplete, this, file_list)); } void WallpaperPrivateGetOfflineWallpaperListFunction::OnComplete( const std::vector<std::string>& file_list) { base::ListValue* results = new base::ListValue(); results->AppendStrings(file_list); SetResult(results); SendResponse(true); }<|fim▁end|>
std::string file_name = GURL(params->url_or_file).ExtractFileName(); CHECK(PathService::Get(chrome::DIR_CHROMEOS_WALLPAPER_THUMBNAILS,
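Every record in this dump follows the same fill-in-the-middle layout: a `<|file_name|>` header, the prefix after `<|fim▁begin|>`, a `<|fim▁hole|>` marker where the target span was cut out, the suffix, a `<|fim▁end|>` terminator, and then the held-out completion (here, the two lines that fill the hole inside `WallpaperPrivateGetThumbnailFunction::RunAsync`). A minimal sketch of splicing a record back into a full source file, assuming exactly these marker spellings and a single hole per record:

```python
# Minimal sketch: rebuild a source file from one FIM record.
# Assumes the marker spellings used in this dump and exactly one hole.
BEGIN, HOLE, END = "<|fim▁begin|>", "<|fim▁hole|>", "<|fim▁end|>"

def reassemble(record: str) -> str:
    prompt, _, completion = record.partition(END)   # completion follows <|fim▁end|>
    body = prompt.split(BEGIN, 1)[1]                # drop the <|file_name|> header
    prefix, _, suffix = body.partition(HOLE)        # split around the single hole
    return prefix + completion + suffix

demo = BEGIN + "def add(a, b):\n" + HOLE + "\n" + END + "    return a + b"
print(reassemble(demo))   # -> "def add(a, b):\n    return a + b\n"
```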
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
#!/usr/bin/env python from TweetGrabber import *
<|file_name|>server.py<|end_file_name|><|fim▁begin|>import msgpackrpc import time class SumServer(object): def sum(self, x, y): return x + y def sleepy_sum(self, x, y): time.sleep(1) return x + y server = msgpackrpc.Server(SumServer()) server.listen(msgpackrpc.Address("localhost", 18800))<|fim▁hole|><|fim▁end|>
server.start()
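The server record above exposes `sum` and `sleepy_sum` over msgpack-RPC on port 18800. A client-side sketch using the msgpack-rpc-python client API (the port and method names come from the record; `call_async` is that library's documented non-blocking variant):

```python
import msgpackrpc

client = msgpackrpc.Client(msgpackrpc.Address("localhost", 18800))
print(client.call("sum", 1, 2))                 # synchronous call -> 3

future = client.call_async("sleepy_sum", 1, 2)  # returns immediately
print(future.get())                             # blocks (~1s) until the reply arrives
```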
<|file_name|>brd-restructure.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- import os import argparse import pprint import proteindf_bridge as bridge import logging import logging.config def get_rest_of_frame_molecule(frame_molecule, selected_molecule): # calc the rest selector = bridge.Select_AtomGroup(selected_molecule) selected = frame_molecule.select(selector) rest_molecule = frame_molecule ^ selected return rest_molecule def assign_rest_molecule(rest_molecule, output_atom_group, model_id="model_1", chain_id="Z", res_name="UNK"): chain = bridge.AtomGroup() res = bridge.AtomGroup() res.name = res_name atom_id = 1 for atom in rest_molecule.get_atom_list(): res.set_atom(atom_id, atom) atom_id += 1 chain.set_group(1, res)<|fim▁hole|> output_atom_group[model_id].set_group(chain_id, chain) def main(): parser = argparse.ArgumentParser( description='restructure brd file by reference file') parser.add_argument('target_brd_path', nargs=1, help='target brd file') parser.add_argument('ref_brd_path', nargs=1, help='reference brd file') parser.add_argument('-o', '--output_path', nargs=1, default=["output.brd"]) parser.add_argument('-r', '--range', nargs=1, default=[1.0E-5]) parser.add_argument('-v', '--verbose', action='store_true', default=False) args = parser.parse_args() # print(args) target_brd_path = args.target_brd_path[0] ref_brd_path = args.ref_brd_path[0] output_path = args.output_path[0] range = float(args.range[0]) verbose = args.verbose if verbose: print("target: {}".format(target_brd_path)) print("reference: {}".format(ref_brd_path)) # load target_ag = bridge.load_atomgroup(target_brd_path) ref_ag = bridge.load_atomgroup(ref_brd_path) # matching #target_selector = bridge.Select_AtomGroup(target_ag) #restructured = ref_ag.select(target_selector) # calc the rest #rest_of_target = get_rest_of_frame_molecule(target_ag, restructured) #assign_rest_molecule(rest_of_target, restructured) restructured = target_ag.restructure(ref_ag, range) if output_path: if verbose: print("output brd file: {}".format(output_path)) bridge.save_atomgroup(restructured, output_path) if __name__ == '__main__': #import cProfile #pr = cProfile.Profile() # pr.enable() main() # pr.disable() # pr.dump_stats('program.profile')<|fim▁end|>
<|file_name|>test_topi_qnn.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """Test code for QNN operators.""" import numpy as np import tvm from tvm import topi, relay, te from tvm.contrib import graph_executor import tvm.topi.testing def verify_simulated_quantize(data_shape, out_dtype, channels, axis): # Create placeholder variables for all qnn inputs. A = te.placeholder(data_shape, name="value", dtype="float32") D = te.placeholder([], name="dtype", dtype="int32") S = te.placeholder([te.size_var("scale_dim")], name="scale", dtype="float32") Z = te.placeholder([te.size_var("zp_dim")], name="zp", dtype="int32") SIM_Q = topi.nn.simulated_quantize(A, D, output_scale=S, output_zero_point=Z, axis=axis) # Create random numpy values to assign to inputs. a_np = np.random.uniform(size=data_shape).astype("float32") d_np = np.int32(topi.nn.SQNN_DTYPE_TO_CODE[out_dtype]) s_np = np.random.uniform(low=1e-4, high=0.1, size=channels).astype("float32") z_np = np.random.uniform(low=-10, high=10, size=channels).astype("int32") q_np = np.zeros(shape=data_shape, dtype="float32") def check_target(target, dev): # Wrap the numpy arrays in nd arrays. a = tvm.nd.array(a_np, dev) d = tvm.nd.array(d_np, dev)<|fim▁hole|> q = tvm.nd.array(q_np, dev) # Construct equivalent relay graph. per_channel = channels[0] != 1 a_var = relay.var("a", shape=data_shape, dtype="float32") if per_channel: s_var = relay.const(s_np) z_var = relay.const(z_np) else: s_var = relay.const(s_np[0]) z_var = relay.const(z_np[0]) real_q_op = relay.qnn.op.quantize(a_var, s_var, z_var, axis=axis, out_dtype=out_dtype) with tvm.transform.PassContext(opt_level=3): lib = relay.build(tvm.IRModule.from_expr(real_q_op), target=target) # Get real qnn quantize output. m = graph_executor.GraphModule(lib["default"](dev)) m.set_input("a", a_np) m.run() real_q_out = m.get_output(0) # Compile the simulated quantize function. with tvm.target.Target(target): sched = tvm.topi.testing.get_injective_schedule(target)(SIM_Q) func = tvm.build(sched, [A, D, S, Z, SIM_Q], target, name="sim_quantize") func(a, d, s, z, q) # Check correctness against the true qnn output. mismatch = q.numpy() != real_q_out.numpy().astype("float32") # Allow some rounding errors due to GPU fp32 arithmetic. assert np.sum(mismatch) <= 3 for target, dev in tvm.testing.enabled_targets(): check_target(target, dev) def test_simulated_quantize(): verify_simulated_quantize([1], "int8", [1], -1) verify_simulated_quantize([2, 5], "int8", [5], 1) verify_simulated_quantize([1, 32, 32, 32], "int8", [32], -1) verify_simulated_quantize([1, 32, 32, 32], "uint8", [32], -2) verify_simulated_quantize([2, 5], "int32", [5], 1) def verify_simulated_dequantize(data_shape, in_dtype, channels, axis): # Create placeholder variables for all qnn inputs. 
A = te.placeholder(data_shape, name="value", dtype="float32") D = te.placeholder([], name="dtype", dtype="int32") S = te.placeholder([te.size_var("scale_dim")], name="scale", dtype="float32") Z = te.placeholder([te.size_var("zp_dim")], name="zp", dtype="int32") SIM_DQ = topi.nn.simulated_dequantize(A, D, input_scale=S, input_zero_point=Z, axis=axis) # Create random numpy values to assign to inputs. a_np = np.random.uniform(low=-128, high=127, size=data_shape).astype(in_dtype) a_np_f = a_np.astype("float32") d_np = np.int32(topi.nn.SQNN_DTYPE_TO_CODE[in_dtype]) s_np = np.random.uniform(low=1e-4, high=0.1, size=channels).astype("float32") z_np = np.random.uniform(low=-10, high=10, size=channels).astype("int32") dq_np = np.zeros(shape=data_shape, dtype="float32") def check_target(target, dev): # Wrap the numpy arrays in nd arrays. a = tvm.nd.array(a_np_f, dev) d = tvm.nd.array(d_np, dev) s = tvm.nd.array(s_np, dev) z = tvm.nd.array(z_np, dev) dq = tvm.nd.array(dq_np, dev) # Construct equivalent relay graph. per_channel = channels[0] != 1 a_var = relay.var("a", shape=data_shape, dtype=in_dtype) if per_channel: s_var = relay.const(s_np) z_var = relay.const(z_np) else: s_var = relay.const(s_np[0]) z_var = relay.const(z_np[0]) real_dq_op = relay.qnn.op.dequantize(a_var, s_var, z_var, axis=axis) with tvm.transform.PassContext(opt_level=3): lib = relay.build(tvm.IRModule.from_expr(real_dq_op), target=target) # Get real qnn quantize output. m = graph_executor.GraphModule(lib["default"](dev)) m.set_input("a", a_np) m.run() real_dq_out = m.get_output(0) # Compile the simulated quantize function. with tvm.target.Target(target): sched = tvm.topi.testing.get_injective_schedule(target)(SIM_DQ) func = tvm.build(sched, [A, D, S, Z, SIM_DQ], target, name="sim_quantize") func(a, d, s, z, dq) # Check correctness against the true qnn output. tvm.testing.assert_allclose(dq.numpy(), real_dq_out.numpy().astype("float32"), rtol=1e-5) for target, dev in tvm.testing.enabled_targets(): check_target(target, dev) def test_simulated_dequantize(): verify_simulated_dequantize([1], "int8", [1], -1) verify_simulated_dequantize([2, 5], "int8", [5], 1) verify_simulated_dequantize([2, 5], "int8", [2], 0) verify_simulated_dequantize([1, 32, 32, 32], "int8", [32], -1) verify_simulated_dequantize([1, 32, 32, 32], "uint8", [32], -2) verify_simulated_dequantize([2, 5], "int32", [5], 1) if __name__ == "__main__": test_simulated_quantize() test_simulated_dequantize()<|fim▁end|>
s = tvm.nd.array(s_np, dev) z = tvm.nd.array(z_np, dev)
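The TVM test above checks simulated quantization against the real `qnn.quantize`/`qnn.dequantize` ops. The underlying affine mapping is `q = clip(round(x / scale) + zero_point)`; a small NumPy sketch of the round trip (illustrative only, not TVM's implementation, and exact rounding/saturation rules vary by backend):

```python
import numpy as np

def quantize(x, scale, zero_point, qmin=-128, qmax=127):
    q = np.round(x / scale) + zero_point
    return np.clip(q, qmin, qmax).astype(np.int8)

def dequantize(q, scale, zero_point):
    return (q.astype(np.float32) - zero_point) * scale

x = np.random.uniform(-1.0, 1.0, size=8).astype(np.float32)
scale, zp = 0.01, 3
x_hat = dequantize(quantize(x, scale, zp), scale, zp)
print(np.max(np.abs(x - x_hat)))  # at most ~scale/2 while inside the clip range
```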
<|file_name|>analisi.py<|end_file_name|><|fim▁begin|>import numpy
import math
import pylab
from scipy.optimize import curve_fit
import scipy.stats
import lab

def fit_function(x, a, b):
    return b*(numpy.exp(x/a)-1)

FileName = '/home/federico/Documenti/Laboratorio2/Diodo/dati_arduino/dati.txt'
N1, N2 = pylab.loadtxt(FileName, unpack=True)
errN2 = numpy.array([1.0 for i in range(len(N2))])
errN1 = numpy.array([1.0 for i in range(len(N1))])

Rd = 3280.0
errRd = 30.0

eta = 4.89/1000
erreta = 0.02/1000
V1 = eta*N1
V2 = eta*N2
I = (V1-V2)/Rd

# Errors on the V values
errV2 = (erreta/eta + errN2/N2)*V2
errV1 = (erreta/eta + errN1/N1)*V1
errI = (errRd/Rd)*I
#for i in range(len(I)):
#    errI[i] = 50e-06

for i in range(len(I)):
    if(I[i]==0.0):
        I[i] = 1.0e-11*i
for i in range(len(V2)):
    if(V2[i]==0.0):
        V2[i] = 1.0e-11*i

# TODO (left unfinished): replace each column of points with a single point
#number = 150
#minV = 0.30
#maxV = 0.70
#inc = (maxV - minV)/number
#volt = numpy.array([(minV + i*inc) for i in range(number)])
#voltaggiVeri = numpy.array([])
#ampere = numpy.array([])
#errVolt = numpy.array([0.0 for i in range(number)])
#errAmpere = numpy.array([0.0 for i in range(number)])
#count = numpy.array([])
#for i in range(number):
#    for j in range(len(V2)):
#        if(volt[i]<=V2[j]<=volt[i+1]):
#            voltaggiVeri = numpy.append(voltaggiVeri, V2[
#            errVolt[i] = errV2[j]
#            errAmpere[i] = errI[j]
#            ampere[i] += I[j]
#            count[i] += 1
#nonnulli = len(numpy.nonzero(count))
#aNonNulli = numpy.array([0.0 for i in range(nonnulli)])
#for i in range(nonnulli):
#    index = (numpy.nonzero(ampere))[i]
#    print(index)
#    aNonNulli[i] = ampere[index]
#V2 = volt
#I = ampere
#errI = errAmpere
#errV2 = errVolt

print(V2, I, errV2, errI)

pylab.title("Current-voltage curve")
pylab.xlabel("V (V)")
pylab.ylabel("I (A)")
pylab.grid(color = "gray")
pylab.errorbar(V2, I, errI, errV2, "o", color="black")

initial = numpy.array([0.0515, 6.75e-09])
error = errI+errV2/100
# must NOT use the squared errors here: that would mix units of measure<|fim▁hole|>
popt, pcov = curve_fit(fit_function, V2, I, initial, error)
a, b = popt
print(a, b)
print(pcov)

div = 1000
bucket = numpy.array([0.0 for i in range(div)])
funzione = numpy.array([0.0 for i in range(div)])
inc = (V2.max()-V2.min())/div
for i in range(len(bucket)):
    bucket[i] = float(i)*inc + V2.min()
    funzione[i] = fit_function(bucket[i], a, b)
pylab.plot(bucket, funzione, color = "red")

# compute the chi-square; the fit has two parameters, so ndof = N - 2
chisq = (((I - fit_function(V2, a, b))/error)**2).sum()
ndof = len(V2) - 2
p = 1.0 - scipy.stats.chi2.cdf(chisq, ndof)
print("Charge Chisquare/ndof = %f/%d" % (chisq, ndof))
print("p = ", p)
pylab.show()

#number = 150
#minV = 0.30
#maxV = 0.70
#inc = (maxV - minV)/number
#volt = numpy.array([(minV + i*inc) for i in range(number)])
#ampere = numpy.array([0.0 for i in range(number)])
#count = numpy.array([0 for i in range(number)])
#for i in range(number):
#    for j in range(len(V2)):
#        if(V2[j] == volt[i]):
#            ampere[j] += I[i]
#            count[j] += 1
#ampere = ampere/count
#V2 = volt
#I = ampere<|fim▁end|>
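The fit above judges goodness of fit with chi² = Σ((y − f(x))/σ)² and a p-value from the chi² distribution with N − k degrees of freedom (k fitted parameters; k = 2 for the diode model). A standalone sketch with made-up numbers:

```python
import numpy as np
from scipy import stats

y     = np.array([1.1, 1.9, 3.2, 4.0])   # measurements
model = np.array([1.0, 2.0, 3.0, 4.0])   # fitted model evaluated at the same x
sigma = np.full_like(y, 0.2)             # per-point uncertainties

chisq = np.sum(((y - model) / sigma) ** 2)
ndof = len(y) - 2                        # N points minus 2 fitted parameters
p = stats.chi2.sf(chisq, ndof)           # == 1 - cdf, but numerically safer
print(chisq, ndof, p)
```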
<|file_name|>impact_function.py<|end_file_name|><|fim▁begin|># coding=utf-8 """ InaSAFE Disaster risk assessment tool by AusAid - **Generic Impact Function on Population for Continuous Hazard.** Contact : [email protected] .. note:: This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. .. todo:: Check raster is single band """ import numpy from safe.impact_functions.generic\ .continuous_hazard_population.metadata_definitions import \ ContinuousHazardPopulationMetadata from safe.impact_functions.bases.continuous_rh_continuous_re import \ ContinuousRHContinuousRE from safe.impact_functions.impact_function_manager import ImpactFunctionManager from safe.impact_functions.core import ( evacuated_population_needs, population_rounding, has_no_data) from safe.storage.raster import Raster from safe.utilities.i18n import tr from safe.common.utilities import format_int from safe.common.tables import Table, TableRow from safe.common.utilities import create_classes, create_label, humanize_class from safe.common.exceptions import ( FunctionParametersError, ZeroImpactException) from safe.gui.tools.minimum_needs.needs_profile import add_needs_parameters, \ filter_needs_parameters __author__ = 'lucernae' __date__ = '24/03/15' __revision__ = '$Format:%H$' __copyright__ = ('Copyright 2014, Australia Indonesia Facility for ' 'Disaster Reduction') class ContinuousHazardPopulationFunction(ContinuousRHContinuousRE): # noinspection PyUnresolvedReferences """Plugin for impact of population as derived by continuous hazard.""" _metadata = ContinuousHazardPopulationMetadata() def __init__(self): super(ContinuousHazardPopulationFunction, self).__init__() self.impact_function_manager = ImpactFunctionManager() # AG: Use the proper minimum needs, update the parameters self.parameters = add_needs_parameters(self.parameters) def _tabulate( self, high, low, medium, question, total_impact): # Generate impact report for the pdf map table_body = [ question, TableRow([tr('People impacted '), '%s' % format_int(total_impact)], header=True), TableRow([tr('People in high hazard area '), '%s' % format_int(high)], header=True), TableRow([tr('People in medium hazard area '), '%s' % format_int(medium)], header=True), TableRow([tr('People in low hazard area'), '%s' % format_int(low)], header=True)] return table_body<|fim▁hole|> def _tabulate_notes( self, minimum_needs, table_body, total, total_impact, no_data_warning): # Extend impact report for on-screen display table_body.extend([ TableRow(tr('Notes'), header=True), tr('Map shows population count in high, medium, and low hazard ' 'area.'), tr('Total population: %s') % format_int(total), TableRow(tr( 'Table below shows the minimum needs for all ' 'affected people'))]) if no_data_warning: table_body.extend([ tr('The layers contained `no data`. 
This missing data was '
                   'carried through to the impact layer.'),
                tr('`No data` values in the impact layer were treated as 0 '
                   'when counting the affected or total population.')
            ])

        total_needs = evacuated_population_needs(
            total_impact, minimum_needs)
        for frequency, needs in total_needs.items():
            table_body.append(TableRow(
                [
                    tr('Needs should be provided %s' % frequency),
                    tr('Total')
                ],
                header=True))
            for resource in needs:
                table_body.append(TableRow([
                    tr(resource['table name']),
                    format_int(resource['amount'])]))

        return table_body, total_needs

    def run(self):
        """Plugin for impact of population as derived by continuous hazard.

        Hazard is reclassified into 3 classes based on the extrema provided
        as impact function parameters. Counts the number of people exposed
        to each category of the hazard.

        :returns: Map of population exposed to the high hazard category and
            a table with the number of people in each category.
        """
        self.validate()
        self.prepare()

        thresholds = [
            p.value for p in self.parameters['Categorical thresholds'].value]

        # The thresholds list must contain exactly 3 values
        if len(thresholds) != 3:
            raise FunctionParametersError(
                'The thresholds must consist of 3 values.')

        # Thresholds must be monotonically increasing
        monotonically_increasing_flag = all(
            x < y for x, y in zip(thresholds, thresholds[1:]))
        if not monotonically_increasing_flag:
            raise FunctionParametersError(
                'Each threshold should be larger than the previous.')

        # The 3 categories
        low_t = thresholds[0]
        medium_t = thresholds[1]
        high_t = thresholds[2]

        # Extract data as numeric arrays
        hazard_data = self.hazard.layer.get_data(nan=True)

        # Track whether either layer contains `no data` values
        no_data_warning = False
        if has_no_data(hazard_data):
            no_data_warning = True

        # Calculate impact as population exposed to each category
        exposure_data = self.exposure.layer.get_data(nan=True, scaling=True)
        if has_no_data(exposure_data):
            no_data_warning = True

        # Build one exposure array per hazard zone. 
Get the value of the exposure if the # exposure is in the hazard zone, else just assign 0 low_exposure = numpy.where(hazard_data < low_t, exposure_data, 0) medium_exposure = numpy.where( (hazard_data >= low_t) & (hazard_data < medium_t), exposure_data, 0) high_exposure = numpy.where( (hazard_data >= medium_t) & (hazard_data <= high_t), exposure_data, 0) impacted_exposure = low_exposure + medium_exposure + high_exposure # Count totals total = int(numpy.nansum(exposure_data)) low_total = int(numpy.nansum(low_exposure)) medium_total = int(numpy.nansum(medium_exposure)) high_total = int(numpy.nansum(high_exposure)) total_impact = high_total + medium_total + low_total # Check for zero impact if total_impact == 0: table_body = [ self.question, TableRow( [tr('People impacted'), '%s' % format_int(total_impact)], header=True)] message = Table(table_body).toNewlineFreeString() raise ZeroImpactException(message) # Don't show digits less than a 1000 total = population_rounding(total) total_impact = population_rounding(total_impact) low_total = population_rounding(low_total) medium_total = population_rounding(medium_total) high_total = population_rounding(high_total) minimum_needs = [ parameter.serialize() for parameter in filter_needs_parameters(self.parameters['minimum needs']) ] table_body = self._tabulate( high_total, low_total, medium_total, self.question, total_impact) impact_table = Table(table_body).toNewlineFreeString() table_body, total_needs = self._tabulate_notes( minimum_needs, table_body, total, total_impact, no_data_warning) impact_summary = Table(table_body).toNewlineFreeString() map_title = tr('People in each hazard areas (low, medium, high)') # Style for impact layer colours = [ '#FFFFFF', '#38A800', '#79C900', '#CEED00', '#FFCC00', '#FF6600', '#FF0000', '#7A0000'] classes = create_classes(impacted_exposure.flat[:], len(colours)) interval_classes = humanize_class(classes) style_classes = [] for i in xrange(len(colours)): style_class = dict() if i == 1: label = create_label( interval_classes[i], tr('Low Population [%i people/cell]' % classes[i])) elif i == 4: label = create_label( interval_classes[i], tr('Medium Population [%i people/cell]' % classes[i])) elif i == 7: label = create_label( interval_classes[i], tr('High Population [%i people/cell]' % classes[i])) else: label = create_label(interval_classes[i]) style_class['label'] = label style_class['quantity'] = classes[i] if i == 0: transparency = 100 else: transparency = 0 style_class['transparency'] = transparency style_class['colour'] = colours[i] style_classes.append(style_class) style_info = dict( target_field=None, style_classes=style_classes, style_type='rasterStyle') # Create raster object and return raster_layer = Raster( data=impacted_exposure, projection=self.hazard.layer.get_projection(), geotransform=self.hazard.layer.get_geotransform(), name=tr('Population might %s') % ( self.impact_function_manager. get_function_title(self).lower()), keywords={ 'impact_summary': impact_summary, 'impact_table': impact_table, 'map_title': map_title, 'total_needs': total_needs}, style_info=style_info) self._impact = raster_layer return raster_layer<|fim▁end|>
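The impact function above splits the exposure raster into hazard classes with `numpy.where` over threshold masks; NaN comparisons are False, so no-data pixels fall into no class, and `nansum` then ignores them in the totals. A compact sketch with toy data:

```python
import numpy as np

hazard   = np.array([[0.2, 0.8], [1.5, np.nan]])
exposure = np.array([[10.0, 20.0], [30.0, 40.0]])
low_t, medium_t, high_t = 0.5, 1.0, 2.0

low    = np.where(hazard < low_t, exposure, 0)
medium = np.where((hazard >= low_t) & (hazard < medium_t), exposure, 0)
high   = np.where((hazard >= medium_t) & (hazard <= high_t), exposure, 0)

# The NaN hazard cell lands in no class at all:
print(int(np.nansum(low)), int(np.nansum(medium)), int(np.nansum(high)))  # 10 20 30
```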
<|file_name|>map.rs<|end_file_name|><|fim▁begin|>use std::cmp; use rand::{self, Rng}; use tcod::bsp::{Bsp, TraverseOrder}; use consts; use object::{self, actor, Object, ObjectClass}; use object::load::ObjectRandomizer; use object::item::Function; use ai::Ai; pub const MAP_WIDTH: i32 = 80; pub const MAP_HEIGHT: i32 = 43; pub const FLOOR_WIDTH: i32 = 30; pub const FLOOR_HEIGHT: i32 = 30; pub const ROOM_MAX_SIZE: i32 = 10; pub const ROOM_MIN_X: i32 = 8; pub const ROOM_MIN_Y: i32 = 8; pub const MAX_ROOMS: i32 = 30; pub const MAX_ROOM_MONSTERS: i32 = 3; pub const MAX_ROOM_ITEMS:i32 = 4; #[derive(Debug, RustcEncodable, RustcDecodable)] pub struct Tile { pub floor: Object, pub explored: bool, pub items: Vec<Object>, } impl Tile { pub fn new(floor: &ObjectClass) -> Self { Tile{ floor: floor.create_object(), explored: false, items: vec![],} } } pub fn is_blocked(x: i32, y: i32, map: &Map, actors: &[Object]) -> object::Blocks { // Because actors are stored in a separate place from the map, we need // to check both for actors marked as being in a place on the map, // as well as all objects in the map location to see if they block // If only one thing blocks fully we know nothing new can move // onto that tile, so we are done. If something only partially blocks, we // have to keep checking in case there is something fully blocking. let mut blocks = object::Blocks::No; for actor in actors { if actor.x == x && actor.y == y { blocks = cmp::max(blocks, actor.blocks); if blocks == object::Blocks::Full { return blocks } } } for item in &map[x as usize][y as usize].items { blocks = cmp::max(blocks, item.blocks); if blocks == object::Blocks::Full { return blocks } } blocks } pub fn blocks_view(x: i32, y: i32, map: &Map, actors: &[Object]) -> object::Blocks { // Because actors are stored in a separate place from the map, we need // to check both for actors marked as being in a place on the map, // as well as all actors in the map location to see if they block // If only one thing blocks fully we know nothing can see through that // tile, so we are done. If something only partially blocks, we // have to keep checking in case there is something fully blocking. 
let mut blocks = object::Blocks::No; for actor in actors { if actor.x == x && actor.y == y { blocks = cmp::max(blocks, actor.blocks_view); if blocks == object::Blocks::Full { return blocks } } } for item in &map[x as usize][y as usize].items { blocks = cmp::max(blocks, item.blocks_view); if blocks == object::Blocks::Full { return blocks } } blocks } pub type Map = Vec<Vec<Tile>>; #[derive(Clone, Copy, Debug)] struct Rect { x1: i32, y1: i32, x2: i32, y2: i32, } impl Rect { pub fn new(x: i32, y: i32, w: i32, h: i32) -> Self { Rect { x1: x, y1: y, x2: x + w, y2: y + h } } pub fn center(&self) -> (i32, i32) { let center_x = (self.x1 + self.x2) / 2; let center_y = (self.y1 + self.y2) / 2; (center_x, center_y) } pub fn intersects_with(&self, other: &Rect) -> bool { (self.x1 <= other.x2) && (self.x2 >= other.x1) && (self.y1 <= other.y2) && (self.y2 >= other.y1) } } fn create_room(room: &mut Bsp, floor_class: &ObjectClass, map: &mut Map) { for x in (room.x)..room.x + room.w { for y in (room.y)..room.y + room.h { map[x as usize][y as usize] = Tile::new(floor_class); } } } fn place_objects(floor: usize, rooms: &Vec<Rect>, map: &mut Map, items: &object::load::ObjectTypes) { if floor == 1 { let mut stairs = (0, 0); for room in rooms { let ref mut door_randomizer = items.create_randomizer("door").unwrap(); if room.x1 == 1 && room.y1 == 1 { make_door(0, room.y2 / 2, door_randomizer, map); } else if room.y2 == FLOOR_HEIGHT - 1 || room.x2 == FLOOR_WIDTH - 1 { if stairs == (0, 0) || rand::random() { let stairs_x = room.x1 + ((room.x2 - room.x1)/2); let stairs_y = room.y1 + ((room.y2 - room.y1)/2); stairs = (stairs_x, stairs_y); } } } let (stairs_x, stairs_y) = stairs; let mut stairs_up = items.get_class("stairs up").create_object(); stairs_up.set_pos(stairs_x, stairs_y); map[stairs_x as usize][stairs_y as usize].items.push(stairs_up); } for _ in 0..rand::thread_rng().gen_range(1,3) { let room = rooms[rand::thread_rng().gen_range(0, rooms.len())]; let brick_x = room.x1 + 1; let brick_y = room.y1 + 2; if let Some(ref mut brick_random) = items.create_randomizer( "environmental weapon") { let brick_class = &mut brick_random.get_class(); let mut brick = brick_class.create_object(); brick.set_pos(brick_x, brick_y); map[brick_x as usize][brick_y as usize].items.push(brick); }; } } fn place_actors(floor: usize, rooms: &Vec<Rect>, map: &mut Map, actor_types: &object::load::ObjectTypes, actors: &mut Vec<Object>) { for room in rooms { if room.x1 == 1 && room.y1 == 1 { actors[consts::PLAYER].set_pos(1, room.y2 / 2); } } for _ in 0..rand::thread_rng().gen_range(1,2) { let room = rooms[rand::thread_rng().gen_range(0, rooms.len())]; let x = rand::thread_rng().gen_range(room.x1+1, room.x2); let y = rand::thread_rng().gen_range(room.y1+1, room.y2); if let Some(ref mut zombie_random) = actor_types.create_randomizer( "zombie") { let zombie_class = &mut zombie_random.get_class(); let mut zombie = zombie_class.create_object(); zombie.set_pos(x, y); actors.push(zombie); } } } fn make_door(x: i32, y: i32, door_randomizer: &mut ObjectRandomizer, map: &mut Map) { let door_class = door_randomizer.get_class(); let mut door = door_class.create_object(); door.set_pos(x, y); map[x as usize][y as usize].items[0] = door; } fn traverse_node(node: &mut Bsp, rooms: &mut Vec<Rect>, object_types: &object::load::ObjectTypes, floor_type: &ObjectClass, mut map: &mut Map) -> bool { if node.is_leaf() { let minx = node.x + 1; let mut maxx = node.x + node.w - 1; let mut miny = node.y + 1; let mut maxy = node.y + node.h - 1; if maxx == FLOOR_WIDTH - 
1 { maxx -= 1; } if maxy == FLOOR_HEIGHT - 1 { maxy -= 1; } node.x = minx; node.y = miny; node.w = maxx - minx + 1; node.h = maxy - miny + 1;<|fim▁hole|> create_room(node, floor_type, map); rooms.push(Rect::new(node.x, node.y, node.w, node.h)); } else { if let (Some(left), Some(right)) = (node.left(), node.right()) { node.x = cmp::min(left.x, right.x); node.y = cmp::min(left.y, right.y); node.w = cmp::max(left.x + left.w, right.x + right.w) - node.x; node.h = cmp::max(left.y + left.h, right.y + right.h) - node.y; let ref mut door_randomizer = object_types.create_randomizer("door") .unwrap(); if node.horizontal() { make_door(left.x, cmp::max(left.y, right.y) - 1, door_randomizer, &mut map); } else { make_door(cmp::max(left.x, right.x) - 1, left.y, door_randomizer, &mut map); } } } true } pub fn make_map(mut actors: &mut Vec<Object>) -> Map { let mut map = vec![]; let actor_types = object::load::load_objects( "data/objects/actors.json").unwrap(); let item_types = object::load::load_objects( "data/objects/items.json").unwrap(); let wall_class = item_types.get_class("brick wall"); let concrete_floor = item_types.get_class("concrete floor"); for x in 0..FLOOR_WIDTH { map.push(vec![]); for y in 0..FLOOR_HEIGHT { let mut wall_tile: Tile = Tile::new(&concrete_floor); let mut brick_wall = wall_class.create_object(); brick_wall.set_pos(x, y); wall_tile.items.push(brick_wall); map[x as usize].push(wall_tile); } } let mut rooms = vec![]; let mut bsp = Bsp::new_with_size(0, 0, FLOOR_WIDTH, FLOOR_HEIGHT); bsp.split_recursive(None, 3, ROOM_MIN_X, ROOM_MIN_Y, 1.25, 1.25); bsp.traverse(TraverseOrder::InvertedLevelOrder, |node| { traverse_node(node, &mut rooms, &item_types, &concrete_floor, &mut map) }); place_objects(1, &rooms, &mut map, &item_types); place_actors(1, &rooms, &mut map, &actor_types, &mut actors); map }<|fim▁end|>
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
declare module 'hsv2rgb';
<|file_name|>my_box.hpp<|end_file_name|><|fim▁begin|>#ifndef BOX_H #define BOX_H class MyBox: public Gtk::Frame { public: MyBox(const Glib::ustring& label, Gtk::Orientation orientation); void pack_start(Widget& child);<|fim▁hole|> private: Gtk::Box* p_child; }; #endif<|fim▁end|>
<|file_name|>003.rs<|end_file_name|><|fim▁begin|>#![feature(slicing_syntax)] extern crate test; extern crate time; use std::io::stdio; use std::iter; use std::os; fn solution() -> u64 { let mut n = 600_851_475_143; for factor in iter::count(3, 2) { while n % factor == 0 { n /= factor; } if factor * factor > n { return n;<|fim▁hole|> unreachable!(); } fn main() { match os::args()[] { [_, ref flag] if flag[] == "-a" => return println!("{}", solution()), _ => {}, } for line in stdio::stdin().lock().lines() { let iters: u64 = line.unwrap()[].trim().parse().unwrap(); let start = time::precise_time_ns(); for _ in range(0, iters) { test::black_box(solution()); } let end = time::precise_time_ns(); println!("{}", end - start); } }<|fim▁end|>
} else if n == 1 { return factor; } }
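The Rust record solves Project Euler 3 by trial division over odd candidates, dividing factors out of n and stopping once factor² exceeds the remainder. The same algorithm as a Python sketch (the original never checks 2, which is safe for the odd constant 600851475143; the sketch handles even input explicitly):

```python
def largest_prime_factor(n: int) -> int:
    largest = 1
    while n % 2 == 0:                 # handle even input up front
        largest, n = 2, n // 2
    factor = 3
    while factor * factor <= n:       # only need candidates up to sqrt(n)
        while n % factor == 0:
            largest, n = factor, n // factor
        factor += 2                   # odd candidates only
    return n if n > 1 else largest    # leftover n > 1 is itself prime

print(largest_prime_factor(600_851_475_143))  # 6857
```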
<|file_name|>file.js<|end_file_name|><|fim▁begin|>// Editor.js (function() { var STORAGE_KEY = slingUserId+'-browser-file'; var editor = ace.edit("editor"); var saveBtn = $('#saveBtn'); // parent file should set the aceMode variable editor.getSession().setMode(aceMode); editor.getSession().setUseWrapMode(false); editor.getSession().on('change', function(e) { saveBtn[0].disabled=false; }); $('#aceThemeSelect').on('change',function () { var theme = $(this).val();<|fim▁hole|> this.disabled=true; $('input#jcrData').val(editor.getValue()); $('#updateForm').submit(); }); $('#editor').css('opacity',1); var storage = getJsonLocalStorage(STORAGE_KEY); if (storage && storage.theme) { $('#aceThemeSelect').val(storage.theme).trigger('change'); } })()<|fim▁end|>
editor.setTheme("ace/theme/"+theme); setLocalStorage(STORAGE_KEY, {theme:theme}); }) saveBtn.on('click', function(e) {
<|file_name|>sale.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from ast import literal_eval

from odoo import models, fields, api


class SaleOrderLine(models.Model):
    _inherit = 'sale.order.line'

    config_ok = fields.Boolean(
        related='product_id.config_ok',
        string="Configurable",<|fim▁hole|>
    @api.multi
    def reconfigure_product(self):
        """ Creates and launches a product configurator wizard with a linked
        template and variant in order to re-configure an existing product.
        It is essentially a shortcut to pre-fill configuration data of a
        variant."""
        cfg_steps = self.product_id.product_tmpl_id.config_step_line_ids
        active_step = str(cfg_steps[0].id) if cfg_steps else 'configure'

        product_modifiable = literal_eval(
            self.env['ir.config_parameter'].sudo().get_param(
                'product_configurator.product_modifiable', default='False'))

        wizard_obj = self.env['product.configurator']
        wizard = wizard_obj.create({
            'product_modifiable': product_modifiable,
            'product_id': self.product_id.id,
            'state': active_step,
            'order_line_id': self.id,
        })

        return {
            'type': 'ir.actions.act_window',
            'res_model': 'product.configurator',
            'name': "Configure Product",
            'view_mode': 'form',
            'context': dict(
                self.env.context,
                wizard_id=wizard.id,
            ),
            'target': 'new',
            'res_id': wizard.id,
        }<|fim▁end|>
readonly=True )
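`reconfigure_product` above turns an `ir.config_parameter` string into a Python value with `ast.literal_eval`, which only accepts literal syntax and therefore stays safe on untrusted strings, unlike `eval`. A quick illustration:

```python
from ast import literal_eval

print(literal_eval("False"))          # -> the bool False, not a string
print(literal_eval("{'a': [1, 2]}"))  # literals, lists, dicts, tuples are fine

try:
    literal_eval("__import__('os').getcwd()")  # code, not a literal
except ValueError as exc:
    print("rejected:", exc)
```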
<|file_name|>IocpManager.cpp<|end_file_name|><|fim▁begin|>#include "stdafx.h"
#include "Exception.h"
#include "ThreadLocal.h"
#include "Log.h"
#include "IocpManager.h"
#include "EduServer_IOCP.h"
#include "ClientSession.h"
#include "IOThread.h"
#include "ClientSessionManager.h"
#include "DBContext.h"

IocpManager* GIocpManager = nullptr;

LPFN_DISCONNECTEX IocpManager::mFnDisconnectEx = nullptr;
LPFN_ACCEPTEX IocpManager::mFnAcceptEx = nullptr;
LPFN_CONNECTEX IocpManager::mFnConnectEx = nullptr;

char IocpManager::mAcceptBuf[64] = { 0, };

BOOL DisconnectEx(SOCKET hSocket, LPOVERLAPPED lpOverlapped, DWORD dwFlags, DWORD reserved)
{
    return IocpManager::mFnDisconnectEx(hSocket, lpOverlapped, dwFlags, reserved);
}

BOOL MyAcceptEx(SOCKET sListenSocket, SOCKET sAcceptSocket, PVOID lpOutputBuffer, DWORD dwReceiveDataLength,
    DWORD dwLocalAddressLength, DWORD dwRemoteAddressLength, LPDWORD lpdwBytesReceived, LPOVERLAPPED lpOverlapped)
{
    return IocpManager::mFnAcceptEx(sListenSocket, sAcceptSocket, lpOutputBuffer, dwReceiveDataLength,
        dwLocalAddressLength, dwRemoteAddressLength, lpdwBytesReceived, lpOverlapped);
}

BOOL ConnectEx(SOCKET hSocket, const struct sockaddr* name, int namelen, PVOID lpSendBuffer,
    DWORD dwSendDataLength, LPDWORD lpdwBytesSent, LPOVERLAPPED lpOverlapped)
{
    return IocpManager::mFnConnectEx(hSocket, name, namelen, lpSendBuffer, dwSendDataLength, lpdwBytesSent, lpOverlapped);
}

IocpManager::IocpManager() : mCompletionPort(NULL), mListenSocket(NULL)
{
    memset(mIoWorkerThread, 0, sizeof(mIoWorkerThread));
}

IocpManager::~IocpManager()
{
}

bool IocpManager::Initialize()
{
    /// winsock initializing
    WSADATA wsa;
    if (WSAStartup(MAKEWORD(2, 2), &wsa) != 0)
        return false;

    /// Create I/O Completion Port
    mCompletionPort = CreateIoCompletionPort(INVALID_HANDLE_VALUE, NULL, 0, 0);
    if (mCompletionPort == NULL)
        return false;

    /// create TCP socket
    mListenSocket = WSASocket(AF_INET, SOCK_STREAM, IPPROTO_TCP, NULL, 0, WSA_FLAG_OVERLAPPED);
    if (mListenSocket == INVALID_SOCKET)
        return false;

    HANDLE handle = CreateIoCompletionPort((HANDLE)mListenSocket, mCompletionPort, 0, 0);
    if (handle != mCompletionPort)
    {
        printf_s("[DEBUG] listen socket IOCP register error: %d\n", GetLastError());
        return false;
    }

    int opt = 1;
    setsockopt(mListenSocket, SOL_SOCKET, SO_REUSEADDR, (const char*)&opt, sizeof(int));

    /// bind
    SOCKADDR_IN serveraddr;
    ZeroMemory(&serveraddr, sizeof(serveraddr));
    serveraddr.sin_family = AF_INET;
    serveraddr.sin_port = htons(LISTEN_PORT);
    serveraddr.sin_addr.s_addr = htonl(INADDR_ANY);

    if (SOCKET_ERROR == bind(mListenSocket, (SOCKADDR*)&serveraddr, sizeof(serveraddr)))
        return false;

    GUID guidDisconnectEx = WSAID_DISCONNECTEX;
    DWORD bytes = 0;
    if (SOCKET_ERROR == WSAIoctl(mListenSocket, SIO_GET_EXTENSION_FUNCTION_POINTER, &guidDisconnectEx, sizeof(GUID),
        &mFnDisconnectEx, sizeof(LPFN_DISCONNECTEX), &bytes, NULL, NULL))
        return false;

    GUID guidAcceptEx = WSAID_ACCEPTEX;
    if (SOCKET_ERROR == WSAIoctl(mListenSocket, SIO_GET_EXTENSION_FUNCTION_POINTER, &guidAcceptEx, sizeof(GUID),
        &mFnAcceptEx, sizeof(LPFN_ACCEPTEX), &bytes, NULL, NULL))
        return false;

    GUID guidConnectEx = WSAID_CONNECTEX;
    if (SOCKET_ERROR == WSAIoctl(mListenSocket, SIO_GET_EXTENSION_FUNCTION_POINTER, &guidConnectEx, sizeof(GUID),
        &mFnConnectEx, sizeof(LPFN_CONNECTEX), &bytes, NULL, NULL))
        return false;

    /// make session pool
    GClientSessionManager->PrepareClientSessions();

    return true;
}

bool IocpManager::StartIoThreads()<|fim▁hole|>
        DWORD dwThreadId; /// thread IDs: the IO threads come after the DB threads..
        HANDLE hThread = (HANDLE)_beginthreadex(NULL, 0, IoWorkerThread,
            (LPVOID)(i+MAX_DB_THREAD), CREATE_SUSPENDED, (unsigned int*)&dwThreadId);
        if (hThread == NULL)
            return false;
        mIoWorkerThread[i] = new IOThread(hThread, mCompletionPort);
    }

    /// start!
    for (int i = 0; i < MAX_IO_THREAD; ++i)
    {
        ResumeThread(mIoWorkerThread[i]->GetHandle());
    }

    return true;
}

void IocpManager::StartAccept()
{
    /// listen
    if (SOCKET_ERROR == listen(mListenSocket, SOMAXCONN))
    {
        printf_s("[DEBUG] listen error\n");
        return;
    }

    while (GClientSessionManager->AcceptClientSessions())
    {
        Sleep(100);
    }
}

void IocpManager::Finalize()
{
    for (int i = 0; i < MAX_IO_THREAD; ++i)
    {
        CloseHandle(mIoWorkerThread[i]->GetHandle());
    }

    CloseHandle(mCompletionPort);

    /// winsock finalizing
    WSACleanup();
}

unsigned int WINAPI IocpManager::IoWorkerThread(LPVOID lpParam)
{
    LThreadType = THREAD_IO_WORKER;
    LWorkerThreadId = reinterpret_cast<int>(lpParam);
    LSendRequestSessionList = new std::deque<Session*>;

    GThreadCallHistory[LWorkerThreadId] = LThreadCallHistory = new ThreadCallHistory(LWorkerThreadId);
    GThreadCallElapsedRecord[LWorkerThreadId] = LThreadCallElapsedRecord = new ThreadCallElapsedRecord(LWorkerThreadId);

    /// the DB threads must be started first, before entering here.
    CRASH_ASSERT(LWorkerThreadId >= MAX_DB_THREAD);

    return GIocpManager->mIoWorkerThread[LWorkerThreadId-MAX_DB_THREAD]->Run();
}

void IocpManager::PostDatabaseResult(DatabaseJobContext* dbContext)
{
    if (FALSE == PostQueuedCompletionStatus(mCompletionPort, 0, (ULONG_PTR)CK_DB_RESULT, (LPOVERLAPPED)dbContext))
    {
        printf_s("IocpManager::PostDatabaseResult PostQueuedCompletionStatus Error: %d\n", GetLastError());
        CRASH_ASSERT(false);
    }
}<|fim▁end|>
{ /// create I/O Thread for (int i = 0; i < MAX_IO_THREAD; ++i) {
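The C++ record drives its I/O workers off a shared Windows completion port: each thread blocks on the port and handles whatever completion it dequeues, and `PostDatabaseResult` injects user-posted items the same way. A loose, platform-neutral Python analogy using a shared queue (illustrative only: a real IOCP also delivers kernel I/O completions, not just user-posted items):

```python
import queue
import threading

completions: "queue.Queue[object]" = queue.Queue()
SHUTDOWN = object()   # sentinel used to stop the workers

def io_worker(worker_id: int) -> None:
    while True:
        job = completions.get()        # blocks, like GetQueuedCompletionStatus
        if job is SHUTDOWN:
            break
        print(f"worker {worker_id} handled {job!r}")

workers = [threading.Thread(target=io_worker, args=(i,)) for i in range(4)]
for w in workers:
    w.start()
for n in range(8):
    completions.put(("db_result", n))  # like PostQueuedCompletionStatus
for _ in workers:
    completions.put(SHUTDOWN)
for w in workers:
    w.join()
```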
<|file_name|>main.go<|end_file_name|><|fim▁begin|>package main import ( "flag" "os" "runtime" "github.com/cpalone/maimai" "github.com/Sirupsen/logrus" ) var roomName string var nick string var logPath string var dbPath string var password string var join bool var msgLog bool var logger = logrus.New() func init() { const ( defaultRoom = "test" defaultNick = "MaiMai" defaultLog = "room_test.log" defaultDB = "room_test.db" defaultPass = "" defaultJoin = false defaultMsgLog = false ) flag.StringVar(&roomName, "room", defaultRoom, "room for the bot to join") flag.StringVar(&nick, "nick", defaultNick, "nick for the bot to use") flag.StringVar(&logPath, "log", defaultLog, "path for the bot's log") flag.StringVar(&dbPath, "db", defaultDB, "path for the bot's db") flag.StringVar(&password, "pass", defaultPass, "password for the room") flag.BoolVar(&join, "join", defaultJoin, "whether the bot sends join/part/nick messages") flag.BoolVar(&msgLog, "msglog", defaultMsgLog, "whether the bot logs messages.") } func main() { flag.Parse() logFile, err := os.OpenFile(logPath, os.O_RDWR|os.O_CREATE|os.O_APPEND, 0666)<|fim▁hole|> } defer logFile.Close() logger.Out = logFile logger.Level = logrus.DebugLevel logger.Formatter = &logrus.JSONFormatter{} runtime.GOMAXPROCS(runtime.NumCPU() - 1) roomCfg := &maimai.RoomConfig{ DBPath: dbPath, ErrorLogPath: logPath, Join: join, MsgLog: msgLog, Nick: nick, Password: password, } room, err := maimai.NewRoom(roomCfg, roomName, maimai.NewWSSenderReceiver(roomName, logger), logger) if err != nil { panic(err) } room.Run() }<|fim▁end|>
if err != nil { panic(err)
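The Go record wires logrus to a log file with a JSON formatter at debug level. The equivalent shape with Python's standard library (the JSON layout is hand-rolled here, whereas logrus provides it natively):

```python
import json
import logging

class JsonFormatter(logging.Formatter):
    def format(self, record: logging.LogRecord) -> str:
        return json.dumps({
            "level": record.levelname.lower(),
            "msg": record.getMessage(),
            "time": self.formatTime(record),
        })

logger = logging.getLogger("room_test")
handler = logging.FileHandler("room_test.log", mode="a")  # append, like O_APPEND
handler.setFormatter(JsonFormatter())
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)

logger.debug("bot starting")
```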
<|file_name|>oauth_provider.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from odoo import models, fields, api from datetime import datetime, timedelta from odoo.tools import DEFAULT_SERVER_DATETIME_FORMAT try: from oauthlib import common as oauthlib_common except: pass import uuid class OauthApplication(models.Model): CLIENT_ID_CHARACTER_SET = r'_-.:;=?!@0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz' _name = 'oauth.application' _rec_name = 'client_id' def generate_client_id(self): return str(uuid.uuid1()) client_id = fields.Char('Client ID', index=True, required=True, default=generate_client_id) token_ids = fields.One2many('oauth.access_token', 'application_id', 'Tokens') _sql_constraints = [ ('client_id_uniq', 'unique (client_id)', 'client_id should be unique!'), ] @api.multi def _get_access_token(self, user_id=None, create=False): self.ensure_one() if not user_id: user_id = self.env.user.id access_token = self.env['oauth.access_token'].sudo().search([('application_id', '=', self.id), ('user_id', '=', user_id)], order='id DESC', limit=1) if access_token: access_token = access_token[0] if access_token.is_expired(): access_token = None if not access_token and create: expires = datetime.now() + timedelta(seconds=60 * 60) vals = { 'user_id': user_id, 'scope': 'userinfo', 'expires': expires.strftime(DEFAULT_SERVER_DATETIME_FORMAT), 'token': oauthlib_common.generate_token(), 'application_id': self.id, } access_token = self.env['oauth.access_token'].create(vals) # we have to commit now, because /oauth2/tokeninfo could # be called before we finish current transaction. self._cr.commit() if not access_token: return None return access_token.token class OauthAccessToken(models.Model): _name = 'oauth.access_token' application_id = fields.Many2one('oauth.application', string='Application') token = fields.Char('Access Token', required=True) user_id = fields.Many2one('res.users', string='User', required=True) expires = fields.Datetime('Expires', required=True)<|fim▁hole|> @api.multi def is_valid(self, scopes=None): """ Checks if the access token is valid. :param scopes: An iterable containing the scopes to check or None """ self.ensure_one() return not self.is_expired() and self._allow_scopes(scopes) @api.multi def is_expired(self): self.ensure_one() return datetime.now() > datetime.strptime(self.expires, DEFAULT_SERVER_DATETIME_FORMAT) @api.multi def _allow_scopes(self, scopes): self.ensure_one() if not scopes: return True provided_scopes = set(self.scope.split()) resource_scopes = set(scopes) return resource_scopes.issubset(provided_scopes)<|fim▁end|>
scope = fields.Char('Scope')
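`is_expired` in the record above compares `datetime.now()` against a string timestamp parsed with the server's datetime format. A standalone sketch of the same check (the format string is assumed to match Odoo's `DEFAULT_SERVER_DATETIME_FORMAT`):

```python
from datetime import datetime, timedelta

FMT = "%Y-%m-%d %H:%M:%S"  # assumed value of DEFAULT_SERVER_DATETIME_FORMAT

def is_expired(expires: str) -> bool:
    return datetime.now() > datetime.strptime(expires, FMT)

fresh = (datetime.now() + timedelta(hours=1)).strftime(FMT)
stale = (datetime.now() - timedelta(hours=1)).strftime(FMT)
print(is_expired(fresh), is_expired(stale))  # False True
```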
<|file_name|>JSLValidationEventHandler.java<|end_file_name|><|fim▁begin|>/* * Copyright 2012 International Business Machines Corp. * * See the NOTICE file distributed with this work for additional information * regarding copyright ownership. Licensed under the Apache License, * Version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.ibm.jbatch.jsl.util; import javax.xml.bind.ValidationEvent; import javax.xml.bind.ValidationEventHandler; public class JSLValidationEventHandler implements ValidationEventHandler { private boolean eventOccurred = false; public boolean handleEvent(ValidationEvent event) { System.out.println("\nMESSAGE: " + event.getMessage()); System.out.println("\nSEVERITY: " + event.getSeverity()); System.out.println("\nLINKED EXC: " + event.getLinkedException()); System.out.println("\nLOCATOR INFO:\n------------"); System.out.println("\n COLUMN NUMBER: " + event.getLocator().getColumnNumber()); System.out.println("\n LINE NUMBER: " + event.getLocator().getLineNumber()); System.out.println("\n OFFSET: " + event.getLocator().getOffset()); System.out.println("\n CLASS: " + event.getLocator().getClass()); System.out.println("\n NODE: " + event.getLocator().getNode()); System.out.println("\n OBJECT: " + event.getLocator().getObject()); System.out.println("\n URL: " + event.getLocator().getURL()); eventOccurred = true; // Allow more parsing feedback return true; } public boolean eventOccurred() { return eventOccurred; } <|fim▁hole|><|fim▁end|>
}
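A rough Python analogue of the event-collecting JAXB handler above, sketched with lxml's schema error log (assumes lxml is installed; this is not the jbatch project's code):

from lxml import etree

def validate_with_report(xsd_path, xml_path):
    schema = etree.XMLSchema(etree.parse(xsd_path))
    doc = etree.parse(xml_path)
    ok = schema.validate(doc)
    for err in schema.error_log:
        # mirrors the locator printout (message plus line/column) in the Java handler
        print("MESSAGE: %s  LINE: %s  COLUMN: %s" % (err.message, err.line, err.column))
    # like eventOccurred(), the caller learns whether any validation event fired
    return ok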
<|file_name|>raxml.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python """ wrapper to make simple calls to raxml """ import os import sys import glob import subprocess from ipyrad.analysis.utils import Params from ipyrad.assemble.utils import IPyradError # alias OPJ = os.path.join class Raxml(object): """ RAxML analysis utility function. This tool makes it easy to build a raxml command line string and submit it as a job. It also makes it easy to access the resulting tree files. Set params on the raxml object and print(<object>.command) to see raxml command string. Call .run() to submit the job running in background, or .run(block=True) to wait until it finishes. Parameters: ----------- data: str The phylip formated sequence file (.phy from ipyrad). An alias for '-s'. name: str The name for this run. An alias for '-n'. workdir: str The output directory for results. An alias for '-w'. Additional optional parameters ------------------------------- f: str (-f a) The raxml function. Default is 'a'. T: str (-T 4) The number of threads. Default is 4. m: str (-m GTRGAMMA) The model to use. N: str (-N 100) The number of distinct starting trees from which to run full search, or number of bootstrap replicates to run if using -f a. x: str (-x 12345) The bootstrap random seed. p: str (-p 54321) The parsimony random seed. n: str (-n test) The prefix name for output files w: str (-w outdir) The output directory s: str (-s seq.phy) The .phy formatted sequence file. o: str or list (-o tax1,tax2) A list of outgroup sample names or a string. Attributes: ----------- params: dict parameters for this raxml run command: returns the command string to run raxml Functions: ---------- run() submits a raxml job to locally or on an ipyparallel client cluster. """ # init object for params def __init__( self, data, name="test", workdir="analysis-raxml", *args, **kwargs): # path attributes self._kwargs = { "f": "a", "T": 4, # <- change to zero !? "m": "GTRGAMMA", "N": 100, "x": 12345, "p": 54321, "o": None, "binary": "", } # update kwargs for user args and drop key if value is None self._kwargs.update(kwargs) self._kwargs = {i: j for (i, j) in self._kwargs.items() if j is not None} # check workdir if workdir: workdir = os.path.abspath(os.path.expanduser(workdir)) else: workdir = os.path.abspath(os.path.curdir) if not os.path.exists(workdir): os.makedirs(workdir) # store entered args in params object self.params = Params() self.params.n = name self.params.w = workdir self.params.s = os.path.abspath(os.path.expanduser(data)) # if arg append kwargs to top of list of binaries to search for binaries = _get_binary_paths() if self._kwargs["binary"]: binaries = [self._kwargs["binary"]] + binaries # sefind a binary from the list self.params.binary = _check_binaries(binaries) # set params notparams = set(["workdir", "name", "data", "binary"]) for key in set(self._kwargs.keys()) - notparams: self.params[key] = self._kwargs[key] # attributesx self.rasync = None self.stdout = None self.stderr = None # results files self.trees = Params() self.trees.bestTree = OPJ(workdir, "RAxML_bestTree." + name) self.trees.bipartitionsBranchLabels = OPJ(workdir, "RAxML_bipartitionsBranchLabels." + name) self.trees.bipartitions = OPJ(workdir, "RAxML_bipartitions." + name) self.trees.bootstrap = OPJ(workdir, "RAxML_bootstrap." + name) self.trees.info = OPJ(workdir, "RAxML_info." 
+ name) @property def _command_list(self): """ build the command list """ cmd = [ self.params.binary, "-f", str(self.params.f), "-T", str(self.params.T), "-m", str(self.params.m), "-n", str(self.params.n), "-w", str(self.params.w), "-s", str(self.params.s), "-p", str(self.params.p), ] if 'N' in self.params: cmd += ["-N", str(self.params.N)] if "x" in self.params: cmd += ["-x", str(self.params.x)] # ultrafast boostrap and mapping with -f d # If no bootstraps then run -f D not -f a, and drop -x and -N # if "-f D": # add ougroups if 'o' in self.params: cmd += ["-o"] cmd += [",".join(self.params.o)] return cmd @property def command(self): """ returns command as a string """ return " ".join(self._command_list) def run( self, ipyclient=None, quiet=False, force=False, block=False, ): """ Submits raxml job to run. If no ipyclient object is provided then the function will block until the raxml run is finished. If an ipyclient is provided then the job is sent to a remote engine and an asynchronous result object is returned which can be queried or awaited until it finishes. Parameters -----------<|fim▁hole|> ipyclient: Not yet supported... quiet: suppress print statements force: overwrite existing results files with this job name. block: will block progress in notebook until job finishes, even if job is running on a remote ipyclient. """ # force removes old files, a bit risky here if names are subsets if force: opath = os.path.join( self.params.w, "RAxML_*.{}".format(self.params.n)) oldfiles = glob.glob(opath) for oldfile in oldfiles: if os.path.exists(oldfile): os.remove(oldfile) if os.path.exists(self.trees.info): print("Error Files Exist: set a new name or use Force flag.\n{}" .format(self.trees.info)) return ## TODO: add a progress bar tracker here. It could even read it from ## the info file that is being written. 
## submit it if not ipyclient: proc = _call_raxml(self._command_list) self.stdout = proc[0] self.stderr = proc[1] else: # find all hosts and submit job to the host with most available engines lbview = ipyclient.load_balanced_view() self.rasync = lbview.apply(_call_raxml, self._command_list) # initiate random seed if not quiet: if not ipyclient: # look for errors if "Overall execution time" not in self.stdout.decode(): print("Error in raxml run\n" + self.stdout.decode()) else: print("job {} finished successfully".format(self.params.n)) else: if block: print("job {} running".format(self.params.n)) ipyclient.wait() if self.rasync.successful(): print( "job {} finished successfully" .format(self.params.n)) else: raise IPyradError(self.rasync.get()) else: print("job {} submitted to cluster".format(self.params.n)) def _get_binary_paths(): # check for binary list_binaries = [ "raxmlHPC-PTHREADS-AVX2", "raxmlHPC-PTHREADS-AVX", "raxmlHPC-PTHREADS-SSE3", "raxmlHPC-PTHREADS", ] # expand for env path list_binaries = [os.path.join(sys.prefix, "bin", i) for i in list_binaries] return list_binaries def _check_binaries(binaries): """ find and return a working binary""" # check user binary first, then backups for binary in binaries: # call which to find proc = subprocess.Popen( ["which", binary], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, ).communicate() # if it exists then update the binary if proc[0]: return binary # if you get here then no binaries were found raise NameError(BINARY_ERROR) def _call_raxml(command_list): """ call the command as sps """ proc = subprocess.Popen( command_list, stderr=subprocess.STDOUT, stdout=subprocess.PIPE ) comm = proc.communicate() return comm BINARY_ERROR = """ RAxML binary not found. Check that you have raxml installed. For example, with conda: 'conda install raxml -c bioconda' If you have a different binary installed you can select it using the argument 'binary'. For example: rax = ipa.raxml(name='test', data='test.phy', binary='raxmlHPC') """<|fim▁end|>
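Sketch of a simpler binary lookup than the `which` subprocess used in `_check_binaries` above, using only the standard library over the same candidate list (an alternative, not how ipyrad actually does it):

import shutil

RAXML_CANDIDATES = [
    "raxmlHPC-PTHREADS-AVX2",
    "raxmlHPC-PTHREADS-AVX",
    "raxmlHPC-PTHREADS-SSE3",
    "raxmlHPC-PTHREADS",
]

def find_raxml(candidates=RAXML_CANDIDATES):
    # shutil.which performs the PATH search that the subprocess call to `which` does
    for name in candidates:
        path = shutil.which(name)
        if path:
            return path
    raise NameError("no raxml binary found; try `conda install raxml -c bioconda`")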
<|file_name|>FormatTextdirectionLToRRounded.js<|end_file_name|><|fim▁begin|>"use strict"; var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); Object.defineProperty(exports, "__esModule", {<|fim▁hole|> var _createSvgIcon = _interopRequireDefault(require("./utils/createSvgIcon")); var _jsxRuntime = require("react/jsx-runtime"); var _default = (0, _createSvgIcon.default)( /*#__PURE__*/(0, _jsxRuntime.jsx)("path", { d: "M9 10v4c0 .55.45 1 1 1s1-.45 1-1V4h2v10c0 .55.45 1 1 1s1-.45 1-1V4h1c.55 0 1-.45 1-1s-.45-1-1-1H9.17C7.08 2 5.22 3.53 5.02 5.61 4.79 7.99 6.66 10 9 10zm11.65 7.65-2.79-2.79c-.32-.32-.86-.1-.86.35V17H6c-.55 0-1 .45-1 1s.45 1 1 1h11v1.79c0 .45.54.67.85.35l2.79-2.79c.2-.19.2-.51.01-.7z" }), 'FormatTextdirectionLToRRounded'); exports.default = _default;<|fim▁end|>
value: true }); exports.default = void 0;
<|file_name|>sha512.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1<|fim▁hole|><|fim▁end|>
oid sha256:4a4e80129485fe848fa53149568184f09fa2da8648b6476b750ef97344bd4c5b size 10959
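The two rows above form a complete git-lfs pointer file; a small parser sketch, with field names taken from the pointer text as shown:

def parse_lfs_pointer(text):
    # a pointer file is newline-separated "key value" pairs
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    algo, digest = fields["oid"].split(":", 1)
    return {"version": fields["version"], "algo": algo,
            "digest": digest, "size": int(fields["size"])}

ptr = parse_lfs_pointer(
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:4a4e80129485fe848fa53149568184f09fa2da8648b6476b750ef97344bd4c5b\n"
    "size 10959\n"
)
assert ptr["algo"] == "sha256" and ptr["size"] == 10959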
<|file_name|>TransportMasterNodeActionTests.java<|end_file_name|><|fim▁begin|>/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.action.support.master; import org.elasticsearch.Version; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.ThreadedActionListener; import org.elasticsearch.action.support.replication.ClusterStateCreationUtils; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.NotMasterException; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.MasterNotDiscoveredException; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.CapturingTransport; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.TransportService; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import java.util.Collections; import java.util.HashSet; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.elasticsearch.test.ClusterServiceUtils.setState; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; public class TransportMasterNodeActionTests extends ESTestCase { private static ThreadPool threadPool; private ClusterService clusterService; private TransportService transportService; private CapturingTransport transport; private DiscoveryNode localNode; private DiscoveryNode remoteNode; private DiscoveryNode[] allNodes; @BeforeClass public static void beforeClass() { threadPool = new TestThreadPool("TransportMasterNodeActionTests"); } @Override @Before public void 
setUp() throws Exception { super.setUp(); transport = new CapturingTransport(); clusterService = createClusterService(threadPool); transportService = new TransportService(clusterService.getSettings(), transport, threadPool); transportService.start(); transportService.acceptIncomingRequests(); localNode = new DiscoveryNode("local_node", DummyTransportAddress.INSTANCE, Collections.emptyMap(), Collections.singleton(DiscoveryNode.Role.MASTER), Version.CURRENT); remoteNode = new DiscoveryNode("remote_node", DummyTransportAddress.INSTANCE, Collections.emptyMap(), Collections.singleton(DiscoveryNode.Role.MASTER), Version.CURRENT); allNodes = new DiscoveryNode[]{localNode, remoteNode}; } @After public void tearDown() throws Exception { super.tearDown(); clusterService.close(); transportService.close(); } @AfterClass public static void afterClass() { ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); threadPool = null; } void assertListenerThrows(String msg, ActionFuture<?> listener, Class<?> klass) throws InterruptedException { try { listener.get(); fail(msg); } catch (ExecutionException ex) { assertThat(ex.getCause(), instanceOf(klass)); } } public static class Request extends MasterNodeRequest<Request> { @Override public ActionRequestValidationException validate() { return null; } } class Response extends ActionResponse {} class Action extends TransportMasterNodeAction<Request, Response> { Action(Settings settings, String actionName, TransportService transportService, ClusterService clusterService, ThreadPool threadPool) { super(settings, actionName, transportService, clusterService, threadPool, new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), Request::new); } @Override protected void doExecute(Task task, final Request request, ActionListener<Response> listener) { // remove unneeded threading by wrapping listener with SAME to prevent super.doExecute from wrapping it with LISTENER super.doExecute(task, request, new ThreadedActionListener<>(logger, threadPool, ThreadPool.Names.SAME, listener)); } @Override protected String executor() { // very lightweight operation in memory, no need to fork to a thread return ThreadPool.Names.SAME; } @Override protected Response newResponse() { return new Response(); } @Override protected void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws Exception { listener.onResponse(new Response()); // default implementation, overridden in specific tests } @Override protected ClusterBlockException checkBlock(Request request, ClusterState state) { return null; // default implementation, overridden in specific tests } } public void testLocalOperationWithoutBlocks() throws ExecutionException, InterruptedException { final boolean masterOperationFailure = randomBoolean(); Request request = new Request(); PlainActionFuture<Response> listener = new PlainActionFuture<>(); final Throwable exception = new Throwable(); final Response response = new Response(); setState(clusterService, ClusterStateCreationUtils.state(localNode, localNode, allNodes)); new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool) { @Override protected void masterOperation(Task task, Request request, ClusterState state, ActionListener<Response> listener) throws Exception { if (masterOperationFailure) { listener.onFailure(exception); } else { listener.onResponse(response); }<|fim▁hole|> } }.execute(request, listener); assertTrue(listener.isDone()); if (masterOperationFailure) { try { 
listener.get(); fail("Expected exception but returned proper result"); } catch (ExecutionException ex) { assertThat(ex.getCause(), equalTo(exception)); } } else { assertThat(listener.get(), equalTo(response)); } } public void testLocalOperationWithBlocks() throws ExecutionException, InterruptedException { final boolean retryableBlock = randomBoolean(); final boolean unblockBeforeTimeout = randomBoolean(); Request request = new Request().masterNodeTimeout(TimeValue.timeValueSeconds(unblockBeforeTimeout ? 60 : 0)); PlainActionFuture<Response> listener = new PlainActionFuture<>(); ClusterBlock block = new ClusterBlock(1, "", retryableBlock, true, randomFrom(RestStatus.values()), ClusterBlockLevel.ALL); ClusterState stateWithBlock = ClusterState.builder(ClusterStateCreationUtils.state(localNode, localNode, allNodes)) .blocks(ClusterBlocks.builder().addGlobalBlock(block)).build(); setState(clusterService, stateWithBlock); new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool) { @Override protected ClusterBlockException checkBlock(Request request, ClusterState state) { Set<ClusterBlock> blocks = state.blocks().global(); return blocks.isEmpty() ? null : new ClusterBlockException(blocks); } }.execute(request, listener); if (retryableBlock && unblockBeforeTimeout) { assertFalse(listener.isDone()); setState(clusterService, ClusterState.builder(ClusterStateCreationUtils.state(localNode, localNode, allNodes)) .blocks(ClusterBlocks.EMPTY_CLUSTER_BLOCK).build()); assertTrue(listener.isDone()); listener.get(); return; } assertTrue(listener.isDone()); if (retryableBlock) { try { listener.get(); fail("Expected exception but returned proper result"); } catch (ExecutionException ex) { assertThat(ex.getCause(), instanceOf(MasterNotDiscoveredException.class)); assertThat(ex.getCause().getCause(), instanceOf(ClusterBlockException.class)); } } else { assertListenerThrows("ClusterBlockException should be thrown", listener, ClusterBlockException.class); } } public void testForceLocalOperation() throws ExecutionException, InterruptedException { Request request = new Request(); PlainActionFuture<Response> listener = new PlainActionFuture<>(); setState(clusterService, ClusterStateCreationUtils.state(localNode, randomFrom(null, localNode, remoteNode), allNodes)); new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool) { @Override protected boolean localExecute(Request request) { return true; } }.execute(request, listener); assertTrue(listener.isDone()); listener.get(); } public void testMasterNotAvailable() throws ExecutionException, InterruptedException { Request request = new Request().masterNodeTimeout(TimeValue.timeValueSeconds(0)); setState(clusterService, ClusterStateCreationUtils.state(localNode, null, allNodes)); PlainActionFuture<Response> listener = new PlainActionFuture<>(); new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool).execute(request, listener); assertTrue(listener.isDone()); assertListenerThrows("MasterNotDiscoveredException should be thrown", listener, MasterNotDiscoveredException.class); } public void testMasterBecomesAvailable() throws ExecutionException, InterruptedException { Request request = new Request(); setState(clusterService, ClusterStateCreationUtils.state(localNode, null, allNodes)); PlainActionFuture<Response> listener = new PlainActionFuture<>(); new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool).execute(request, listener); 
assertFalse(listener.isDone()); setState(clusterService, ClusterStateCreationUtils.state(localNode, localNode, allNodes)); assertTrue(listener.isDone()); listener.get(); } public void testDelegateToMaster() throws ExecutionException, InterruptedException { Request request = new Request(); setState(clusterService, ClusterStateCreationUtils.state(localNode, remoteNode, allNodes)); PlainActionFuture<Response> listener = new PlainActionFuture<>(); new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool).execute(request, listener); assertThat(transport.capturedRequests().length, equalTo(1)); CapturingTransport.CapturedRequest capturedRequest = transport.capturedRequests()[0]; assertTrue(capturedRequest.node.isMasterNode()); assertThat(capturedRequest.request, equalTo(request)); assertThat(capturedRequest.action, equalTo("testAction")); Response response = new Response(); transport.handleResponse(capturedRequest.requestId, response); assertTrue(listener.isDone()); assertThat(listener.get(), equalTo(response)); } public void testDelegateToFailingMaster() throws ExecutionException, InterruptedException { boolean failsWithConnectTransportException = randomBoolean(); Request request = new Request().masterNodeTimeout(TimeValue.timeValueSeconds(failsWithConnectTransportException ? 60 : 0)); setState(clusterService, ClusterStateCreationUtils.state(localNode, remoteNode, allNodes)); PlainActionFuture<Response> listener = new PlainActionFuture<>(); new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool).execute(request, listener); assertThat(transport.capturedRequests().length, equalTo(1)); CapturingTransport.CapturedRequest capturedRequest = transport.capturedRequests()[0]; assertTrue(capturedRequest.node.isMasterNode()); assertThat(capturedRequest.request, equalTo(request)); assertThat(capturedRequest.action, equalTo("testAction")); if (failsWithConnectTransportException) { transport.handleRemoteError(capturedRequest.requestId, new ConnectTransportException(remoteNode, "Fake error")); assertFalse(listener.isDone()); setState(clusterService, ClusterStateCreationUtils.state(localNode, localNode, allNodes)); assertTrue(listener.isDone()); listener.get(); } else { Throwable t = new Throwable(); transport.handleRemoteError(capturedRequest.requestId, t); assertTrue(listener.isDone()); try { listener.get(); fail("Expected exception but returned proper result"); } catch (ExecutionException ex) { assertThat(ex.getCause().getCause(), equalTo(t)); } } } public void testMasterFailoverAfterStepDown() throws ExecutionException, InterruptedException { Request request = new Request().masterNodeTimeout(TimeValue.timeValueHours(1)); PlainActionFuture<Response> listener = new PlainActionFuture<>(); final Response response = new Response(); setState(clusterService, ClusterStateCreationUtils.state(localNode, localNode, allNodes)); new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool) { @Override protected void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws Exception { // The other node has become master, simulate failures of this node while publishing cluster state through ZenDiscovery setState(clusterService, ClusterStateCreationUtils.state(localNode, remoteNode, allNodes)); Throwable failure = randomBoolean() ? 
new Discovery.FailedToCommitClusterStateException("Fake error") : new NotMasterException("Fake error"); listener.onFailure(failure); } }.execute(request, listener); assertThat(transport.capturedRequests().length, equalTo(1)); CapturingTransport.CapturedRequest capturedRequest = transport.capturedRequests()[0]; assertTrue(capturedRequest.node.isMasterNode()); assertThat(capturedRequest.request, equalTo(request)); assertThat(capturedRequest.action, equalTo("testAction")); transport.handleResponse(capturedRequest.requestId, response); assertTrue(listener.isDone()); assertThat(listener.get(), equalTo(response)); } }<|fim▁end|>
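A toy Python analogue (an assumption-laden sketch, not Elasticsearch code) of the behavior `testMasterBecomesAvailable` exercises above: a pending action completes only once a published cluster state carries a master.

class ToyMasterNodeAction:
    def __init__(self):
        self._pending = []

    def execute(self, request, listener):
        # queue the request until a state with a master is observed
        self._pending.append((request, listener))

    def on_cluster_state(self, state):
        if state.get("master") is None:
            return  # keep waiting, as the retry logic in the test does
        for request, listener in self._pending:
            listener(("ok", request))
        self._pending = []

results = []
action = ToyMasterNodeAction()
action.execute("req-1", results.append)
action.on_cluster_state({"master": None})     # no master yet, listener not done
assert not results
action.on_cluster_state({"master": "local"})  # master appears, action completes
assert results == [("ok", "req-1")]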
<|file_name|>snippets_test.py<|end_file_name|><|fim▁begin|># # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """Tests for all code snippets used in public docs.""" import glob import gzip import logging import os import tempfile import unittest import uuid import apache_beam as beam from apache_beam import coders from apache_beam import pvalue from apache_beam import typehints from apache_beam.transforms.util import assert_that from apache_beam.transforms.util import equal_to from apache_beam.utils.pipeline_options import TypeOptions from apache_beam.examples.snippets import snippets # pylint: disable=expression-not-assigned from apache_beam.test_pipeline import TestPipeline # Protect against environments where apitools library is not available. # pylint: disable=wrong-import-order, wrong-import-position try: from apitools.base.py import base_api except ImportError: base_api = None # pylint: enable=wrong-import-order, wrong-import-position # Protect against environments where datastore library is not available. # pylint: disable=wrong-import-order, wrong-import-position try: from google.cloud.proto.datastore.v1 import datastore_pb2 except ImportError: datastore_pb2 = None # pylint: enable=wrong-import-order, wrong-import-position class ParDoTest(unittest.TestCase): """Tests for model/par-do.""" def test_pardo(self): # Note: "words" and "ComputeWordLengthFn" are referenced by name in # the text of the doc. words = ['aa', 'bbb', 'c'] # [START model_pardo_pardo] class ComputeWordLengthFn(beam.DoFn): def process(self, element): return [len(element)] # [END model_pardo_pardo] # [START model_pardo_apply] # Apply a ParDo to the PCollection "words" to compute lengths for each word. 
word_lengths = words | beam.ParDo(ComputeWordLengthFn()) # [END model_pardo_apply] self.assertEqual({2, 3, 1}, set(word_lengths)) def test_pardo_yield(self): words = ['aa', 'bbb', 'c'] # [START model_pardo_yield] class ComputeWordLengthFn(beam.DoFn): def process(self, element): yield len(element) # [END model_pardo_yield] word_lengths = words | beam.ParDo(ComputeWordLengthFn()) self.assertEqual({2, 3, 1}, set(word_lengths)) def test_pardo_using_map(self): words = ['aa', 'bbb', 'c'] # [START model_pardo_using_map] word_lengths = words | beam.Map(len) # [END model_pardo_using_map] self.assertEqual({2, 3, 1}, set(word_lengths)) def test_pardo_using_flatmap(self): words = ['aa', 'bbb', 'c'] # [START model_pardo_using_flatmap] word_lengths = words | beam.FlatMap(lambda word: [len(word)]) # [END model_pardo_using_flatmap] self.assertEqual({2, 3, 1}, set(word_lengths)) def test_pardo_using_flatmap_yield(self): words = ['aA', 'bbb', 'C'] # [START model_pardo_using_flatmap_yield] def capitals(word): for letter in word: if 'A' <= letter <= 'Z': yield letter all_capitals = words | beam.FlatMap(capitals) # [END model_pardo_using_flatmap_yield] self.assertEqual({'A', 'C'}, set(all_capitals)) def test_pardo_with_label(self): # pylint: disable=line-too-long words = ['aa', 'bbc', 'defg'] # [START model_pardo_with_label] result = words | 'CountUniqueLetters' >> beam.Map( lambda word: len(set(word))) # [END model_pardo_with_label] self.assertEqual({1, 2, 4}, set(result)) def test_pardo_side_input(self): p = TestPipeline() words = p | 'start' >> beam.Create(['a', 'bb', 'ccc', 'dddd']) # [START model_pardo_side_input] # Callable takes additional arguments. def filter_using_length(word, lower_bound, upper_bound=float('inf')): if lower_bound <= len(word) <= upper_bound: yield word # Construct a deferred side input. avg_word_len = (words | beam.Map(len) | beam.CombineGlobally(beam.combiners.MeanCombineFn())) # Call with explicit side inputs. small_words = words | 'small' >> beam.FlatMap(filter_using_length, 0, 3) # A single deferred side input. larger_than_average = (words | 'large' >> beam.FlatMap( filter_using_length, lower_bound=pvalue.AsSingleton(avg_word_len))) # Mix and match. small_but_nontrivial = words | beam.FlatMap(filter_using_length, lower_bound=2, upper_bound=pvalue.AsSingleton( avg_word_len)) # [END model_pardo_side_input] beam.assert_that(small_words, beam.equal_to(['a', 'bb', 'ccc'])) beam.assert_that(larger_than_average, beam.equal_to(['ccc', 'dddd']), label='larger_than_average') beam.assert_that(small_but_nontrivial, beam.equal_to(['bb']), label='small_but_not_trivial') p.run() def test_pardo_side_input_dofn(self): words = ['a', 'bb', 'ccc', 'dddd'] # [START model_pardo_side_input_dofn] class FilterUsingLength(beam.DoFn): def process(self, element, lower_bound, upper_bound=float('inf')): if lower_bound <= len(element) <= upper_bound: yield element small_words = words | beam.ParDo(FilterUsingLength(), 0, 3) # [END model_pardo_side_input_dofn] self.assertEqual({'a', 'bb', 'ccc'}, set(small_words)) def test_pardo_with_side_outputs(self): # [START model_pardo_emitting_values_on_side_outputs] class ProcessWords(beam.DoFn): def process(self, element, cutoff_length, marker): if len(element) <= cutoff_length: # Emit this short word to the main output. yield element else: # Emit this word's long length to a side output. yield pvalue.SideOutputValue( 'above_cutoff_lengths', len(element)) if element.startswith(marker): # Emit this word to a different side output. 
yield pvalue.SideOutputValue('marked strings', element) # [END model_pardo_emitting_values_on_side_outputs] words = ['a', 'an', 'the', 'music', 'xyz'] # [START model_pardo_with_side_outputs] results = (words | beam.ParDo(ProcessWords(), cutoff_length=2, marker='x') .with_outputs('above_cutoff_lengths', 'marked strings', main='below_cutoff_strings')) below = results.below_cutoff_strings above = results.above_cutoff_lengths marked = results['marked strings'] # indexing works as well # [END model_pardo_with_side_outputs] self.assertEqual({'a', 'an'}, set(below)) self.assertEqual({3, 5}, set(above)) self.assertEqual({'xyz'}, set(marked)) # [START model_pardo_with_side_outputs_iter] below, above, marked = (words | beam.ParDo( ProcessWords(), cutoff_length=2, marker='x') .with_outputs('above_cutoff_lengths', 'marked strings', main='below_cutoff_strings')) # [END model_pardo_with_side_outputs_iter] self.assertEqual({'a', 'an'}, set(below)) self.assertEqual({3, 5}, set(above)) self.assertEqual({'xyz'}, set(marked)) def test_pardo_with_undeclared_side_outputs(self): numbers = [1, 2, 3, 4, 5, 10, 20] # [START model_pardo_with_side_outputs_undeclared] def even_odd(x): yield pvalue.SideOutputValue('odd' if x % 2 else 'even', x) if x % 10 == 0: yield x results = numbers | beam.FlatMap(even_odd).with_outputs() evens = results.even odds = results.odd tens = results[None] # the undeclared main output # [END model_pardo_with_side_outputs_undeclared] self.assertEqual({2, 4, 10, 20}, set(evens)) self.assertEqual({1, 3, 5}, set(odds)) self.assertEqual({10, 20}, set(tens)) class TypeHintsTest(unittest.TestCase): def test_bad_types(self): p = TestPipeline() evens = None # pylint: disable=unused-variable # [START type_hints_missing_define_numbers] numbers = p | beam.Create(['1', '2', '3']) # [END type_hints_missing_define_numbers] # Consider the following code. # pylint: disable=expression-not-assigned # pylint: disable=unused-variable # [START type_hints_missing_apply] evens = numbers | beam.Filter(lambda x: x % 2 == 0) # [END type_hints_missing_apply] # Now suppose numbers was defined as [snippet above]. # When running this pipeline, you'd get a runtime error, # possibly on a remote machine, possibly very late. with self.assertRaises(TypeError): p.run() # To catch this early, we can assert what types we expect. with self.assertRaises(typehints.TypeCheckError): # [START type_hints_takes] p.options.view_as(TypeOptions).pipeline_type_check = True evens = numbers | beam.Filter(lambda x: x % 2 == 0).with_input_types(int) # [END type_hints_takes] # Type hints can be declared on DoFns and callables as well, rather # than where they're used, to be more self contained. with self.assertRaises(typehints.TypeCheckError): # [START type_hints_do_fn] @beam.typehints.with_input_types(int) class FilterEvensDoFn(beam.DoFn): def process(self, element): if element % 2 == 0: yield element evens = numbers | beam.ParDo(FilterEvensDoFn()) # [END type_hints_do_fn] words = p | 'words' >> beam.Create(['a', 'bb', 'c']) # One can assert outputs and apply them to transforms as well. # Helps document the contract and checks it at pipeline construction time. 
# [START type_hints_transform] T = beam.typehints.TypeVariable('T') @beam.typehints.with_input_types(T) @beam.typehints.with_output_types(beam.typehints.Tuple[int, T]) class MyTransform(beam.PTransform): def expand(self, pcoll): return pcoll | beam.Map(lambda x: (len(x), x)) words_with_lens = words | MyTransform() # [END type_hints_transform] # pylint: disable=expression-not-assigned with self.assertRaises(typehints.TypeCheckError): words_with_lens | beam.Map(lambda x: x).with_input_types( beam.typehints.Tuple[int, int]) def test_runtime_checks_off(self): # pylint: disable=expression-not-assigned p = TestPipeline() # [START type_hints_runtime_off] p | beam.Create(['a']) | beam.Map(lambda x: 3).with_output_types(str) p.run() # [END type_hints_runtime_off] def test_runtime_checks_on(self): # pylint: disable=expression-not-assigned p = TestPipeline() with self.assertRaises(typehints.TypeCheckError): # [START type_hints_runtime_on] p.options.view_as(TypeOptions).runtime_type_check = True p | beam.Create(['a']) | beam.Map(lambda x: 3).with_output_types(str) p.run() # [END type_hints_runtime_on] def test_deterministic_key(self): p = TestPipeline() lines = (p | beam.Create( ['banana,fruit,3', 'kiwi,fruit,2', 'kiwi,fruit,2', 'zucchini,veg,3'])) # [START type_hints_deterministic_key] class Player(object): def __init__(self, team, name): self.team = team self.name = name class PlayerCoder(beam.coders.Coder): def encode(self, player): return '%s:%s' % (player.team, player.name) def decode(self, s): return Player(*s.split(':')) def is_deterministic(self): return True beam.coders.registry.register_coder(Player, PlayerCoder) def parse_player_and_score(csv): name, team, score = csv.split(',') return Player(team, name), int(score) totals = ( lines | beam.Map(parse_player_and_score) | beam.CombinePerKey(sum).with_input_types( beam.typehints.Tuple[Player, int])) # [END type_hints_deterministic_key] assert_that( totals | beam.Map(lambda (k, v): (k.name, v)), equal_to([('banana', 3), ('kiwi', 4), ('zucchini', 3)])) p.run() class SnippetsTest(unittest.TestCase): # Replacing text read/write transforms with dummy transforms for testing. class DummyReadTransform(beam.PTransform): """A transform that will replace iobase.ReadFromText. To be used for testing. """ def __init__(self, file_to_read=None, compression_type=None): self.file_to_read = file_to_read self.compression_type = compression_type class ReadDoFn(beam.DoFn): def __init__(self, file_to_read, compression_type): self.file_to_read = file_to_read self.compression_type = compression_type self.coder = coders.StrUtf8Coder() def process(self, element): pass def finish_bundle(self): assert self.file_to_read for file_name in glob.glob(self.file_to_read): if self.compression_type is None: with open(file_name) as file: for record in file: yield self.coder.decode(record.rstrip('\n')) else: with gzip.open(file_name, 'r') as file: for record in file: yield self.coder.decode(record.rstrip('\n')) def expand(self, pcoll): return pcoll | beam.Create([None]) | 'DummyReadForTesting' >> beam.ParDo( SnippetsTest.DummyReadTransform.ReadDoFn( self.file_to_read, self.compression_type)) class DummyWriteTransform(beam.PTransform): """A transform that will replace iobase.WriteToText. To be used for testing. 
""" def __init__(self, file_to_write=None, file_name_suffix=''): self.file_to_write = file_to_write class WriteDoFn(beam.DoFn): def __init__(self, file_to_write): self.file_to_write = file_to_write self.file_obj = None self.coder = coders.ToStringCoder() def start_bundle(self): assert self.file_to_write self.file_to_write += str(uuid.uuid4()) self.file_obj = open(self.file_to_write, 'w') def process(self, element): assert self.file_obj self.file_obj.write(self.coder.encode(element) + '\n') def finish_bundle(self): assert self.file_obj self.file_obj.close() def expand(self, pcoll): return pcoll | 'DummyWriteForTesting' >> beam.ParDo( SnippetsTest.DummyWriteTransform.WriteDoFn(self.file_to_write)) def setUp(self): self.old_read_from_text = beam.io.ReadFromText self.old_write_to_text = beam.io.WriteToText # Monkey patching to allow testing pipelines defined in snippets.py using # real data. beam.io.ReadFromText = SnippetsTest.DummyReadTransform beam.io.WriteToText = SnippetsTest.DummyWriteTransform self.temp_files = [] def tearDown(self): beam.io.ReadFromText = self.old_read_from_text beam.io.WriteToText = self.old_write_to_text # Cleanup all the temporary files created in the test map(os.remove, self.temp_files) def create_temp_file(self, contents=''): with tempfile.NamedTemporaryFile(delete=False) as f: f.write(contents) self.temp_files.append(f.name) return f.name def get_output(self, path, sorted_output=True, suffix=''): all_lines = [] for file_name in glob.glob(path + '*'): with open(file_name) as f: lines = f.readlines() all_lines.extend([s.rstrip('\n') for s in lines]) if sorted_output: return sorted(s.rstrip('\n') for s in all_lines) else: return all_lines def test_model_pipelines(self): temp_path = self.create_temp_file('aa bb cc\n bb cc\n cc') result_path = temp_path + '.result' snippets.model_pipelines([ '--input=%s*' % temp_path, '--output=%s' % result_path]) self.assertEqual( self.get_output(result_path), [str(s) for s in [(u'aa', 1), (u'bb', 2), (u'cc', 3)]]) def test_model_pcollection(self): temp_path = self.create_temp_file() snippets.model_pcollection(['--output=%s' % temp_path]) self.assertEqual(self.get_output(temp_path, sorted_output=False), [ 'To be, or not to be: that is the question: ', 'Whether \'tis nobler in the mind to suffer ', 'The slings and arrows of outrageous fortune, ', 'Or to take arms against a sea of troubles, ']) def test_construct_pipeline(self): temp_path = self.create_temp_file( 'abc def ghi\n jkl mno pqr\n stu vwx yz') result_path = self.create_temp_file() snippets.construct_pipeline({'read': temp_path, 'write': result_path}) self.assertEqual( self.get_output(result_path), ['cba', 'fed', 'ihg', 'lkj', 'onm', 'rqp', 'uts', 'xwv', 'zy']) def test_model_custom_source(self): snippets.model_custom_source(100) def test_model_custom_sink(self): tempdir_name = tempfile.mkdtemp() class SimpleKV(object): def __init__(self, tmp_dir): self._dummy_token = 'dummy_token' self._tmp_dir = tmp_dir def connect(self, url): return self._dummy_token def open_table(self, access_token, table_name): assert access_token == self._dummy_token file_name = self._tmp_dir + os.sep + table_name assert not os.path.exists(file_name) open(file_name, 'wb').close() return table_name def write_to_table(self, access_token, table_name, key, value): assert access_token == self._dummy_token file_name = self._tmp_dir + os.sep + table_name assert os.path.exists(file_name) with open(file_name, 'ab') as f: f.write(key + ':' + value + os.linesep) def rename_table(self, access_token, old_name, 
new_name): assert access_token == self._dummy_token<|fim▁hole|> new_file_name = self._tmp_dir + os.sep + new_name assert os.path.isfile(old_file_name) assert not os.path.exists(new_file_name) os.rename(old_file_name, new_file_name) snippets.model_custom_sink( SimpleKV(tempdir_name), [('key' + str(i), 'value' + str(i)) for i in range(100)], 'final_table_no_ptransform', 'final_table_with_ptransform') expected_output = [ 'key' + str(i) + ':' + 'value' + str(i) for i in range(100)] glob_pattern = tempdir_name + os.sep + 'final_table_no_ptransform*' output_files = glob.glob(glob_pattern) assert output_files received_output = [] for file_name in output_files: with open(file_name) as f: for line in f: received_output.append(line.rstrip(os.linesep)) self.assertItemsEqual(expected_output, received_output) glob_pattern = tempdir_name + os.sep + 'final_table_with_ptransform*' output_files = glob.glob(glob_pattern) assert output_files received_output = [] for file_name in output_files: with open(file_name) as f: for line in f: received_output.append(line.rstrip(os.linesep)) self.assertItemsEqual(expected_output, received_output) def test_model_textio(self): temp_path = self.create_temp_file('aa bb cc\n bb cc\n cc') result_path = temp_path + '.result' snippets.model_textio({'read': temp_path, 'write': result_path}) self.assertEqual( ['aa', 'bb', 'bb', 'cc', 'cc', 'cc'], self.get_output(result_path, suffix='.csv')) def test_model_textio_compressed(self): temp_path = self.create_temp_file('aa\nbb\ncc') gzip_file_name = temp_path + '.gz' with open(temp_path) as src, gzip.open(gzip_file_name, 'wb') as dst: dst.writelines(src) # Add the temporary gzip file to be cleaned up as well. self.temp_files.append(gzip_file_name) snippets.model_textio_compressed( {'read': gzip_file_name}, ['aa', 'bb', 'cc']) @unittest.skipIf(datastore_pb2 is None, 'GCP dependencies are not installed') def test_model_datastoreio(self): # We cannot test datastoreio functionality in unit tests therefore we limit # ourselves to making sure the pipeline containing Datastore read and write # transforms can be built. # TODO(vikasrk): Expore using Datastore Emulator. snippets.model_datastoreio() @unittest.skipIf(base_api is None, 'GCP dependencies are not installed') def test_model_bigqueryio(self): # We cannot test BigQueryIO functionality in unit tests therefore we limit # ourselves to making sure the pipeline containing BigQuery sources and # sinks can be built. 
snippets.model_bigqueryio() def _run_test_pipeline_for_options(self, fn): temp_path = self.create_temp_file('aa\nbb\ncc') result_path = temp_path + '.result' fn([ '--input=%s*' % temp_path, '--output=%s' % result_path]) self.assertEqual( ['aa', 'bb', 'cc'], self.get_output(result_path)) def test_pipeline_options_local(self): self._run_test_pipeline_for_options(snippets.pipeline_options_local) def test_pipeline_options_remote(self): self._run_test_pipeline_for_options(snippets.pipeline_options_remote) def test_pipeline_options_command_line(self): self._run_test_pipeline_for_options(snippets.pipeline_options_command_line) def test_pipeline_logging(self): result_path = self.create_temp_file() lines = ['we found love right where we are', 'we found love right from the start', 'we found love in a hopeless place'] snippets.pipeline_logging(lines, result_path) self.assertEqual( sorted(' '.join(lines).split(' ')), self.get_output(result_path)) def test_examples_wordcount(self): pipelines = [snippets.examples_wordcount_minimal, snippets.examples_wordcount_wordcount, snippets.pipeline_monitoring] for pipeline in pipelines: temp_path = self.create_temp_file( 'abc def ghi\n abc jkl') result_path = self.create_temp_file() pipeline({'read': temp_path, 'write': result_path}) self.assertEqual( self.get_output(result_path), ['abc: 2', 'def: 1', 'ghi: 1', 'jkl: 1']) def test_examples_wordcount_debugging(self): temp_path = self.create_temp_file( 'Flourish Flourish Flourish stomach abc def') result_path = self.create_temp_file() snippets.examples_wordcount_debugging( {'read': temp_path, 'write': result_path}) self.assertEqual( self.get_output(result_path), ['Flourish: 3', 'stomach: 1']) def test_model_composite_transform_example(self): contents = ['aa bb cc', 'bb cc', 'cc'] result_path = self.create_temp_file() snippets.model_composite_transform_example(contents, result_path) self.assertEqual(['aa: 1', 'bb: 2', 'cc: 3'], self.get_output(result_path)) def test_model_multiple_pcollections_flatten(self): contents = ['a', 'b', 'c', 'd', 'e', 'f'] result_path = self.create_temp_file() snippets.model_multiple_pcollections_flatten(contents, result_path) self.assertEqual(contents, self.get_output(result_path)) def test_model_multiple_pcollections_partition(self): contents = [17, 42, 64, 32, 0, 99, 53, 89] result_path = self.create_temp_file() snippets.model_multiple_pcollections_partition(contents, result_path) self.assertEqual(['0', '17', '32', '42', '53', '64', '89', '99'], self.get_output(result_path)) def test_model_group_by_key(self): contents = ['a bb ccc bb bb a'] result_path = self.create_temp_file() snippets.model_group_by_key(contents, result_path) expected = [('a', 2), ('bb', 3), ('ccc', 1)] self.assertEqual([str(s) for s in expected], self.get_output(result_path)) def test_model_co_group_by_key_tuple(self): email_list = [['a', '[email protected]'], ['b', '[email protected]']] phone_list = [['a', 'x4312'], ['b', 'x8452']] result_path = self.create_temp_file() snippets.model_co_group_by_key_tuple(email_list, phone_list, result_path) expect = ['a; [email protected]; x4312', 'b; [email protected]; x8452'] self.assertEqual(expect, self.get_output(result_path)) def test_model_join_using_side_inputs(self): name_list = ['a', 'b'] email_list = [['a', '[email protected]'], ['b', '[email protected]']] phone_list = [['a', 'x4312'], ['b', 'x8452']] result_path = self.create_temp_file() snippets.model_join_using_side_inputs( name_list, email_list, phone_list, result_path) expect = ['a; [email protected]; x4312', 'b; 
[email protected]; x8452'] self.assertEqual(expect, self.get_output(result_path)) class CombineTest(unittest.TestCase): """Tests for model/combine.""" def test_global_sum(self): pc = [1, 2, 3] # [START global_sum] result = pc | beam.CombineGlobally(sum) # [END global_sum] self.assertEqual([6], result) def test_combine_values(self): occurences = [('cat', 1), ('cat', 5), ('cat', 9), ('dog', 5), ('dog', 2)] # [START combine_values] first_occurences = occurences | beam.GroupByKey() | beam.CombineValues(min) # [END combine_values] self.assertEqual({('cat', 1), ('dog', 2)}, set(first_occurences)) def test_combine_per_key(self): player_accuracies = [ ('cat', 1), ('cat', 5), ('cat', 9), ('cat', 1), ('dog', 5), ('dog', 2)] # [START combine_per_key] avg_accuracy_per_player = (player_accuracies | beam.CombinePerKey( beam.combiners.MeanCombineFn())) # [END combine_per_key] self.assertEqual({('cat', 4.0), ('dog', 3.5)}, set(avg_accuracy_per_player)) def test_combine_concat(self): pc = ['a', 'b'] # [START combine_concat] def concat(values, separator=', '): return separator.join(values) with_commas = pc | beam.CombineGlobally(concat) with_dashes = pc | beam.CombineGlobally(concat, separator='-') # [END combine_concat] self.assertEqual(1, len(with_commas)) self.assertTrue(with_commas[0] in {'a, b', 'b, a'}) self.assertEqual(1, len(with_dashes)) self.assertTrue(with_dashes[0] in {'a-b', 'b-a'}) def test_bounded_sum(self): # [START combine_bounded_sum] pc = [1, 10, 100, 1000] def bounded_sum(values, bound=500): return min(sum(values), bound) small_sum = pc | beam.CombineGlobally(bounded_sum) # [500] large_sum = pc | beam.CombineGlobally(bounded_sum, bound=5000) # [1111] # [END combine_bounded_sum] self.assertEqual([500], small_sum) self.assertEqual([1111], large_sum) def test_combine_reduce(self): factors = [2, 3, 5, 7] # [START combine_reduce] import functools import operator product = factors | beam.CombineGlobally( functools.partial(reduce, operator.mul), 1) # [END combine_reduce] self.assertEqual([210], product) def test_custom_average(self): pc = [2, 3, 5, 7] # [START combine_custom_average] class AverageFn(beam.CombineFn): def create_accumulator(self): return (0.0, 0) def add_input(self, (sum, count), input): return sum + input, count + 1 def merge_accumulators(self, accumulators): sums, counts = zip(*accumulators) return sum(sums), sum(counts) def extract_output(self, (sum, count)): return sum / count if count else float('NaN') average = pc | beam.CombineGlobally(AverageFn()) # [END combine_custom_average] self.assertEqual([4.25], average) def test_keys(self): occurrences = [('cat', 1), ('cat', 5), ('dog', 5), ('cat', 9), ('dog', 2)] unique_keys = occurrences | snippets.Keys() self.assertEqual({'cat', 'dog'}, set(unique_keys)) def test_count(self): occurrences = ['cat', 'dog', 'cat', 'cat', 'dog'] perkey_counts = occurrences | snippets.Count() self.assertEqual({('cat', 3), ('dog', 2)}, set(perkey_counts)) if __name__ == '__main__': logging.getLogger().setLevel(logging.INFO) unittest.main()<|fim▁end|>
old_file_name = self._tmp_dir + os.sep + old_name
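The `AverageFn` combiner in the row above is just a (sum, count) accumulator; here is the same arithmetic in plain Python, checked against the row's own expected value of 4.25:

def add_input(acc, value):
    s, n = acc
    return (s + value, n + 1)

def merge_accumulators(accs):
    sums, counts = zip(*accs)
    return (sum(sums), sum(counts))

def extract_output(acc):
    s, n = acc
    return s / n if n else float("nan")

# each element gets its own accumulator, then all are merged, as a runner may do
acc = merge_accumulators([add_input((0.0, 0), x) for x in (2, 3, 5, 7)])
assert extract_output(acc) == 4.25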
<|file_name|>edraak_devstack.py<|end_file_name|><|fim▁begin|><|fim▁hole|># reflected in `cms/envs/edraak_devstack.py`<|fim▁end|>
from .devstack import * from .edraak_common import * # WARNING: Don't just add/delete settings from here. Make sure the settings are
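The row above layers Django settings via star-imports; a toy demonstration of the later-import-wins semantics that layering relies on (the setting names here are made up):

# simulate `from .devstack import *` followed by `from .edraak_common import *`
ns = {}
exec("DEBUG = False\nCACHE_TIMEOUT = 300", ns)  # first layer
exec("DEBUG = True", ns)                        # later layer overrides on name clash
assert ns["DEBUG"] is True and ns["CACHE_TIMEOUT"] == 300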
<|file_name|>ipython_config.py<|end_file_name|><|fim▁begin|># Configuration file for ipython. #------------------------------------------------------------------------------ # InteractiveShellApp(Configurable) configuration #------------------------------------------------------------------------------ ## A Mixin for applications that start InteractiveShell instances. # # Provides configurables for loading extensions and executing files as part of # configuring a Shell environment. # # The following methods should be called by the :meth:`initialize` method of the # subclass: # # - :meth:`init_path` # - :meth:`init_shell` (to be implemented by the subclass) # - :meth:`init_gui_pylab` # - :meth:`init_extensions` # - :meth:`init_code` ## Execute the given command string. #c.InteractiveShellApp.code_to_run = '' ## Run the file referenced by the PYTHONSTARTUP environment variable at IPython # startup. #c.InteractiveShellApp.exec_PYTHONSTARTUP = True ## List of files to run at IPython startup. #c.InteractiveShellApp.exec_files = [] ## lines of code to run at IPython startup. #c.InteractiveShellApp.exec_lines = [] ## A list of dotted module names of IPython extensions to load. #c.InteractiveShellApp.extensions = [] ## dotted module name of an IPython extension to load. #c.InteractiveShellApp.extra_extension = '' ## A file to be run #c.InteractiveShellApp.file_to_run = '' ## Enable GUI event loop integration with any of ('glut', 'gtk', 'gtk2', 'gtk3', # 'osx', 'pyglet', 'qt', 'qt4', 'qt5', 'tk', 'wx', 'gtk2', 'qt4'). #c.InteractiveShellApp.gui = None ## Should variables loaded at startup (by startup files, exec_lines, etc.) be # hidden from tools like %who? #c.InteractiveShellApp.hide_initial_ns = True ## Configure matplotlib for interactive use with the default matplotlib backend. #c.InteractiveShellApp.matplotlib = None ## Run the module as a script. #c.InteractiveShellApp.module_to_run = '' ## Pre-load matplotlib and numpy for interactive use, selecting a particular # matplotlib backend and loop integration. #c.InteractiveShellApp.pylab = None ## If true, IPython will populate the user namespace with numpy, pylab, etc. and # an ``import *`` is done from numpy and pylab, when using pylab mode. # # When False, pylab mode should not import any names into the user namespace. #c.InteractiveShellApp.pylab_import_all = True ## Reraise exceptions encountered loading IPython extensions? #c.InteractiveShellApp.reraise_ipython_extension_failures = False #------------------------------------------------------------------------------ # Application(SingletonConfigurable) configuration #------------------------------------------------------------------------------ ## This is an application. ## The date format used by logging formatters for %(asctime)s #c.Application.log_datefmt = '%Y-%m-%d %H:%M:%S' ## The Logging format template #c.Application.log_format = '[%(name)s]%(highlevel)s %(message)s' ## Set the log level by value or name. #c.Application.log_level = 30 #------------------------------------------------------------------------------ # BaseIPythonApplication(Application) configuration #------------------------------------------------------------------------------ ## IPython: an enhanced interactive Python shell. ## Whether to create profile dir if it doesn't exist #c.BaseIPythonApplication.auto_create = False ## Whether to install the default config files into the profile dir. 
If a new # profile is being created, and IPython contains config files for that profile, # then they will be staged into the new directory. Otherwise, default config # files will be automatically generated. #c.BaseIPythonApplication.copy_config_files = False ## Path to an extra config file to load. # # If specified, load this config file in addition to any other IPython config. #c.BaseIPythonApplication.extra_config_file = '' ## The name of the IPython directory. This directory is used for logging # configuration (through profiles), history storage, etc. The default is usually # $HOME/.ipython. This option can also be specified through the environment # variable IPYTHONDIR. #c.BaseIPythonApplication.ipython_dir = '' ## Whether to overwrite existing config files when copying #c.BaseIPythonApplication.overwrite = False ## The IPython profile to use. #c.BaseIPythonApplication.profile = 'default' ## Create a massive crash report when IPython encounters what may be an internal # error. The default is to append a short message to the usual traceback #c.BaseIPythonApplication.verbose_crash = False #------------------------------------------------------------------------------ # TerminalIPythonApp(BaseIPythonApplication,InteractiveShellApp) configuration #------------------------------------------------------------------------------ ## Whether to display a banner upon starting IPython. #c.TerminalIPythonApp.display_banner = True ## If a command or file is given via the command-line, e.g. 'ipython foo.py', # start an interactive shell after executing the file or command. #c.TerminalIPythonApp.force_interact = False ## Class to use to instantiate the TerminalInteractiveShell object. Useful for # custom Frontends #c.TerminalIPythonApp.interactive_shell_class = 'IPython.terminal.interactiveshell.TerminalInteractiveShell' ## Start IPython quickly by skipping the loading of config files. #c.TerminalIPythonApp.quick = False #------------------------------------------------------------------------------ # InteractiveShell(SingletonConfigurable) configuration #------------------------------------------------------------------------------ ## An enhanced, interactive shell for Python. ## 'all', 'last', 'last_expr' or 'none', 'last_expr_or_assign' specifying which # nodes should be run interactively (displaying output from expressions). #c.InteractiveShell.ast_node_interactivity = 'last_expr' ## A list of ast.NodeTransformer subclass instances, which will be applied to # user input before code is run. #c.InteractiveShell.ast_transformers = [] ## Make IPython automatically call any callable object even if you didn't type # explicit parentheses. For example, 'str 43' becomes 'str(43)' automatically. # The value can be '0' to disable the feature, '1' for 'smart' autocall, where # it is not applied if there are no more arguments on the line, and '2' for # 'full' autocall, where all callable objects are automatically called (even if # no arguments are present). #c.InteractiveShell.autocall = 0 ## Autoindent IPython code entered interactively. #c.InteractiveShell.autoindent = True ## Enable magic commands to be called without the leading %. #c.InteractiveShell.automagic = True ## The part of the banner to be printed before the profile #c.InteractiveShell.banner1 = "Python 3.5.2 (default, Nov 23 2017, 16:37:01) \nType 'copyright', 'credits' or 'license' for more information\nIPython 6.2.1 -- An enhanced Interactive Python. Type '?' 
for help.\n" ## The part of the banner to be printed after the profile #c.InteractiveShell.banner2 = '' ## Set the size of the output cache. The default is 1000, you can change it # permanently in your config file. Setting it to 0 completely disables the # caching system, and the minimum value accepted is 3 (if you provide a value # less than 3, it is reset to 0 and a warning is issued). This limit is defined # because otherwise you'll spend more time re-flushing a too small cache than # working #c.InteractiveShell.cache_size = 1000 ## Use colors for displaying information about objects. Because this information # is passed through a pager (like 'less'), and some pagers get confused with # color codes, this capability can be turned off. #c.InteractiveShell.color_info = True ## Set the color scheme (NoColor, Neutral, Linux, or LightBG). #c.InteractiveShell.colors = 'Neutral' ## #c.InteractiveShell.debug = False ## Don't call post-execute functions that have failed in the past. #c.InteractiveShell.disable_failing_post_execute = False ## If True, anything that would be passed to the pager will be displayed as # regular output instead. #c.InteractiveShell.display_page = False ## (Provisional API) enables html representation in mime bundles sent to pagers. #c.InteractiveShell.enable_html_pager = False ## Total length of command history #c.InteractiveShell.history_length = 10000 ## The number of saved history entries to be loaded into the history buffer at # startup. #c.InteractiveShell.history_load_length = 1000 ## #c.InteractiveShell.ipython_dir = '' ## Start logging to the given file in append mode. Use `logfile` to specify a log # file to **overwrite** logs to. #c.InteractiveShell.logappend = '' ## The name of the logfile to use. #c.InteractiveShell.logfile = '' ## Start logging to the default log file in overwrite mode. Use `logappend` to # specify a log file to **append** logs to. #c.InteractiveShell.logstart = False ## #c.InteractiveShell.object_info_string_level = 0 ## Automatically call the pdb debugger after every exception. #c.InteractiveShell.pdb = False ## Deprecated since IPython 4.0 and ignored since 5.0, set # TerminalInteractiveShell.prompts object directly. #c.InteractiveShell.prompt_in1 = 'In [\\#]: ' ## Deprecated since IPython 4.0 and ignored since 5.0, set # TerminalInteractiveShell.prompts object directly. #c.InteractiveShell.prompt_in2 = ' .\\D.: ' ## Deprecated since IPython 4.0 and ignored since 5.0, set # TerminalInteractiveShell.prompts object directly. #c.InteractiveShell.prompt_out = 'Out[\\#]: ' ## Deprecated since IPython 4.0 and ignored since 5.0, set # TerminalInteractiveShell.prompts object directly. #c.InteractiveShell.prompts_pad_left = True ## #c.InteractiveShell.quiet = False ## #c.InteractiveShell.separate_in = '\n' ## #c.InteractiveShell.separate_out = '' ## #c.InteractiveShell.separate_out2 = '' ## Show rewritten input, e.g. for autocall. #c.InteractiveShell.show_rewritten_input = True ## Enables rich html representation of docstrings. (This requires the docrepr # module). #c.InteractiveShell.sphinxify_docstring = False ## #c.InteractiveShell.wildcards_case_sensitive = True ## Switch modes for the IPython exception handlers. 
#c.InteractiveShell.xmode = 'Context' #------------------------------------------------------------------------------ # TerminalInteractiveShell(InteractiveShell) configuration #------------------------------------------------------------------------------ ## Set to confirm when you try to exit IPython with an EOF (Control-D in Unix, # Control-Z/Enter in Windows). By typing 'exit' or 'quit', you can force a # direct exit without any confirmation. #c.TerminalInteractiveShell.confirm_exit = True ## Options for displaying tab completions, 'column', 'multicolumn', and # 'readlinelike'. These options are for `prompt_toolkit`, see `prompt_toolkit` # documentation for more information. #c.TerminalInteractiveShell.display_completions = 'multicolumn' ## Shortcut style to use at the prompt. 'vi' or 'emacs'. #c.TerminalInteractiveShell.editing_mode = 'emacs' ## Set the editor used by IPython (defaults to $EDITOR/vi/notepad). #c.TerminalInteractiveShell.editor = 'nano' ## Enable vi (v) or Emacs (C-X C-E) shortcuts to open an external editor. This is # in addition to the F2 binding, which is always enabled. #c.TerminalInteractiveShell.extra_open_editor_shortcuts = False ## Provide an alternative handler to be called when the user presses Return. This # is an advanced option intended for debugging, which may be changed or removed # in later releases. #c.TerminalInteractiveShell.handle_return = None ## Highlight matching brackets. #c.TerminalInteractiveShell.highlight_matching_brackets = True ## The name or class of a Pygments style to use for syntax # highlighting: # rrt, abap, borland, monokai, trac, fruity, vs, autumn, igor, friendly, vim, bw, native, algol_nu, rainbow_dash, emacs, murphy, manni, paraiso-light, arduino, colorful, algol, lovelace, perldoc, tango, xcode, pastie, paraiso-dark, default #c.TerminalInteractiveShell.highlighting_style = traitlets.Undefined ## Override highlighting format for specific tokens #c.TerminalInteractiveShell.highlighting_style_overrides = {} ## Enable mouse support in the prompt (Note: prevents selecting text with the # mouse) #c.TerminalInteractiveShell.mouse_support = False ## Class used to generate Prompt token for prompt_toolkit #c.TerminalInteractiveShell.prompts_class = 'IPython.terminal.prompts.Prompts' ## Use `raw_input` for the REPL, without completion and prompt colors. # # Useful when controlling IPython as a subprocess, and piping STDIN/OUT/ERR. # Known usages are: IPython's own testing machinery, and emacs inferior-shell # integration through elpy. # # This mode defaults to `True` if the `IPY_TEST_SIMPLE_PROMPT` environment # variable is set, or the current terminal is not a tty. #c.TerminalInteractiveShell.simple_prompt = False ## Number of lines at the bottom of the screen to reserve for the completion menu #c.TerminalInteractiveShell.space_for_menu = 6 ## Automatically set the terminal title #c.TerminalInteractiveShell.term_title = True ## Customize the terminal title format. This is a Python format string. # Available substitutions are: {cwd}.<|fim▁hole|>## Use 24bit colors instead of 256 colors in prompt highlighting. 
If your # terminal supports true color, the following command should print 'TRUECOLOR' # in orange: printf "\x1b[38;2;255;100;0mTRUECOLOR\x1b[0m\n" #c.TerminalInteractiveShell.true_color = False #------------------------------------------------------------------------------ # HistoryAccessor(HistoryAccessorBase) configuration #------------------------------------------------------------------------------ ## Access the history database without adding to it. # # This is intended for use by standalone history tools. IPython shells use # HistoryManager, below, which is a subclass of this. ## Options for configuring the SQLite connection # # These options are passed as keyword args to sqlite3.connect when establishing # database connections. #c.HistoryAccessor.connection_options = {} ## enable the SQLite history # # set enabled=False to disable the SQLite history, in which case there will be # no stored history, no SQLite connection, and no background saving thread. # This may be necessary in some threaded environments where IPython is embedded. #c.HistoryAccessor.enabled = True ## Path to file to use for SQLite history database. # # By default, IPython will put the history database in the IPython profile # directory. If you would rather share one history among profiles, you can set # this value in each, so that they are consistent. # # Due to an issue with fcntl, SQLite is known to misbehave on some NFS mounts. # If you see IPython hanging, try setting this to something on a local disk, # e.g:: # # ipython --HistoryManager.hist_file=/tmp/ipython_hist.sqlite # # you can also use the specific value `:memory:` (including the colons at both # ends but not the backticks), to avoid creating a history file. #c.HistoryAccessor.hist_file = '' #------------------------------------------------------------------------------ # HistoryManager(HistoryAccessor) configuration #------------------------------------------------------------------------------ ## A class to organize all history-related functionality in one place. ## Write to database every x commands (higher values save disk access & power). # Values of 1 or less effectively disable caching. #c.HistoryManager.db_cache_size = 0 ## Should the history database include output? (default: no) #c.HistoryManager.db_log_output = False #------------------------------------------------------------------------------ # ProfileDir(LoggingConfigurable) configuration #------------------------------------------------------------------------------ ## An object to manage the profile directory and its resources. # # The profile directory is used by all IPython applications, to manage # configuration, logging and security. # # This object knows how to find, create and manage these directories. This # should be used by any code that wants to handle profiles. ## Set the profile location directly. This overrides the logic used by the # `profile` option. #c.ProfileDir.location = '' #------------------------------------------------------------------------------ # BaseFormatter(Configurable) configuration #------------------------------------------------------------------------------ ## A base formatter class that is configurable. # # This formatter should usually be used as the base class of all formatters. It # is a traited :class:`Configurable` class and includes an extensible API for # users to determine how their objects are formatted. The following logic is # used to find a function to format a given object. # # 1. 
The object is introspected to see if it has a method with the name # :attr:`print_method`. If it does, that object is passed to that method # for formatting. # 2. If no print method is found, three internal dictionaries are consulted # to find a print method: :attr:`singleton_printers`, :attr:`type_printers` # and :attr:`deferred_printers`. # # Users should use these dictionaries to register functions that will be used to # compute the format data for their objects (if those objects don't have the # special print methods). The easiest way of using these dictionaries is through # the :meth:`for_type` and :meth:`for_type_by_name` methods. # # If no function/callable is found to compute the format data, ``None`` is # returned and this format type is not used. ## #c.BaseFormatter.deferred_printers = {} ## #c.BaseFormatter.enabled = True ## #c.BaseFormatter.singleton_printers = {} ## #c.BaseFormatter.type_printers = {} #------------------------------------------------------------------------------ # PlainTextFormatter(BaseFormatter) configuration #------------------------------------------------------------------------------ ## The default pretty-printer. # # This uses :mod:`IPython.lib.pretty` to compute the format data of the object. # If the object cannot be pretty printed, :func:`repr` is used. See the # documentation of :mod:`IPython.lib.pretty` for details on how to write pretty # printers. Here is a simple example:: # # def dtype_pprinter(obj, p, cycle): # if cycle: # return p.text('dtype(...)') # if hasattr(obj, 'fields'): # if obj.fields is None: # p.text(repr(obj)) # else: # p.begin_group(7, 'dtype([') # for i, field in enumerate(obj.descr): # if i > 0: # p.text(',') # p.breakable() # p.pretty(field) # p.end_group(7, '])') ## #c.PlainTextFormatter.float_precision = '' ## Truncate large collections (lists, dicts, tuples, sets) to this size. # # Set to 0 to disable truncation. #c.PlainTextFormatter.max_seq_length = 1000 ## #c.PlainTextFormatter.max_width = 79 ## #c.PlainTextFormatter.newline = '\n' ## #c.PlainTextFormatter.pprint = True ## #c.PlainTextFormatter.verbose = False #------------------------------------------------------------------------------ # Completer(Configurable) configuration #------------------------------------------------------------------------------ ## Enable unicode completions, e.g. \alpha<tab> . Includes completion of latex # commands, unicode names, and expanding unicode characters back to latex # commands. #c.Completer.backslash_combining_completions = True ## Enable debug for the Completer. Mostly print extra information for # experimental jedi integration. #c.Completer.debug = False ## Activate greedy completion PENDING DEPRECATION. This is now mostly taken care # of with Jedi. # # This will enable completion on elements of lists, results of function calls, # etc., but can be unsafe because the code is actually evaluated on TAB. #c.Completer.greedy = False ## Experimental: restrict time (in milliseconds) during which Jedi can compute # types. Set to 0 to stop computing types. A non-zero value lower than 100ms may # hurt performance by preventing Jedi from building its cache. #c.Completer.jedi_compute_type_timeout = 400 ## Experimental: Use Jedi to generate autocompletions. 
Defaults to True if Jedi is # installed #c.Completer.use_jedi = True #------------------------------------------------------------------------------ # IPCompleter(Completer) configuration #------------------------------------------------------------------------------ ## Extension of the completer class with IPython-specific features ## DEPRECATED as of version 5.0. # # Instruct the completer to use __all__ for the completion # # Specifically, when completing on ``object.<tab>``. # # When True: only those names in obj.__all__ will be included. # # When False [default]: the __all__ attribute is ignored #c.IPCompleter.limit_to__all__ = False ## Whether to merge completion results into a single list # # If False, only the completion results from the first non-empty completer will # be returned. #c.IPCompleter.merge_completions = True ## Instruct the completer to omit private method names # # Specifically, when completing on ``object.<tab>``. # # When 2 [default]: all names that start with '_' will be excluded. # # When 1: all 'magic' names (``__foo__``) will be excluded. # # When 0: nothing will be excluded. #c.IPCompleter.omit__names = 2 #------------------------------------------------------------------------------ # ScriptMagics(Magics) configuration #------------------------------------------------------------------------------ ## Magics for talking to scripts # # This defines a base `%%script` cell magic for running a cell with a program in # a subprocess, and registers a few top-level magics that call %%script with # common interpreters. ## Extra script cell magics to define # # This generates simple wrappers of `%%script foo` as `%%foo`. # # If you want to add script magics that aren't on your path, specify them in # script_paths #c.ScriptMagics.script_magics = [] ## Dict mapping short 'ruby' names to full paths, such as '/opt/secret/bin/ruby' # # Only necessary for items in script_magics where the default path will not find # the right interpreter. #c.ScriptMagics.script_paths = {} #------------------------------------------------------------------------------ # LoggingMagics(Magics) configuration #------------------------------------------------------------------------------ ## Magics related to all logging machinery. ## Suppress output of log state when logging is enabled #c.LoggingMagics.quiet = False #------------------------------------------------------------------------------ # StoreMagics(Magics) configuration #------------------------------------------------------------------------------ ## Lightweight persistence for Python variables. # # Provides the %store magic. ## If True, any %store-d variables will be automatically restored when IPython # starts. #c.StoreMagics.autorestore = False<|fim▁end|>
#c.TerminalInteractiveShell.term_title_format = 'IPython: {cwd}'
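A note on how the commented defaults in the row above become live settings (illustration only; the option names are standard IPython 6.x traitlets shown in the row, but the chosen values and the path are examples): IPython injects a configuration object into ipython_config.py through get_config(), and any uncommented assignment on it overrides the generated default. A minimal sketch:

# ipython_config.py -- hypothetical overrides for a custom profile
c = get_config()  # injected by IPython when this file is loaded

# vi-style line editing and no startup banner
c.TerminalInteractiveShell.editing_mode = 'vi'
c.TerminalIPythonApp.display_banner = False

# share one history database across profiles (path is illustrative)
c.HistoryAccessor.hist_file = '/home/user/.ipython/shared_hist.sqlite'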
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#! /usr/bin/python # Module: # Author: Maxim Borisyak, 2014 import functools<|fim▁hole|> partial = functools.partial from pattern import MatchError from pattern import case from pattern import to_pattern # Type patterns from pattern import a_class from pattern import a_str from pattern import a_float from pattern import an_int # General patterns from pattern import some from pattern import otherwise from pattern import constant from match import match from match import match_f from match import case_f from match import match_method from match import case_method from match import merge_matches from match import to_match<|fim▁end|>
<|file_name|>YOGClientGameListManager.cpp<|end_file_name|><|fim▁begin|>/* Copyright (C) 2008 Bradley Arsenault This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ #include "YOGClientGameListManager.h" #include "NetMessage.h" #include "YOGClientGameListListener.h" YOGClientGameListManager::YOGClientGameListManager(YOGClient* client) : client(client) { } void YOGClientGameListManager::recieveMessage(boost::shared_ptr<NetMessage> message) { Uint8 type = message->getMessageType(); /// This receives a game list update message if(type==MNetUpdateGameList) { shared_ptr<NetUpdateGameList> info = static_pointer_cast<NetUpdateGameList>(message); info->applyDifferences(games); sendToListeners(); } } const std::list<YOGGameInfo>& YOGClientGameListManager::getGameList() const { return games; } std::list<YOGGameInfo>& YOGClientGameListManager::getGameList() { return games; } <|fim▁hole|> for(std::list<YOGGameInfo>::iterator i=games.begin(); i!=games.end(); ++i) { if(i->getGameID() == gameID) { return *i; } } return YOGGameInfo(); } void YOGClientGameListManager::addListener(YOGClientGameListListener* listener) { listeners.push_back(listener); } void YOGClientGameListManager::removeListener(YOGClientGameListListener* listener) { listeners.remove(listener); } void YOGClientGameListManager::sendToListeners() { for(std::list<YOGClientGameListListener*>::iterator i = listeners.begin(); i!=listeners.end(); ++i) { (*i)->gameListUpdated(); } }<|fim▁end|>
YOGGameInfo YOGClientGameListManager::getGameInfo(Uint16 gameID) {
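The C++ row above is a textbook observer pattern: listeners register with the manager, and sendToListeners fans out one gameListUpdated notification after the server's differences are applied. A compact Python sketch of the same shape (names are illustrative; this is not the Globulation 2 API):

class GameListManager:
    def __init__(self):
        self.games = []        # current game-info records
        self.listeners = []    # objects exposing game_list_updated()

    def add_listener(self, listener):
        self.listeners.append(listener)

    def remove_listener(self, listener):
        self.listeners.remove(listener)

    def receive_update(self, new_games):
        # apply the update, then notify every registered listener once
        self.games = list(new_games)
        for listener in self.listeners:
            listener.game_list_updated()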
<|file_name|>box-reflection.tsx<|end_file_name|><|fim▁begin|>import React from 'react'; import * as pc from '../../../../'; import { AssetLoader } from '../../app/helpers/loader'; // @ts-ignore: library file import import Panel from '@playcanvas/pcui/Panel/component'; // @ts-ignore: library file import import SliderInput from '@playcanvas/pcui/SliderInput/component'; // @ts-ignore: library file import import LabelGroup from '@playcanvas/pcui/LabelGroup/component'; // @ts-ignore: library file import import BindingTwoWay from '@playcanvas/pcui/BindingTwoWay'; // @ts-ignore: library file import import SelectInput from '@playcanvas/pcui/SelectInput/component'; // @ts-ignore: library file import import { Observer } from '@playcanvas/observer'; class BoxReflectionExample { static CATEGORY = 'Graphics'; static NAME = 'Box Reflection'; load() { return <> <AssetLoader name='script' type='script' url='/static/scripts/camera/orbit-camera.js' /> <AssetLoader name='script' type='script' url='/static/scripts/utils/cubemap-renderer.js' /> <AssetLoader name='normal' type='texture' url='/static/assets/textures/normal-map.png' /> </>; } controls(data: Observer) { return <> <Panel headerText='Settings'> {<LabelGroup text='Update'> <SelectInput binding={new BindingTwoWay()} link={{ observer: data, path: 'settings.updateFrequency' }} type="number" options={[ { v: 0, t: 'Once' }, { v: 1, t: 'Every frame' }, { v: 10, t: 'Every 10 frames' }, { v: 30, t: 'Every 30 frames' } ]} /> </LabelGroup>} <LabelGroup text='Shininess'> <SliderInput binding={new BindingTwoWay()} link={{ observer: data, path: 'settings.shininess' }} min={0} max={100} precision={0}/> </LabelGroup> <LabelGroup text='Metalness'> <SliderInput binding={new BindingTwoWay()} link={{ observer: data, path: 'settings.metalness' }} min={0} max={1} precision={2}/> </LabelGroup> <LabelGroup text='Bumpiness'> <SliderInput binding={new BindingTwoWay()} link={{ observer: data, path: 'settings.bumpiness' }} min={0} max={1} precision={2}/> </LabelGroup> </Panel> </>; } example(canvas: HTMLCanvasElement, assets: any, data: any): void { // Create the application and start the update loop const app = new pc.Application(canvas, {}); app.start(); data.set('settings', { updateFrequency: 10, shininess: 90, metalness: 0.7, bumpiness: 0.2 }); // Set the canvas to fill the window and automatically change resolution to be the same as the canvas size app.setCanvasFillMode(pc.FILLMODE_FILL_WINDOW); app.setCanvasResolution(pc.RESOLUTION_AUTO); // create a layer for objects that do not render into the reflection cubemap const excludedLayer = new pc.Layer({ name: "Excluded" }); app.scene.layers.push(excludedLayer); // get world layer const worldLayer = app.scene.layers.getLayerByName("World"); // create an envAtlas texture, which will hold prefiltered lighting generated from the cubemap. 
// This represents a reflection prefiltered for different levels of roughness const envAtlas = new pc.Texture(app.graphicsDevice, { width: 512, height: 512, format: pc.PIXELFORMAT_R8_G8_B8_A8, type: pc.TEXTURETYPE_RGBM, projection: pc.TEXTUREPROJECTION_EQUIRECT, addressU: pc.ADDRESS_CLAMP_TO_EDGE, addressV: pc.ADDRESS_CLAMP_TO_EDGE, mipmaps: false }); // material for the walls const roomMaterial = new pc.StandardMaterial(); roomMaterial.useMetalness = true; roomMaterial.diffuse = pc.Color.WHITE; roomMaterial.normalMap = assets.normal.resource; roomMaterial.normalMapTiling.set(5, 5); roomMaterial.bumpiness = 0.1; roomMaterial.shininess = 90; // @ts-ignore roomMaterial.envAtlas = envAtlas; // use reflection from env atlas roomMaterial.metalness = 0.5; // the material uses a box-projected cubemap for reflections. Set its bounding box to the size of the room // so that the reflections line up roomMaterial.cubeMapProjection = pc.CUBEPROJ_BOX; roomMaterial.cubeMapProjectionBox = new pc.BoundingBox(new pc.Vec3(0, 200, 0), new pc.Vec3(400, 200, 400)); roomMaterial.update(); // material for the magenta emissive beams const emissiveMaterial = new pc.StandardMaterial(); emissiveMaterial.emissive = pc.Color.MAGENTA; emissiveMaterial.diffuse = pc.Color.BLACK; emissiveMaterial.update(); // material for the white sphere representing an omni light const lightMaterial = new pc.StandardMaterial(); lightMaterial.emissive = pc.Color.WHITE; lightMaterial.diffuse = pc.Color.BLACK; lightMaterial.update(); // material for the reflective sphere in the center const sphereMaterial = new pc.StandardMaterial(); sphereMaterial.useMetalness = true; sphereMaterial.diffuse = pc.Color.WHITE; sphereMaterial.normalMap = assets.normal.resource; sphereMaterial.normalMapTiling.set(5, 5); sphereMaterial.bumpiness = 0.7; sphereMaterial.shininess = 90; sphereMaterial.metalness = 0.6; // @ts-ignore sphereMaterial.envAtlas = envAtlas; // use reflection from env atlas sphereMaterial.update(); // set up video playback into a texture const videoTexture = new pc.Texture(app.graphicsDevice, { format: pc.PIXELFORMAT_R5_G6_B5, mipmaps: false, minFilter: pc.FILTER_LINEAR, magFilter: pc.FILTER_LINEAR, addressU: pc.ADDRESS_CLAMP_TO_EDGE, addressV: pc.ADDRESS_CLAMP_TO_EDGE }); // create an HTML element with the video const video: HTMLVideoElement = document.createElement('video'); video.id = 'vid'; video.loop = true; video.muted = true; video.autoplay = true; video.playsInline = true; video.crossOrigin = "anonymous"; video.setAttribute('style', 'display: block; width: 1px; height: 1px; position: absolute; opacity: 0; z-index: -1000; top: 0px; pointer-events: none'); video.src = '/static/assets/video/SampleVideo_1280x720_1mb.mp4'; document.body.append(video); video.addEventListener('canplaythrough', function () { videoTexture.setSource(video); }); // material used on the TV screen to display the video texture const screenMaterial = new pc.StandardMaterial(); screenMaterial.useLighting = false; screenMaterial.emissiveMap = videoTexture; screenMaterial.update(); // helper function to create a 3d primitive including its material function createPrimitive(primitiveType: string, position: pc.Vec3, scale: pc.Vec3, material: pc.Material) { // create the primitive using the material const primitive = new pc.Entity(); primitive.addComponent('render', { type: primitiveType, material: material, layers: [worldLayer.id, excludedLayer.id] }); // set position and scale and add it to the scene primitive.setLocalPosition(position); primitive.setLocalScale(scale); 
app.root.addChild(primitive); } // create the floor and ceiling from boxes createPrimitive("box", new pc.Vec3(0, 0, 0), new pc.Vec3(800, 2, 800), roomMaterial); createPrimitive("box", new pc.Vec3(0, 400, 0), new pc.Vec3(800, 2, 800), roomMaterial); // walls createPrimitive("box", new pc.Vec3(400, 200, 0), new pc.Vec3(2, 400, 800), roomMaterial); createPrimitive("box", new pc.Vec3(-400, 200, 0), new pc.Vec3(2, 400, 800), roomMaterial); createPrimitive("box", new pc.Vec3(0, 200, -400), new pc.Vec3(800, 400, 0), roomMaterial); createPrimitive("box", new pc.Vec3(0, 200, 400), new pc.Vec3(800, 400, 0), roomMaterial); // emissive pillars createPrimitive("box", new pc.Vec3(400, 200, -50), new pc.Vec3(20, 400, 20), emissiveMaterial); createPrimitive("box", new pc.Vec3(400, 200, 50), new pc.Vec3(20, 400, 20), emissiveMaterial); createPrimitive("box", new pc.Vec3(-400, 200, 50), new pc.Vec3(20, 400, 20), emissiveMaterial); createPrimitive("box", new pc.Vec3(-400, 200, -50), new pc.Vec3(20, 400, 20), emissiveMaterial); createPrimitive("box", new pc.Vec3(0, 400, 50), new pc.Vec3(800, 20, 20), emissiveMaterial); createPrimitive("box", new pc.Vec3(0, 400, -50), new pc.Vec3(800, 20, 20), emissiveMaterial); // screen createPrimitive("box", new pc.Vec3(0, 200, 400), new pc.Vec3(500, 250, 5), screenMaterial); // sphere createPrimitive("sphere", new pc.Vec3(0, 150, 0), new pc.Vec3(150, 150, 150), sphereMaterial); // create an omni light that orbits the room to avoid it being completely dark const lightOmni = new pc.Entity(); lightOmni.addComponent("light", { type: "omni", layers: [excludedLayer.id], // add it to the excluded layer; we don't want the light captured in the reflection castShadows: false, color: pc.Color.WHITE, intensity: 0.2, range: 1000 }); // add a white sphere to the light so that we can see where it is. This sphere is excluded from the reflections. lightOmni.addComponent("render", { type: "sphere", layers: [excludedLayer.id], material: lightMaterial }); lightOmni.setLocalScale(20, 20, 20); app.root.addChild(lightOmni); // create an Entity with a camera component const camera = new pc.Entity(); camera.addComponent("camera", { fov: 100, layers: [worldLayer.id, excludedLayer.id], farClip: 1500 }); camera.setLocalPosition(270, 90, -260); // add an orbit camera script with mouse and touch support camera.addComponent("script"); camera.script.create("orbitCamera", { attributes: { inertiaFactor: 0.2, distanceMax: 390, frameOnStart: false } }); camera.script.create("orbitCameraInputMouse"); camera.script.create("orbitCameraInputTouch"); app.root.addChild(camera); // create a probe object with a cubemapRenderer script, which takes care of rendering the dynamic cubemap const probe = new pc.Entity(); probe.addComponent('script'); // add a camera component to the probe - this defines camera properties for cubemap rendering probe.addComponent('camera', { // optimization - no need to clear as all pixels get overwritten clearColorBuffer: false, // priority - render before world camera priority: -1, // only render meshes on the worldLayer (and not excluded layer) layers: [worldLayer.id], // disable as this is not a camera that renders the cube map but only a container for cube map rendering properties enabled: false, nearClip: 1, farClip: 500 }); // Add a cubemap renderer script, which renders to a cubemap of size 128 with mipmaps, which is directly usable // as a lighting source for envAtlas generation // Position it in the center of the room. 
probe.script.create('cubemapRenderer', { attributes: { resolution: 128, mipmaps: true, depth: true } }); probe.setPosition(0, 200, 0); app.root.addChild(probe); // handle onCubemapPostRender event fired by the cubemapRenderer when all faces of the cubemap are done rendering probe.on('onCubemapPostRender', () => { // prefilter just rendered cubemap into envAtlas, so that it can be used for reflection during the rest of the frame // @ts-ignore pc.EnvLighting.generateAtlas(probe.script.cubemapRenderer.cubeMap, { target: envAtlas }); }); // Set an update function on the app's update event let time = 0; let updateProbeCount = 1; let updateVideo = true; app.on("update", function (dt: number) { time += dt * 0.3; // Update the video data to the texture every other frame if (updateVideo) { videoTexture.upload(); } updateVideo = !updateVideo; // move the light around lightOmni.setLocalPosition(300 * Math.sin(time), 300, 300 * Math.cos(time)); // update the reflection probe as needed const updateFrequency = data.get('settings.updateFrequency'); updateProbeCount--; if (updateFrequency === 0) updateProbeCount = 1; if (updateProbeCount <= 0) { // enable probe rendering probe.enabled = true; updateProbeCount = updateFrequency; } else {<|fim▁hole|> // update material properties based on settings const shininess = data.get('settings.shininess'); const metalness = data.get('settings.metalness'); const bumpiness = data.get('settings.bumpiness'); roomMaterial.shininess = shininess; roomMaterial.metalness = metalness; roomMaterial.bumpiness = bumpiness; roomMaterial.update(); sphereMaterial.shininess = shininess; sphereMaterial.metalness = metalness; sphereMaterial.bumpiness = bumpiness; sphereMaterial.update(); }); } } export default BoxReflectionExample;<|fim▁end|>
probe.enabled = false; }
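The update callback in the row above throttles the expensive cubemap re-render with a simple frame countdown. A standalone Python sketch of that countdown pattern (illustrative names; the original is TypeScript against the PlayCanvas API):

class FrameThrottle:
    # frequency N > 0: the job fires every N frames.
    # frequency 0: the job never re-fires, mirroring the 'Once' option
    # above, where the probe keeps its initial render.
    def __init__(self, frequency):
        self.frequency = frequency
        self.countdown = 1

    def tick(self):
        self.countdown -= 1
        if self.frequency == 0:
            self.countdown = 1  # pin the counter so the job stays off
            return False
        if self.countdown <= 0:
            self.countdown = self.frequency
            return True
        return False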
<|file_name|>subscription.py<|end_file_name|><|fim▁begin|>__author__ = "UShareSoft" from texttable import Texttable from ussclicore.argumentParser import ArgumentParser, ArgumentParserError from ussclicore.cmd import Cmd, CoreGlobal from uforgecli.utils import org_utils from ussclicore.utils import printer from ussclicore.utils import generics_utils from uforgecli.utils.uforgecli_utils import * from uforge.objects import uforge from subscription_admin import Subscription_Admins from subscription_role import Subscription_Roles from subscription_format import Subscription_Format from subscription_os import Subscription_Os from subscription_quota import Subscription_Quota from uforgecli.utils import uforgecli_utils import pyxb import shlex import sys class Subscription_Cmd(Cmd, CoreGlobal): """Manage subscription profiles: list profiles, create profiles, update profiles""" cmd_name = "subscription" def __init__(self): self.subCmds = {} self.generate_sub_commands() super(Subscription_Cmd, self).__init__() def generate_sub_commands(self): subscriptionRoles = Subscription_Roles() self.subCmds[subscriptionRoles.cmd_name] = subscriptionRoles subscriptionAdmins = Subscription_Admins() self.subCmds[subscriptionAdmins.cmd_name] = subscriptionAdmins subscriptionFormat = Subscription_Format() self.subCmds[subscriptionFormat.cmd_name] = subscriptionFormat subscriptionOs = Subscription_Os() self.subCmds[subscriptionOs.cmd_name] = subscriptionOs subscriptionQuota = Subscription_Quota() self.subCmds[subscriptionQuota.cmd_name] = subscriptionQuota def arg_list(self): doParser = ArgumentParser(prog=self.cmd_name + " list", add_help=True, description="List all the subscription profiles for a given organization. If no organization is provided the default organization is used.") optional = doParser.add_argument_group("optional arguments") optional.add_argument('--org', dest='org', required=False, help="The organization name. 
If no organization is provided, then the default organization is used.") return doParser def do_list(self, args): try: doParser = self.arg_list() doArgs = doParser.parse_args(shlex.split(args)) org = org_utils.org_get(self.api, doArgs.org) # call UForge API printer.out("Getting all the subscription profiles for the organization ...") subscriptions = self.api.Orgs(org.dbId).Subscriptions().Getall(Search=None) subscriptions = generics_utils.order_list_object_by(subscriptions.subscriptionProfiles.subscriptionProfile, "name") if subscriptions is None or len(subscriptions) == 0: printer.out("There are no subscriptions in [" + org.name + "] ") return 0 printer.out("List of subscription profiles in [" + org.name + "] :") table = Texttable(200) table.set_cols_align(["c", "c", "c", "c"]) table.header(["Name", "Code", "Active", "Description"]) for subscription in subscriptions: if subscription.active: active = "X" else: active = "" table.add_row([subscription.name, subscription.code, active, subscription.description]) print table.draw() + "\n" printer.out("Found " + str(len(subscriptions)) + " subscription profile(s).") return 0 except ArgumentParserError as e: printer.out("ERROR: In Arguments: " + str(e), printer.ERROR) self.help_list() except Exception as e: return handle_uforge_exception(e) def help_list(self): doParser = self.arg_list() doParser.print_help() def arg_info(self): doParser = ArgumentParser(prog=self.cmd_name + " info", add_help=True, description="Get detailed information on a subscription profile within an organization.") mandatory = doParser.add_argument_group("mandatory arguments") optional = doParser.add_argument_group("optional arguments") mandatory.add_argument('--name', dest='name', required=True, help="The name of the subscription profile") optional.add_argument('--org', dest='org', required=False, help="The organization name. 
If no organization is provided, then the default organization is used.") return doParser def do_info(self, args): try: # add arguments doParser = self.arg_info() doArgs = doParser.parse_args(shlex.split(args)) # call UForge API printer.out("Getting subscription profile with name [" + doArgs.name + "]...") org = org_utils.org_get(self.api, doArgs.org) subscriptions = self.api.Orgs(org.dbId).Subscriptions().Getall(Search=None) printer.out("Subscription profile for [" + doArgs.name + "] :") subscription = subscriptions.subscriptionProfiles.subscriptionProfile exist = False for item in subscription: if item.name == doArgs.name: exist = True subscription = item if not exist: printer.out("The requested subscription profile doesn't exist in [" + org.name + "]") return 0 table = Texttable(200) table.set_cols_align(["l", "l"]) table.header(["Info", "Value"]) table.add_row(["Name", subscription.name]) table.add_row(["Code", subscription.code]) if subscription.active: active = "X" else: active = "" table.add_row(["Active", active]) if subscription.roles.role: nb = len(subscription.roles.role) table.add_row(["Roles", str(nb)]) else: table.add_row(["Roles", "None"]) if subscription.admins.admin: nbAdmin = len(subscription.admins.admin) table.add_row(["Administrators", str(nbAdmin)]) else: table.add_row(["Administrators", "None"]) if subscription.distributions.distribution: nbDist = len(subscription.distributions.distribution) table.add_row(["Operating Systems", str(nbDist)]) else: table.add_row(["Operating Systems", "None"]) if subscription.formats.format: nbFormat = len(subscription.formats.format) table.add_row(["Image Formats", str(nbFormat)]) else: table.add_row(["Image Formats", "None"]) print table.draw() + "\n" if subscription.description is not None and subscription.description != "": printer.out("Description : " + subscription.description + "\n") if subscription.admins.admin: nb = subscription.admins.admin nb = len(nb) printer.out("Administrator Details :") table = Texttable(200) table.set_cols_align(["l"]) table.header(["Name"]) for item in subscription.admins.admin: table.add_row([item.name]) print table.draw() + "\n" printer.out("Found " + str(nb) + " administrator(s).\n") else: printer.out("Subscription profile doesn't have any administrators.\n") if subscription.roles.role: printer.out("Role Details :") table = Texttable(200) table.set_cols_align(["l"]) table.header(["Name"]) for item in subscription.roles.role: table.add_row([item.name]) print table.draw() + "\n" else: printer.out("Subscription profile doesn't have any roles.\n") if subscription.distributions.distribution: nb = subscription.distributions.distribution nb = len(nb) printer.out("Operating System Details :") table = Texttable(200) table.set_cols_align(["l", "l", "l", "l", "l", "l"]) table.header(["Distribution", "Version", "Architecture", "Access", "Visible", "Release Date"]) for item in subscription.distributions.distribution: if item.active: active = "X" else: active = "" if item.visible: visible = "X" else: visible = "" if item.releaseDate is None: releaseDate = "Unknown" else: releaseDate = item.releaseDate table.add_row([item.name, item.version, item.arch, active, visible, releaseDate]) print table.draw() + "\n" printer.out("Found " + str(nb) + " distribution(s).\n") else: printer.out("Subscription profile doesn't have any distributions.\n") if subscription.formats.format: printer.out("Formats Details :") table = Texttable(200) table.set_cols_align(["l", "l"]) table.header(["Format", "Access"]) for item in 
subscription.formats.format: if item.access: access = "X" else: access = "" table.add_row([item.name, access]) print table.draw() + "\n" printer.out("Found " + str(nbFormat) + " format(s).\n") else: printer.out("Subscription profile doesn't have any formats.\n") return 0 except ArgumentParserError as e: printer.out("ERROR: In Arguments: " + str(e), printer.ERROR) self.help_info() except Exception as e: return handle_uforge_exception(e) def help_info(self): doParser = self.arg_info() doParser.print_help() def arg_create(self): doParser = ArgumentParser(prog=self.cmd_name + " create", add_help=True, description="Create a new subscription profile within an organization.") mandatory = doParser.add_argument_group("mandatory arguments") optional = doParser.add_argument_group("optional arguments") mandatory.add_argument('--name', dest='name', required=True, help="The name of the subscription profile to create") mandatory.add_argument('--code', dest='code', required=True, help="The code of the subscription profile to create") optional.add_argument('--description', dest='description', type=str, required=False, help="The description of the subscription profile to create") optional.add_argument('--active', dest='active', action='store_true', required=False, help="Flag to make the subscription profile active.") optional.add_argument('--admins', dest='admins', nargs='+', required=False, help="Admin users to be added to the subscription profile that can use the subscription profile to create a user (users separated by spaces)") optional.add_argument('--roles', dest='roles', nargs='+', required=False, help="Roles to be added to the subscription profile") optional.add_argument('--org', dest='org', required=False, help="The organization name. If no organization is provided, then the default organization is used.") return doParser def do_create(self, args): try: # add arguments doParser = self.arg_create() doArgs = doParser.parse_args(shlex.split(args)) org = org_utils.org_get(self.api, doArgs.org) # call UForge API printer.out("Creating subscription profile [" + doArgs.name + "] ...") # create the subscription profile object manually new_subscription_profile = subscriptionProfile() new_subscription_profile.name = doArgs.name new_subscription_profile.code = doArgs.code if doArgs.description: new_subscription_profile.description = doArgs.description if doArgs.active: new_subscription_profile.active = doArgs.active new_subscription_profile.admins = pyxb.BIND() if doArgs.admins: for a in doArgs.admins: new_admin = user() new_admin.loginName = a new_subscription_profile.admins.append(new_admin) new_subscription_profile.roles = pyxb.BIND() if doArgs.roles: for a in doArgs.roles: new_role = role() new_role.name = a new_subscription_profile.roles.append(new_role) # Send the create subscription profile request to the server new_subscription_profile = self.api.Orgs(org.dbId).Subscriptions().Add(new_subscription_profile) if new_subscription_profile is None: printer.out("No information about the new subscription profile available", printer.ERROR) else: printer.out("New subscription profile [" + new_subscription_profile.name + "] created.", printer.OK) table = Texttable(200) table.set_cols_align(["c", "c", "c"]) table.header( ["Name", "Code", "Active"]) table.add_row([new_subscription_profile.name, new_subscription_profile.code, "X" if new_subscription_profile.active else ""]) print table.draw() + "\n" return 0 except ArgumentParserError as e: printer.out("ERROR: In Arguments: " + str(e), printer.ERROR) self.help_create() except Exception as e: return 
handle_uforge_exception(e) def help_create(self): doParser = self.arg_create() doParser.print_help() def arg_delete(self): doParser = ArgumentParser(prog=self.cmd_name + " delete", add_help=True, description="Delete a subscription profile from an organization.") mandatory = doParser.add_argument_group("mandatory arguments") optional = doParser.add_argument_group("optional arguments") mandatory.add_argument('--name', dest='name', required=True, help="The name of the subscription profile to delete") optional.add_argument('--org', dest='org', required=False, help="The organization name. If no organization is provided, then the default organization is used.") return doParser def do_delete(self, args): try: # add arguments doParser = self.arg_delete() doArgs = doParser.parse_args(shlex.split(args)) printer.out("Deleting subscription profile [" + doArgs.name + "] ...") org = org_utils.org_get(self.api, doArgs.org) # call UForge API subscriptions = self.api.Orgs(org.dbId).Subscriptions().Getall(Search=None) exist = False for item in subscriptions.subscriptionProfiles.subscriptionProfile: if item.name == doArgs.name: exist = True subscription = item self.api.Orgs(org.dbId).Subscriptions(subscription.dbId).Remove(None) printer.out("Subscription profile [" + doArgs.name + "] deleted", printer.OK) if not exist: printer.out("The requested subscription profile doesn't exist in [" + org.name + "]") return 0 return 0 except ArgumentParserError as e: printer.out("ERROR: In Arguments: " + str(e), printer.ERROR) self.help_delete() except Exception as e: return handle_uforge_exception(e) def help_delete(self): doParser = self.arg_delete() doParser.print_help() def arg_update(self): doParser = ArgumentParser(prog=self.cmd_name + " update", add_help=True, description="Updates an existing subscription profile.") mandatory = doParser.add_argument_group("mandatory arguments") optional = doParser.add_argument_group("optional arguments") mandatory.add_argument('--name', dest='name', required=True, help="The name of the subscription profile to update.") optional.add_argument('--description', dest='description', type=str, required=False, help="The description of the subscription profile to update.") optional.add_argument('--active', dest='active', action='store_true', required=False, help="Flag to make the subscription profile active.") optional.add_argument('--org', dest='org', required=False, help="The organization name. 
If no organization is provided, then the default organization is used.") return doParser def do_update(self, args): try: doParser = self.arg_update() doArgs = doParser.parse_args(shlex.split(args)) printer.out("Getting subscription profile with name [" + doArgs.name + "]...") org = org_utils.org_get(self.api, doArgs.org) subscriptions = self.api.Orgs(org.dbId).Subscriptions().Getall(Search=None) exist = False for item in subscriptions.subscriptionProfiles.subscriptionProfile: if item.name == doArgs.name: exist = True updated_subscription = subscriptionProfile() updated_subscription.name = item.name updated_subscription.code = item.code if doArgs.description: updated_subscription.description = doArgs.description if doArgs.active: updated_subscription.active = True else: updated_subscription.active = False printer.out("Updating subscription profile with name [" + doArgs.name + "] ...") # call UForge API self.api.Orgs(org.dbId).Subscriptions(item.dbId).Update(updated_subscription) printer.out("Subscription profile [" + doArgs.name + "] updated.", printer.OK) if not exist: printer.out("The requested subscription profile doesn't exist in [" + org.name + "]") return 0 return 0 except ArgumentParserError as e: printer.out("ERROR: In Arguments: " + str(e), printer.ERROR) self.help_update() except Exception as e: return handle_uforge_exception(e) def help_update(self): doParser = self.arg_update() doParser.print_help() def arg_enable(self): doParser = ArgumentParser(prog=self.cmd_name + " enable", add_help=True, description="Activates or enables a subscription profile within an organization.") mandatory = doParser.add_argument_group("mandatory arguments") optional = doParser.add_argument_group("optional arguments") mandatory.add_argument('--name', dest='name', required=True, help="The name of the subscription profile to enable.") optional.add_argument('--org', dest='org', required=False, help="The organization name. 
If no organization is provided, then the default organization is used.") return doParser def do_enable(self, args): try: # add arguments doParser = self.arg_enable() doArgs = doParser.parse_args(shlex.split(args)) printer.out("Getting subscription profile with name [" + doArgs.name + "]...") org = org_utils.org_get(self.api, doArgs.org) # call UForge API subscriptions = self.api.Orgs(org.dbId).Subscriptions().Getall(Search=None) exist = False for item in subscriptions.subscriptionProfiles.subscriptionProfile: if item.name == doArgs.name: exist = True updated_subscription = subscriptionProfile() updated_subscription.name = item.name updated_subscription.code = item.code if not item.active: updated_subscription.active = True printer.out("Enabling subscription profile with name [" + doArgs.name + "] ...") self.api.Orgs(org.dbId).Subscriptions(item.dbId).Update(updated_subscription) printer.out("Subscription [" + doArgs.name + "] is enabled.", printer.OK) else: printer.out("Subscription [" + doArgs.name + "] is already enabled", printer.WARNING) if not exist: printer.out("The requested subscription profile doesn't exist in [" + org.name + "]") return 0 return 0 except ArgumentParserError as e: printer.out("ERROR: In Arguments: " + str(e), printer.ERROR) self.help_enable() except Exception as e: return handle_uforge_exception(e) def help_enable(self): doParser = self.arg_enable() doParser.print_help() def arg_disable(self):<|fim▁hole|>
<|file_name|>0012_auto__add_unique_company_slug.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding unique constraint on 'Company', fields ['slug'] db.create_unique(u'ecg_balancing_company', ['slug']) def backwards(self, orm): # Removing unique constraint on 'Company', fields ['slug'] db.delete_unique(u'ecg_balancing_company', ['slug']) models = { u'auth.group': { 'Meta': {'object_name': 'Group'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, u'auth.permission': { 'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, u'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, u'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, u'ecg_balancing.company': { 'Meta': {'object_name': 'Company'}, 'activities': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'city': ('django.db.models.fields.CharField', [], {'max_length': '50'}), 'country': ('django.db.models.fields.CharField', [], {'max_length': 
'50'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}), 'employees_number': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'fax': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), 'foundation_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'industry': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'logo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), 'managing_directors': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'model_creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'owners': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'phone': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), 'revenue': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}), 'street': ('django.db.models.fields.CharField', [], {'max_length': '50'}), 'website': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'zipcode': ('django.db.models.fields.PositiveIntegerField', [], {}) }, u'ecg_balancing.companybalance': { 'Meta': {'object_name': 'CompanyBalance'}, 'auditor': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'common_good': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'company': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'balance'", 'to': u"orm['ecg_balancing.Company']"}), 'end_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'matrix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'company_balances'", 'to': u"orm['ecg_balancing.ECGMatrix']"}), 'peer_companies': ('django.db.models.fields.related.ManyToManyField', [], {'max_length': '255', 'to': u"orm['ecg_balancing.Company']", 'null': 'True', 'symmetrical': 'False', 'blank': 'True'}), 'process_description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'prospect': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'start_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'year': ('django.db.models.fields.SmallIntegerField', [], {'max_length': '4', 'null': 'True', 'blank': 'True'}) }, u'ecg_balancing.companybalanceindicator': { 'Meta': {'object_name': 'CompanyBalanceIndicator'}, 'company_balance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'company_balance'", 'to': u"orm['ecg_balancing.CompanyBalance']"}), 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'evaluation': ('django.db.models.fields.IntegerField', [], {'default': '0'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'indicator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': 
"u'company_balance'", 'to': u"orm['ecg_balancing.Indicator']"}) }, u'ecg_balancing.ecgmatrix': { 'Meta': {'object_name': 'ECGMatrix'}, 'contact': ('django.db.models.fields.EmailField', [], {'max_length': '75'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'version': ('django.db.models.fields.CharField', [], {'default': "u'4.1'", 'max_length': '6'}) }, u'ecg_balancing.indicator': { 'Meta': {'object_name': 'Indicator'}, 'contact': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}), 'ecg_value': ('django.db.models.fields.CharField', [], {'max_length': '1'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'matrix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'indicators'", 'to': u"orm['ecg_balancing.ECGMatrix']"}), 'max_evaluation': ('django.db.models.fields.IntegerField', [], {}), 'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'parent_indicator'", 'null': 'True', 'to': u"orm['ecg_balancing.Indicator']"}), 'stakeholder': ('django.db.models.fields.CharField', [], {'max_length': '1'}), 'subindicator_number': ('django.db.models.fields.IntegerField', [], {}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}) }, u'ecg_balancing.userprofile': { 'Meta': {'object_name': 'UserProfile'}, 'avatar': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),<|fim▁hole|> u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "u'profile'", 'unique': 'True', 'to': u"orm['auth.User']"}) }, u'ecg_balancing.userrole': { 'Meta': {'object_name': 'UserRole'}, 'company': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['ecg_balancing.Company']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'role': ('django.db.models.fields.CharField', [], {'max_length': '5'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}) } } complete_apps = ['ecg_balancing']<|fim▁end|>
'companies': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['ecg_balancing.Company']", 'null': 'True', 'blank': 'True'}),
<|file_name|>Toolbar.js<|end_file_name|><|fim▁begin|>export class Toolbar {<|fim▁hole|><|fim▁end|>
}